id int64 0 190k | prompt stringlengths 21 13.4M | docstring stringlengths 1 12k ⌀ |
|---|---|---|
157,612 | from sanic import Sanic, Blueprint
from sanic.response import text
def request_middleware_1(request):
print('1') | null |
157,613 | from sanic import Sanic, Blueprint
from sanic.response import text
def request_middleware_2(request):
print('2') | null |
157,614 | from sanic import Sanic, Blueprint
from sanic.response import text
def request_middleware_3(request):
print('3') | null |
157,615 | from sanic import Sanic, Blueprint
from sanic.response import text
def resp_middleware_4(request, response):
print('4') | null |
157,616 | from sanic import Sanic, Blueprint
from sanic.response import text
def resp_middleware_5(request, response):
print('5') | null |
157,617 | from sanic import Sanic, Blueprint
from sanic.response import text
def resp_middleware_6(request, response):
print('6') | null |
157,618 | from sanic import Sanic, Blueprint
from sanic.response import text
def text(
    body: str,
    status: int = 200,
    headers: Optional[Dict[str, str]] = None,
    content_type: str = "text/plain; charset=utf-8",
) -> HTTPResponse:
    """
    Returns response object with body in text format.
    :param body: Response data to be encoded.
    :param status: Response code.
    :param headers: Custom Headers.
    :param content_type: the content type (string) of the response
    :raises TypeError: if ``body`` is not already a ``str``.
    """
    if not isinstance(body, str):
        # Fix: the closing parenthesis used to leak into the message text
        # (e.g. "...got int)"); emit only the type name.
        raise TypeError(
            f"Bad body type. Expected str, got {type(body).__name__}"
        )
    return HTTPResponse(
        body, status=status, headers=headers, content_type=content_type
    )
def pop_handler(request):
return text('hello world') | null |
157,619 | from pathlib import Path
from sanic import Sanic, response
def handler_text(request):
return response.text("Hello") | null |
157,620 | from pathlib import Path
from sanic import Sanic, response
def handler_json(request):
return response.json({"foo": "bar"}) | null |
157,621 | from pathlib import Path
from sanic import Sanic, response
async def handler_ws(request, ws):
name = "<someone>"
while True:
data = f"Hello {name}"
await ws.send(data)
name = await ws.recv()
if not name:
break | null |
157,622 | from pathlib import Path
from sanic import Sanic, response
async def handler_file(request):
return await response.file(Path("../") / "setup.py") | null |
157,623 | from pathlib import Path
from sanic import Sanic, response
async def handler_file_stream(request):
return await response.file_stream(
Path("../") / "setup.py", chunk_size=1024
) | null |
157,624 | from pathlib import Path
from sanic import Sanic, response
async def handler_stream(request):
while True:
body = await request.stream.read()
if body is None:
break
body = body.decode("utf-8").replace("1", "A")
await response.write(body)
return response.stream(body) | null |
157,625 | from pathlib import Path
from sanic import Sanic, response
async def listener_before_server_start(*args, **kwargs):
print("before_server_start") | null |
157,626 | from pathlib import Path
from sanic import Sanic, response
async def listener_after_server_start(*args, **kwargs):
print("after_server_start") | null |
157,627 | from pathlib import Path
from sanic import Sanic, response
async def listener_before_server_stop(*args, **kwargs):
print("before_server_stop") | null |
157,628 | from pathlib import Path
from sanic import Sanic, response
async def listener_after_server_stop(*args, **kwargs):
print("after_server_stop") | null |
157,629 | from pathlib import Path
from sanic import Sanic, response
async def print_on_request(request):
print("print_on_request") | null |
157,630 | from pathlib import Path
from sanic import Sanic, response
async def print_on_response(request, response):
print("print_on_response") | null |
157,631 | from sanic import Sanic
from sanic.response import json
import asyncio
import aiohttp
sem = None
def init(sanic, loop):
global sem
concurrency_per_worker = 4
sem = asyncio.Semaphore(concurrency_per_worker, loop=loop) | null |
157,632 | from sanic import Sanic
from sanic.response import json
import asyncio
import aiohttp
sem = None
def json(
    body: Any,
    status: int = 200,
    headers: Optional[Dict[str, str]] = None,
    content_type: str = "application/json",
    dumps: Optional[Callable[..., str]] = None,
    **kwargs,
) -> HTTPResponse:
    """
    Serialize ``body`` to JSON and wrap it in an HTTPResponse.
    :param body: Response data to be serialized.
    :param status: Response code.
    :param headers: Custom Headers.
    :param content_type: content type header value (JSON by default).
    :param dumps: serializer callable; the framework default is used when
        none is supplied.
    :param kwargs: Remaining arguments that are passed to the json encoder.
    """
    # Fall back to the framework-wide serializer when the caller did not
    # provide one.
    serializer = dumps if dumps else BaseHTTPResponse._dumps
    payload = serializer(body, **kwargs)
    return HTTPResponse(
        payload,
        status=status,
        headers=headers,
        content_type=content_type,
    )
The provided code snippet includes necessary dependencies for implementing the `bounded_fetch` function. Write a Python function `async def bounded_fetch(session, url)` to solve the following problem:
Use session object to perform 'get' request on url
Here is the function:
async def bounded_fetch(session, url):
"""
Use session object to perform 'get' request on url
"""
async with sem, session.get(url) as response:
return await response.json() | Use session object to perform 'get' request on url |
157,633 | from sanic import Sanic, response
from sanic.blueprints import Blueprint
async def hello_0(request):
return response.text("Some defaults") | null |
157,634 | from sanic import Sanic, response
from sanic.blueprints import Blueprint
async def hello_1(request):
return response.text("42") | null |
157,635 | from sanic import Sanic, response
from sanic.blueprints import Blueprint
async def hello_2(request):
return response.text("What is the meaning of life?") | null |
157,636 | from sanic import Sanic, response
from sanic.blueprints import Blueprint
async def hello_3(request):
return response.text("42") | null |
157,637 | from sanic import Sanic, response
from sanic.blueprints import Blueprint
app = Sanic(__name__)
app.blueprint(bp)
def name(request):
return response.text(request.app.url_for("name", _external=True)) | null |
157,638 | from sanic import Sanic
from functools import wraps
from sanic.response import json
def check_request_for_authorization_status(request):
def json(
body: Any,
status: int = 200,
headers: Optional[Dict[str, str]] = None,
content_type: str = "application/json",
dumps: Optional[Callable[..., str]] = None,
**kwargs,
) -> HTTPResponse:
def authorized(f):
@wraps(f)
async def decorated_function(request, *args, **kwargs):
# run some method that checks the request
# for the client's authorization status
is_authorized = check_request_for_authorization_status(request)
if is_authorized:
# the user is authorized.
# run the handler method and return the response
response = await f(request, *args, **kwargs)
return response
else:
# the user is not authorized.
return json({'status': 'not_authorized'}, 403)
return decorated_function | null |
157,639 | from sanic import Sanic
from sanic.response import redirect
def redirect(
    to: str,
    headers: Optional[Dict[str, str]] = None,
    status: int = 302,
    content_type: str = "text/html; charset=utf-8",
) -> HTTPResponse:
    """
    Abort execution and cause a 302 redirect (by default) by setting a
    Location header.
    :param to: path or fully qualified URL to redirect to
    :param headers: optional dict of headers to include in the new request
    :param status: status code (int) of the new request, defaults to 302
    :param content_type: the content type (string) of the response
    """
    headers = headers or {}
    # Percent-encode the target so it is header-safe, while leaving
    # URL-structural characters intact (RFC 7231 permits a relative URI).
    headers["Location"] = quote_plus(to, safe=":/%#?&=@[]!$&'()*+,;")
    return HTTPResponse(
        status=status, headers=headers, content_type=content_type
    )
def index(request):
return redirect("index.html") | null |
157,640 | from sanic import Sanic
from sanic.response import redirect
async def feed(request, ws):
while True:
data = 'hello!'
print('Sending: ' + data)
await ws.send(data)
data = await ws.recv()
print('Received: ' + data) | null |
157,641 | from sanic import Sanic
from sanic import response
def handle_request(request):
return response.redirect('/redirect') | null |
157,642 | import itertools
import os
import signal
import subprocess
import sys
from time import sleep
def _iter_module_files():
    """Yield the source path of every relevant loaded Python file.

    Covers files of loaded modules, files in folders of already loaded
    modules, and files reachable through a package.
    """
    # Snapshot the values: the module registry can change while iterating.
    for module in list(sys.modules.values()):
        if module is None:
            continue
        filename = getattr(module, "__file__", None)
        if not filename:
            continue
        # Walk upwards until an existing file is found; give up once the
        # path stops changing (we reached the filesystem root).
        previous = None
        found = True
        while not os.path.isfile(filename):
            previous, filename = filename, os.path.dirname(filename)
            if filename == previous:
                found = False
                break
        if found:
            # Map compiled artifacts back to their source file.
            if filename[-4:] in (".pyc", ".pyo"):
                filename = filename[:-1]
            yield filename
def restart_with_reloader(changed=None):
    """Spawn a fresh copy of this process with the same arguments.

    :param changed: optional iterable of changed file paths, forwarded to
        the child through ``SANIC_RELOADED_FILES``.
    :return: the started ``subprocess.Popen`` handle.
    """
    env = dict(os.environ)
    # Mark the child so it knows it runs under the reloader.
    env["SANIC_SERVER_RUNNING"] = "true"
    env["SANIC_RELOADER_PROCESS"] = "true"
    env["SANIC_RELOADED_FILES"] = ",".join(changed) if changed else ""
    return subprocess.Popen(_get_args_for_reloading(), env=env)
def _check_file(filename, mtimes):
    """Return True when *filename* changed since its last recorded check.

    The first sighting of a file only records its mtime and reports no
    change; later calls compare against (and refresh) the stored value.
    May raise OSError if the file disappeared — callers handle that.
    """
    current = os.stat(filename).st_mtime
    previous = mtimes.get(filename)
    if previous is not None and current > previous:
        mtimes[filename] = current
        return True
    if previous is None:
        mtimes[filename] = current
    return False
The provided code snippet includes necessary dependencies for implementing the `watchdog` function. Write a Python function `def watchdog(sleep_interval, app)` to solve the following problem:
Watch project files, restart worker process if a change happened. :param sleep_interval: interval in seconds. :return: Nothing
Here is the function:
def watchdog(sleep_interval, app):
    """Watch project files, restart worker process if a change happened.
    :param sleep_interval: interval in second.
    :return: Nothing
    """
    # Translate SIGTERM into KeyboardInterrupt so the cleanup path below
    # also runs on orderly shutdown.
    def interrupt_self(*args):
        raise KeyboardInterrupt
    mtimes = {}
    signal.signal(signal.SIGTERM, interrupt_self)
    # Windows consoles deliver SIGBREAK rather than SIGTERM; hook it too.
    if os.name == "nt":
        signal.signal(signal.SIGBREAK, interrupt_self)
    worker_process = restart_with_reloader()
    try:
        while True:
            changed = set()
            # Watch both the loaded modules and any extra reload
            # directories configured on the app.
            for filename in itertools.chain(
                _iter_module_files(),
                *(d.glob("**/*") for d in app.reload_dirs),
            ):
                try:
                    if _check_file(filename, mtimes):
                        # reload_dirs entries are Path objects; normalize
                        # both kinds to an absolute string path.
                        path = (
                            filename
                            if isinstance(filename, str)
                            else filename.resolve()
                        )
                        changed.add(str(path))
                except OSError:
                    # File vanished between listing and stat; skip it.
                    continue
            if changed:
                # Replace the worker; the changed-file list travels to the
                # child via the environment (see restart_with_reloader).
                worker_process.terminate()
                worker_process.wait()
                worker_process = restart_with_reloader(changed)
            sleep(sleep_interval)
    except KeyboardInterrupt:
        pass
    finally:
        worker_process.terminate()
        worker_process.wait() | Watch project files, restart worker process if a change happened. :param sleep_interval: interval in second. :return: Nothing
157,643 | import os
import ssl
from typing import Iterable, Optional, Union
from sanic.log import logger
CIPHERS_TLS12 = [
"ECDHE-ECDSA-CHACHA20-POLY1305",
"ECDHE-ECDSA-AES256-GCM-SHA384",
"ECDHE-ECDSA-AES128-GCM-SHA256",
"ECDHE-RSA-CHACHA20-POLY1305",
"ECDHE-RSA-AES256-GCM-SHA384",
"ECDHE-RSA-AES128-GCM-SHA256",
]
def server_name_callback(
    sslobj: ssl.SSLObject, server_name: str, ctx: ssl.SSLContext
) -> None:
    """Record the SNI sent by the client on the SSL object.

    The value is stored as ``sslobj.sanic_server_name`` so later code can
    see which hostname the client asked for.
    """
    setattr(sslobj, "sanic_server_name", server_name)
The provided code snippet includes necessary dependencies for implementing the `create_context` function. Write a Python function `def create_context( certfile: Optional[str] = None, keyfile: Optional[str] = None, password: Optional[str] = None, ) -> ssl.SSLContext` to solve the following problem:
Create a context with secure crypto and HTTP/1.1 in protocols.
Here is the function:
def create_context(
    certfile: Optional[str] = None,
    keyfile: Optional[str] = None,
    password: Optional[str] = None,
) -> ssl.SSLContext:
    """Create a context with secure crypto and HTTP/1.1 in protocols.
    :param certfile: path to the certificate chain file, if serving one.
    :param keyfile: path to the private key matching ``certfile``.
    :param password: password for the key file, if it is encrypted.
    """
    # Purpose.CLIENT_AUTH yields a server-side context (it authenticates
    # this side to connecting clients).
    context = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH)
    # Refuse anything older than TLS 1.2 and restrict to the curated list.
    context.minimum_version = ssl.TLSVersion.TLSv1_2
    context.set_ciphers(":".join(CIPHERS_TLS12))
    # Advertise HTTP/1.1 via ALPN.
    context.set_alpn_protocols(["http/1.1"])
    # Record the client's SNI on each SSL object for later inspection.
    context.sni_callback = server_name_callback
    # Both pieces are required; a cert without its key would be useless.
    if certfile and keyfile:
        context.load_cert_chain(certfile, keyfile, password)
    return context | Create a context with secure crypto and HTTP/1.1 in protocols.
157,644 | import os
import ssl
from typing import Iterable, Optional, Union
from sanic.log import logger
def shorthand_to_ctx(
    ctxdef: Union[None, ssl.SSLContext, dict, str]
) -> Optional[ssl.SSLContext]:
    """Convert an ssl argument shorthand to an SSLContext object."""
    # None means "no TLS"; a ready-made context passes straight through.
    if ctxdef is None:
        return None
    if isinstance(ctxdef, ssl.SSLContext):
        return ctxdef
    # A string names a certificate directory; a dict supplies keyword
    # arguments for a simple certificate definition.
    if isinstance(ctxdef, str):
        return load_cert_dir(ctxdef)
    if isinstance(ctxdef, dict):
        return CertSimple(**ctxdef)
    raise ValueError(
        f"Invalid ssl argument {type(ctxdef)}."
        " Expecting a list of certdirs, a dict or an SSLContext."
    )
class CertSelector(ssl.SSLContext):
    """Automatically select SSL certificate based on the hostname that the
    client is trying to access, via SSL SNI. Paths to certificate folders
    with privkey.pem and fullchain.pem in them should be provided, and
    will be matched in the order given whenever there is a new connection.
    """
    def __new__(cls, ctxs):
        # SSLContext.__new__ would choke on the extra argument; swallow
        # ``ctxs`` here (it is consumed in __init__ instead).
        return super().__new__(cls)
    def __init__(self, ctxs: Iterable[Optional[ssl.SSLContext]]):
        super().__init__()
        # Delegate the per-connection certificate choice to the SNI hook.
        self.sni_callback = selector_sni_callback # type: ignore
        self.sanic_select = []  # candidate contexts, in the order given
        self.sanic_fallback = None  # used when no SNI / nothing matches
        all_names = []
        for i, ctx in enumerate(ctxs):
            # Skip empty entries (e.g. shorthands that resolved to None).
            if not ctx:
                continue
            # "names" is metadata attached elsewhere on the context under
            # the ``sanic`` attribute — presumably the cert's SAN list.
            names = dict(getattr(ctx, "sanic", {})).get("names", [])
            all_names += names
            self.sanic_select.append(ctx)
            # The first usable context doubles as the fallback.
            if i == 0:
                self.sanic_fallback = ctx
        if not all_names:
            raise ValueError(
                "No certificates with SubjectAlternativeNames found."
            )
        logger.info(f"Certificate vhosts: {', '.join(all_names)}")
The provided code snippet includes necessary dependencies for implementing the `process_to_context` function. Write a Python function `def process_to_context( ssldef: Union[None, ssl.SSLContext, dict, str, list, tuple] ) -> Optional[ssl.SSLContext]` to solve the following problem:
Process app.run ssl argument from easy formats to full SSLContext.
Here is the function:
def process_to_context(
    ssldef: Union[None, ssl.SSLContext, dict, str, list, tuple]
) -> Optional[ssl.SSLContext]:
    """Process app.run ssl argument from easy formats to full SSLContext."""
    # A list/tuple of definitions becomes an SNI-based selector; any single
    # shorthand (None, context, dict, or cert-dir string) converts directly.
    return (
        CertSelector(map(shorthand_to_ctx, ssldef))
        if isinstance(ssldef, (list, tuple))
        else shorthand_to_ctx(ssldef)
    ) | Process app.run ssl argument from easy formats to full SSLContext.
157,645 | import os
import ssl
from typing import Iterable, Optional, Union
from sanic.log import logger
class CertSelector(ssl.SSLContext):
"""Automatically select SSL certificate based on the hostname that the
client is trying to access, via SSL SNI. Paths to certificate folders
with privkey.pem and fullchain.pem in them should be provided, and
will be matched in the order given whenever there is a new connection.
"""
def __new__(cls, ctxs):
return super().__new__(cls)
def __init__(self, ctxs: Iterable[Optional[ssl.SSLContext]]):
super().__init__()
self.sni_callback = selector_sni_callback # type: ignore
self.sanic_select = []
self.sanic_fallback = None
all_names = []
for i, ctx in enumerate(ctxs):
if not ctx:
continue
names = dict(getattr(ctx, "sanic", {})).get("names", [])
all_names += names
self.sanic_select.append(ctx)
if i == 0:
self.sanic_fallback = ctx
if not all_names:
raise ValueError(
"No certificates with SubjectAlternativeNames found."
)
logger.info(f"Certificate vhosts: {', '.join(all_names)}")
def find_cert(self: CertSelector, server_name: str):
    """Return the first configured certificate context matching the SNI.

    Falls back to ``sanic_fallback`` when nothing matches, or when the
    client sent no SNI at all.

    :raises ValueError: no match and no fallback is available.
    :return: a matching ssl.SSLContext object.
    """
    if not server_name:
        if self.sanic_fallback:
            return self.sanic_fallback
        raise ValueError(
            "The client provided no SNI to match for certificate."
        )
    # First candidate whose hostname matches wins; order is significant.
    matched = next(
        (c for c in self.sanic_select if match_hostname(c, server_name)),
        None,
    )
    if matched is not None:
        return matched
    if self.sanic_fallback:
        return self.sanic_fallback
    raise ValueError(f"No certificate found matching hostname {server_name!r}")
def server_name_callback(
sslobj: ssl.SSLObject, server_name: str, ctx: ssl.SSLContext
) -> None:
"""Store the received SNI as sslobj.sanic_server_name."""
sslobj.sanic_server_name = server_name # type: ignore
logger = logging.getLogger("sanic.root")
The provided code snippet includes necessary dependencies for implementing the `selector_sni_callback` function. Write a Python function `def selector_sni_callback( sslobj: ssl.SSLObject, server_name: str, ctx: CertSelector ) -> Optional[int]` to solve the following problem:
Select a certificate matching the SNI.
Here is the function:
def selector_sni_callback(
    sslobj: ssl.SSLObject, server_name: str, ctx: CertSelector
) -> Optional[int]:
    """Select a certificate matching the SNI."""
    # Call server_name_callback to store the SNI on sslobj
    server_name_callback(sslobj, server_name, ctx)
    # Find a new context matching the hostname
    try:
        sslobj.context = find_cert(ctx, server_name)
    except ValueError as e:
        # Returning an alert code (instead of raising) rejects the
        # handshake with a TLS-level error.
        logger.warning(f"Rejecting TLS connection: {e}")
        # This would show ERR_SSL_UNRECOGNIZED_NAME_ALERT on client side if
        # asyncio/uvloop did proper SSL shutdown. They don't.
        return ssl.ALERT_DESCRIPTION_UNRECOGNIZED_NAME
    return None # mypy complains without explicit return | Select a certificate mathing the SNI.
157,646 | from __future__ import annotations
import asyncio
from collections import defaultdict
from copy import deepcopy
from functools import wraps
from inspect import isfunction
from itertools import chain
from types import SimpleNamespace
from typing import (
TYPE_CHECKING,
Any,
Dict,
Iterable,
List,
Optional,
Sequence,
Set,
Tuple,
Union,
)
from sanic_routing.exceptions import NotFound
from sanic_routing.route import Route
from sanic.base import BaseSanic
from sanic.blueprint_group import BlueprintGroup
from sanic.exceptions import SanicException
from sanic.helpers import Default, _default
from sanic.models.futures import FutureRoute, FutureStatic
from sanic.models.handler_types import (
ListenerType,
MiddlewareType,
RouteHandler,
)
def lazy(func, as_decorator=True):
    """Defer a Blueprint method so it applies at registration time.

    NOTE(review): behavior inferred from the visible code only — wraps
    *func* so it produces a "future" that is applied to the handler and,
    for already-registered blueprints, re-registers them on their apps.
    Confirm against sanic.blueprints.
    """
    @wraps(func)
    def decorator(bp, *args, **kwargs):
        nonlocal as_decorator
        # Tell the wrapped method not to apply itself immediately.
        kwargs["apply"] = False
        pass_handler = None
        # Called with the handler as a positional argument means plain
        # call style rather than decorator style.
        if args and isfunction(args[0]):
            as_decorator = False
        def wrapper(handler):
            future = func(bp, *args, **kwargs)
            if as_decorator:
                future = future(handler)
            # Already-registered blueprints are re-registered so the new
            # future takes effect on their apps.
            if bp.registered:
                for app in bp.apps:
                    bp.register(app, {})
            return future
        return wrapper if as_decorator else wrapper(pass_handler)
    return decorator | null |
157,647 | import types
from importlib.util import module_from_spec, spec_from_file_location
from os import environ as os_environ
from pathlib import Path
from re import findall as re_findall
from typing import Union
from sanic.exceptions import LoadFileException, PyFileError
from sanic.helpers import import_string
The provided code snippet includes necessary dependencies for implementing the `str_to_bool` function. Write a Python function `def str_to_bool(val: str) -> bool` to solve the following problem:
Takes string and tries to turn it into bool as human would do. If val is in case insensitive ( "y", "yes", "yep", "yup", "t", "true", "on", "enable", "enabled", "1" ) returns True. If val is in case insensitive ( "n", "no", "f", "false", "off", "disable", "disabled", "0" ) returns False. Else Raise ValueError.
Here is the function:
def str_to_bool(val: str) -> bool:
"""Takes string and tries to turn it into bool as human would do.
If val is in case insensitive (
"y", "yes", "yep", "yup", "t",
"true", "on", "enable", "enabled", "1"
) returns True.
If val is in case insensitive (
"n", "no", "f", "false", "off", "disable", "disabled", "0"
) returns False.
Else Raise ValueError."""
val = val.lower()
if val in {
"y",
"yes",
"yep",
"yup",
"t",
"true",
"on",
"enable",
"enabled",
"1",
}:
return True
elif val in {"n", "no", "f", "false", "off", "disable", "disabled", "0"}:
return False
else:
raise ValueError(f"Invalid truth value {val}") | Takes string and tries to turn it into bool as human would do. If val is in case insensitive ( "y", "yes", "yep", "yup", "t", "true", "on", "enable", "enabled", "1" ) returns True. If val is in case insensitive ( "n", "no", "f", "false", "off", "disable", "disabled", "0" ) returns False. Else Raise ValueError. |
157,648 | import types
from importlib.util import module_from_spec, spec_from_file_location
from os import environ as os_environ
from pathlib import Path
from re import findall as re_findall
from typing import Union
from sanic.exceptions import LoadFileException, PyFileError
from sanic.helpers import import_string
class PyFileError(Exception):
    """Raised when a Python config file cannot be executed.

    :param file: path of the config file that failed to execute.
    """

    def __init__(self, file):
        # Fix: the original passed ("... %s", file) as two Exception args,
        # so the placeholder was never interpolated; format it explicitly.
        super().__init__(f"could not execute config file {file}")
        # Keep the offending path available to callers.
        self.file = file
class LoadFileException(SanicException):
    """Raised when a file (e.g. a configuration module) cannot be loaded."""
    pass
def import_string(module_name, package=None):
    """Import a module, or instantiate a class, from a dotted string path.

    :param module_name: dotted path of a module, or of a class to import
        and instantiate.
    :param package: anchor package for relative imports.
    :returns: the module object, or a new instance when the path names a
        class.
    """
    parent, _, attr = module_name.rpartition(".")
    target = getattr(import_module(parent, package=package), attr)
    # A sub-module is returned as-is; anything else is treated as a class
    # (or factory) and called with no arguments.
    if ismodule(target):
        return target
    return target()
The provided code snippet includes necessary dependencies for implementing the `load_module_from_file_location` function. Write a Python function `def load_module_from_file_location( location: Union[bytes, str, Path], encoding: str = "utf8", *args, **kwargs )` to solve the following problem:
Returns loaded module provided as a file path. :param args: Coresponds to importlib.util.spec_from_file_location location parameters,but with this differences: - It has to be of a string or bytes type. - You can also use here environment variables in format ${some_env_var}. Mark that $some_env_var will not be resolved as environment variable. :encoding: If location parameter is of a bytes type, then use this encoding to decode it into string. :param args: Coresponds to the rest of importlib.util.spec_from_file_location parameters. :param kwargs: Coresponds to the rest of importlib.util.spec_from_file_location parameters. For example You can: some_module = load_module_from_file_location( "some_module_name", "/some/path/${some_env_var}" )
Here is the function:
def load_module_from_file_location(
location: Union[bytes, str, Path], encoding: str = "utf8", *args, **kwargs
): # noqa
"""Returns loaded module provided as a file path.
:param args:
Coresponds to importlib.util.spec_from_file_location location
parameters,but with this differences:
- It has to be of a string or bytes type.
- You can also use here environment variables
in format ${some_env_var}.
Mark that $some_env_var will not be resolved as environment variable.
:encoding:
If location parameter is of a bytes type, then use this encoding
to decode it into string.
:param args:
Coresponds to the rest of importlib.util.spec_from_file_location
parameters.
:param kwargs:
Coresponds to the rest of importlib.util.spec_from_file_location
parameters.
For example You can:
some_module = load_module_from_file_location(
"some_module_name",
"/some/path/${some_env_var}"
)
"""
if isinstance(location, bytes):
location = location.decode(encoding)
if isinstance(location, Path) or "/" in location or "$" in location:
if not isinstance(location, Path):
# A) Check if location contains any environment variables
# in format ${some_env_var}.
env_vars_in_location = set(re_findall(r"\${(.+?)}", location))
# B) Check these variables exists in environment.
not_defined_env_vars = env_vars_in_location.difference(
os_environ.keys()
)
if not_defined_env_vars:
raise LoadFileException(
"The following environment variables are not set: "
f"{', '.join(not_defined_env_vars)}"
)
# C) Substitute them in location.
for env_var in env_vars_in_location:
location = location.replace(
"${" + env_var + "}", os_environ[env_var]
)
location = str(location)
if ".py" in location:
name = location.split("/")[-1].split(".")[
0
] # get just the file name without path and .py extension
_mod_spec = spec_from_file_location(
name, location, *args, **kwargs
)
assert _mod_spec is not None # type assertion for mypy
module = module_from_spec(_mod_spec)
_mod_spec.loader.exec_module(module) # type: ignore
else:
module = types.ModuleType("config")
module.__file__ = str(location)
try:
with open(location) as config_file:
exec( # nosec
compile(config_file.read(), location, "exec"),
module.__dict__,
)
except IOError as e:
e.strerror = "Unable to load configuration file (e.strerror)"
raise
except Exception as e:
raise PyFileError(location) from e
return module
else:
try:
return import_string(location)
except ValueError:
raise IOError("Unable to load configuration %s" % str(location)) | Returns loaded module provided as a file path. :param args: Coresponds to importlib.util.spec_from_file_location location parameters,but with this differences: - It has to be of a string or bytes type. - You can also use here environment variables in format ${some_env_var}. Mark that $some_env_var will not be resolved as environment variable. :encoding: If location parameter is of a bytes type, then use this encoding to decode it into string. :param args: Coresponds to the rest of importlib.util.spec_from_file_location parameters. :param kwargs: Coresponds to the rest of importlib.util.spec_from_file_location parameters. For example You can: some_module = load_module_from_file_location( "some_module_name", "/some/path/${some_env_var}" ) |
157,649 | import re
import string
from datetime import datetime
from typing import Dict
_Translator = {
n: "\\%03o" % n for n in set(range(256)) - set(map(ord, _UnescapedChars))
}
_Translator.update({ord('"'): '\\"', ord("\\"): "\\\\"})
_is_legal_key = re.compile("[%s]+" % re.escape(_LegalChars)).fullmatch
The provided code snippet includes necessary dependencies for implementing the `_quote` function. Write a Python function `def _quote(str)` to solve the following problem:
r"""Quote a string for use in a cookie header. If the string does not need to be double-quoted, then just return the string. Otherwise, surround the string in doublequotes and quote (with a \) special characters.
Here is the function:
def _quote(str):
r"""Quote a string for use in a cookie header.
If the string does not need to be double-quoted, then just return the
string. Otherwise, surround the string in doublequotes and quote
(with a \) special characters.
"""
if str is None or _is_legal_key(str):
return str
else:
return '"' + str.translate(_Translator) + '"' | r"""Quote a string for use in a cookie header. If the string does not need to be double-quoted, then just return the string. Otherwise, surround the string in doublequotes and quote (with a \) special characters. |
157,650 | from functools import partial
from mimetypes import guess_type
from os import path
from pathlib import PurePath
from typing import (
Any,
AnyStr,
Callable,
Coroutine,
Dict,
Iterator,
Optional,
Tuple,
Union,
)
from urllib.parse import quote_plus
from warnings import warn
from sanic.compat import Header, open_async
from sanic.constants import DEFAULT_HTTP_CONTENT_TYPE
from sanic.cookies import CookieJar
from sanic.helpers import has_message_body, remove_entity_headers
from sanic.http import Http
from sanic.models.protocol_types import HTMLProtocol, Range
class HTTPResponse(BaseHTTPResponse):
    """
    HTTP response to be sent back to the client.
    :param body: the body content to be returned
    :type body: Optional[bytes]
    :param status: HTTP response number. **Default=200**
    :type status: int
    :param headers: headers to be returned
    :type headers: Optional;
    :param content_type: content type to be returned (as a header)
    :type content_type: Optional[str]
    """
    # Fixed attribute set keeps per-response memory low on busy servers.
    __slots__ = ("body", "status", "content_type", "headers", "_cookies")
    def __init__(
        self,
        body: Optional[AnyStr] = None,
        status: int = 200,
        headers: Optional[Union[Header, Dict[str, str]]] = None,
        content_type: Optional[str] = None,
    ):
        super().__init__()
        self.content_type: Optional[str] = content_type
        # _encode_body comes from BaseHTTPResponse (not shown here);
        # presumably it normalizes str input to bytes — confirm in base.
        self.body = self._encode_body(body)
        self.status = status
        # Wrap in a Header object so later header manipulation is uniform.
        self.headers = Header(headers or {})
        # None until cookies are first accessed elsewhere.
        self._cookies = None
    async def eof(self):
        # Send an empty final chunk to terminate the response stream.
        await self.send("", True)
    async def __aenter__(self):
        # ``async with resp as send:`` hands back the send coroutine ...
        return self.send
    async def __aexit__(self, *_):
        # ... and guarantees the stream is finished on exit.
        await self.eof()
The provided code snippet includes necessary dependencies for implementing the `empty` function. Write a Python function `def empty( status=204, headers: Optional[Dict[str, str]] = None ) -> HTTPResponse` to solve the following problem:
Returns an empty response to the client. :param status: Response code. :param headers: Custom Headers.
Here is the function:
def empty(
    status=204, headers: Optional[Dict[str, str]] = None
) -> HTTPResponse:
    """
    Returns an empty response to the client.
    :param status: Response code (204 "No Content" by default).
    :param headers: Custom Headers.
    """
    # Explicit empty byte body; content_type is deliberately left unset.
    return HTTPResponse(body=b"", status=status, headers=headers) | Returns an empty response to the client. :param status Response code. :param headers Custom Headers.
157,651 | from functools import partial
from mimetypes import guess_type
from os import path
from pathlib import PurePath
from typing import (
Any,
AnyStr,
Callable,
Coroutine,
Dict,
Iterator,
Optional,
Tuple,
Union,
)
from urllib.parse import quote_plus
from warnings import warn
from sanic.compat import Header, open_async
from sanic.constants import DEFAULT_HTTP_CONTENT_TYPE
from sanic.cookies import CookieJar
from sanic.helpers import has_message_body, remove_entity_headers
from sanic.http import Http
from sanic.models.protocol_types import HTMLProtocol, Range
class HTTPResponse(BaseHTTPResponse):
"""
HTTP response to be sent back to the client.
:param body: the body content to be returned
:type body: Optional[bytes]
:param status: HTTP response number. **Default=200**
:type status: int
:param headers: headers to be returned
:type headers: Optional;
:param content_type: content type to be returned (as a header)
:type content_type: Optional[str]
"""
__slots__ = ("body", "status", "content_type", "headers", "_cookies")
def __init__(
self,
body: Optional[AnyStr] = None,
status: int = 200,
headers: Optional[Union[Header, Dict[str, str]]] = None,
content_type: Optional[str] = None,
):
super().__init__()
self.content_type: Optional[str] = content_type
self.body = self._encode_body(body)
self.status = status
self.headers = Header(headers or {})
self._cookies = None
async def eof(self):
await self.send("", True)
async def __aenter__(self):
return self.send
async def __aexit__(self, *_):
await self.eof()
DEFAULT_HTTP_CONTENT_TYPE = "application/octet-stream"
The provided code snippet includes necessary dependencies for implementing the `raw` function. Write a Python function `def raw( body: Optional[AnyStr], status: int = 200, headers: Optional[Dict[str, str]] = None, content_type: str = DEFAULT_HTTP_CONTENT_TYPE, ) -> HTTPResponse` to solve the following problem:
Returns response object without encoding the body. :param body: Response data. :param status: Response code. :param headers: Custom Headers. :param content_type: the content type (string) of the response.
Here is the function:
def raw(
    body: Optional[AnyStr],
    status: int = 200,
    headers: Optional[Dict[str, str]] = None,
    content_type: str = DEFAULT_HTTP_CONTENT_TYPE,
) -> HTTPResponse:
    """
    Returns response object without encoding the body.
    :param body: Response data.
    :param status: Response code.
    :param headers: Custom Headers.
    :param content_type: the content type (string) of the response
        (default: application/octet-stream).
    """
    # Body is passed through untouched — suitable for binary payloads.
    return HTTPResponse(
        body=body,
        status=status,
        headers=headers,
        content_type=content_type,
    ) | Returns response object without encoding the body. :param body: Response data. :param status: Response code. :param headers: Custom Headers. :param content_type: the content type (string) of the response.
157,652 | from functools import partial
from mimetypes import guess_type
from os import path
from pathlib import PurePath
from typing import (
Any,
AnyStr,
Callable,
Coroutine,
Dict,
Iterator,
Optional,
Tuple,
Union,
)
from urllib.parse import quote_plus
from warnings import warn
from sanic.compat import Header, open_async
from sanic.constants import DEFAULT_HTTP_CONTENT_TYPE
from sanic.cookies import CookieJar
from sanic.helpers import has_message_body, remove_entity_headers
from sanic.http import Http
from sanic.models.protocol_types import HTMLProtocol, Range
class HTTPResponse(BaseHTTPResponse):
    """
    HTTP response to be sent back to the client.

    :param body: the body content to be returned
    :type body: Optional[bytes]
    :param status: HTTP response number. **Default=200**
    :type status: int
    :param headers: headers to be returned
    :type headers: Optional;
    :param content_type: content type to be returned (as a header)
    :type content_type: Optional[str]
    """

    __slots__ = ("body", "status", "content_type", "headers", "_cookies")

    def __init__(
        self,
        body: Optional[AnyStr] = None,
        status: int = 200,
        headers: Optional[Union[Header, Dict[str, str]]] = None,
        content_type: Optional[str] = None,
    ):
        super().__init__()

        # content_type is assigned before the body is encoded —
        # presumably because the inherited _encode_body consults it;
        # TODO confirm against BaseHTTPResponse (not visible here).
        self.content_type: Optional[str] = content_type
        self.body = self._encode_body(body)
        self.status = status
        self.headers = Header(headers or {})
        self._cookies = None  # lazily created cookie jar

    async def eof(self):
        # Send an empty, final chunk to terminate a streaming response.
        await self.send("", True)

    async def __aenter__(self):
        # ``async with resp as send:`` hands the caller the send callable.
        return self.send

    async def __aexit__(self, *_):
        # Always finish the stream on context exit.
        await self.eof()
The provided code snippet includes necessary dependencies for implementing the `html` function. Write a Python function `def html( body: Union[str, bytes, HTMLProtocol], status: int = 200, headers: Optional[Dict[str, str]] = None, ) -> HTTPResponse` to solve the following problem:
Returns response object with body in html format. :param body: str or bytes-ish, or an object with __html__ or _repr_html_. :param status: Response code. :param headers: Custom Headers.
Here is the function:
def html(
    body: Union[str, bytes, HTMLProtocol],
    status: int = 200,
    headers: Optional[Dict[str, str]] = None,
) -> HTTPResponse:
    """
    Build a response carrying an HTML payload.

    :param body: str or bytes-ish, or an object with __html__ or _repr_html_.
    :param status: Response code.
    :param headers: Custom Headers.
    """
    if not isinstance(body, (str, bytes)):
        # Objects may render themselves: prefer the standard __html__
        # hook, falling back to IPython's _repr_html_.
        for hook in ("__html__", "_repr_html_"):
            if hasattr(body, hook):
                body = getattr(body, hook)()
                break
    return HTTPResponse(  # type: ignore
        body,
        status=status,
        headers=headers,
        content_type="text/html; charset=utf-8",
    )
157,653 | from pathlib import Path
from sanic import Sanic
from sanic.exceptions import SanicException
from sanic.response import redirect
class SanicException(Exception):
    """Base exception carrying an optional HTTP status code.

    When no message is given, the class-level default (``message``) is
    used, or failing that the standard reason phrase for ``status_code``
    from ``STATUS_CODES``.
    """

    # Subclasses may override this with a default message.
    message: str = ""

    def __init__(
        self,
        message: Optional[Union[str, bytes]] = None,
        status_code: Optional[int] = None,
        quiet: Optional[bool] = None,
        context: Optional[Dict[str, Any]] = None,
        extra: Optional[Dict[str, Any]] = None,
    ) -> None:
        self.context = context
        self.extra = extra

        if message is None:
            if self.message:
                message = self.message
            elif status_code is not None:
                message = STATUS_CODES.get(status_code, b"").decode("utf8")

        super().__init__(message)

        if status_code is not None:
            self.status_code = status_code

        # quiet=None/False/True with None meaning: choose by status
        # (everything except 500 is quiet by default).
        should_be_quiet = quiet or (
            quiet is None and status_code not in (None, 500)
        )
        if should_be_quiet:
            self.quiet = True
def redirect(
    to: str,
    headers: Optional[Dict[str, str]] = None,
    status: int = 302,
    content_type: str = "text/html; charset=utf-8",
) -> HTTPResponse:
    """
    Abort execution and cause a 302 redirect (by default) by setting a
    Location header.

    :param to: path or fully qualified URL to redirect to
    :param headers: optional dict of headers to include in the new request
    :param status: status code (int) of the new request, defaults to 302
    :param content_type: the content type (string) of the response
    """
    # Copy rather than mutate: the previous implementation wrote the
    # Location key directly into the caller's dict, a surprising side
    # effect for callers that reuse their headers mapping.
    headers = dict(headers) if headers else {}

    # URL Quote the URL before redirecting
    safe_to = quote_plus(to, safe=":/%#?&=@[]!$&'()*+,;")

    # According to RFC 7231, a relative URI is now permitted.
    headers["Location"] = safe_to

    return HTTPResponse(
        status=status, headers=headers, content_type=content_type
    )
def create_simple_server(directory: Path):
    """Create a Sanic app that statically serves ``directory``.

    The root URL redirects to ``index.html`` inside the directory.

    :param directory: filesystem directory to serve
    :raises SanicException: if ``directory`` is not an existing directory
    :return: the configured ``Sanic`` application (not yet running)
    """
    if not directory.is_dir():
        raise SanicException(
            "Cannot setup Sanic Simple Server without a path to a directory"
        )
    app = Sanic("SimpleServer")
    app.static("/", directory, name="main")

    # "/" redirects to the static route's index.html; the static route is
    # looked up by the name ("main") given to app.static above.
    @app.get("/")
    def index(_):
        return redirect(app.url_for("main", filename="index.html"))

    return app
157,654 | from __future__ import annotations
from typing import (
TYPE_CHECKING,
Any,
DefaultDict,
Dict,
List,
NamedTuple,
Optional,
Tuple,
Union,
)
from sanic_routing.route import Route
import email.utils
import uuid
from collections import defaultdict
from http.cookies import SimpleCookie
from types import SimpleNamespace
from urllib.parse import parse_qs, parse_qsl, unquote, urlunparse
from httptools import parse_url
from sanic.compat import CancelledErrors, Header
from sanic.constants import DEFAULT_HTTP_CONTENT_TYPE
from sanic.exceptions import InvalidUsage
from sanic.headers import (
AcceptContainer,
Options,
parse_accept,
parse_content_header,
parse_forwarded,
parse_host,
parse_xforwarded,
)
from sanic.log import error_logger, logger
from sanic.models.protocol_types import TransportProtocol
from sanic.response import BaseHTTPResponse, HTTPResponse
class RequestParameters(dict):
    """
    A dict whose values are lists: ``get`` returns the first element of
    the stored list, while ``getlist`` returns the whole list.
    """

    def get(self, name: str, default: Optional[Any] = None) -> Optional[Any]:
        """Return the first stored value for ``name``, or ``default``."""
        return super().get(name, [default])[0]

    def getlist(self, name: str, default: Optional[Any] = None) -> Optional[Any]:
        """Return the entire list stored for ``name``, or ``default``."""
        return super().get(name, default)
class File(NamedTuple):
    """
    Model for defining a file. It is a ``namedtuple``, therefore you can
    iterate over the object, or access the parameters by name.

    :param type: The mimetype, defaults to text/plain
    :param body: Bytes of the file
    :param name: The filename
    """

    # mimetype of the uploaded part (e.g. "text/plain")
    type: str
    # raw bytes of the file contents
    body: bytes
    # filename as sent by the client
    name: str
def parse_content_header(value: str) -> Tuple[str, Options]:
    """Parse content-type and content-disposition header values.

    E.g. 'form-data; name=upload; filename=\"file.txt\"' to
    ('form-data', {'name': 'upload', 'filename': 'file.txt'})

    Mostly identical to cgi.parse_header and werkzeug.parse_options_header
    but runs faster and handles special characters better. Unescapes quotes.
    """
    # Normalize Firefox's non-standard quote escaping to %22 first so the
    # parameter regex stays simple; the %22 is restored to '"' below.
    value = _firefox_quote_escape.sub("%22", value)
    pos = value.find(";")
    if pos == -1:
        # No parameters present: the whole string is the bare value.
        options: Dict[str, Union[int, str]] = {}
    else:
        # _param (module-level regex, not visible here) presumably
        # captures key (group 1) and token/quoted value (groups 2/3) —
        # TODO confirm group numbering against the pattern definition.
        options = {
            m.group(1).lower(): m.group(2) or m.group(3).replace("%22", '"')
            for m in _param.finditer(value[pos:])
        }
        value = value[:pos]
    return value.strip().lower(), options
logger = logging.getLogger("sanic.root")
The provided code snippet includes necessary dependencies for implementing the `parse_multipart_form` function. Write a Python function `def parse_multipart_form(body, boundary)` to solve the following problem:
Parse a request body and returns fields and files :param body: bytes request body :param boundary: bytes multipart boundary :return: fields (RequestParameters), files (RequestParameters)
Here is the function:
def parse_multipart_form(body, boundary):
    """
    Parse a request body and returns fields and files

    :param body: bytes request body
    :param boundary: bytes multipart boundary
    :return: fields (RequestParameters), files (RequestParameters)
    """
    files = RequestParameters()
    fields = RequestParameters()

    # The first and last fragments are the preamble/epilogue surrounding
    # the boundary markers; only the middle parts carry form data.
    form_parts = body.split(boundary)
    for form_part in form_parts[1:-1]:
        file_name = None
        content_type = "text/plain"  # default when no Content-Type header
        content_charset = "utf-8"
        field_name = None
        line_index = 2  # skip the CRLF that follows the boundary marker
        line_end_index = 0
        # Scan the part's header lines (CRLF-terminated) until the blank
        # line that separates headers from the payload.
        while not line_end_index == -1:
            line_end_index = form_part.find(b"\r\n", line_index)
            form_line = form_part[line_index:line_end_index].decode("utf-8")
            line_index = line_end_index + 2

            if not form_line:
                # Blank line: header section is finished.
                break

            colon_index = form_line.index(":")
            idx = colon_index + 2  # skip ": "
            form_header_field = form_line[0:colon_index].lower()
            form_header_value, form_parameters = parse_content_header(
                form_line[idx:]
            )

            if form_header_field == "content-disposition":
                field_name = form_parameters.get("name")
                file_name = form_parameters.get("filename")

                # non-ASCII filenames in RFC2231, "filename*" format
                if file_name is None and form_parameters.get("filename*"):
                    encoding, _, value = email.utils.decode_rfc2231(
                        form_parameters["filename*"]
                    )
                    file_name = unquote(value, encoding=encoding)
            elif form_header_field == "content-type":
                content_type = form_header_value
                content_charset = form_parameters.get("charset", "utf-8")

        if field_name:
            # Payload is everything after the blank line, minus the last
            # 4 bytes — presumably the trailing CRLF plus the "--" lead-in
            # of the next boundary; TODO confirm against how the caller
            # builds ``boundary``.
            post_data = form_part[line_index:-4]
            if file_name is None:
                value = post_data.decode(content_charset)
                if field_name in fields:
                    fields[field_name].append(value)
                else:
                    fields[field_name] = [value]
            else:
                form_file = File(
                    type=content_type, name=file_name, body=post_data
                )
                if field_name in files:
                    files[field_name].append(form_file)
                else:
                    files[field_name] = [form_file]
        else:
            logger.debug(
                "Form-data field does not have a 'name' parameter "
                "in the Content-Disposition header"
            )

    return fields, files
157,655 | from typing import Any, Dict, Optional, Union
from sanic.helpers import STATUS_CODES
class SanicException(Exception):
    """Base exception for Sanic, carrying an optional HTTP status code.

    :param message: error text; when omitted, falls back to the
        class-level ``message`` or to the standard phrase for
        ``status_code`` from ``STATUS_CODES``.
    :param status_code: HTTP status to respond with; only assigned when
        not None (class default otherwise — TODO confirm, the class-level
        default is declared outside this snippet).
    :param quiet: suppress verbose logging; None means "quiet unless 500".
    :param context: extra data attached to the exception.
    :param extra: extra data attached to the exception.
    """

    # Subclasses may override this with a default message.
    message: str = ""

    def __init__(
        self,
        message: Optional[Union[str, bytes]] = None,
        status_code: Optional[int] = None,
        quiet: Optional[bool] = None,
        context: Optional[Dict[str, Any]] = None,
        extra: Optional[Dict[str, Any]] = None,
    ) -> None:
        self.context = context
        self.extra = extra
        if message is None:
            if self.message:
                message = self.message
            elif status_code is not None:
                # Standard reason phrase, e.g. 404 -> "Not Found".
                msg: bytes = STATUS_CODES.get(status_code, b"")
                message = msg.decode("utf8")
        super().__init__(message)
        if status_code is not None:
            self.status_code = status_code
        # quiet=None/False/True with None meaning choose by status
        if quiet or quiet is None and status_code not in (None, 500):
            self.quiet = True
The provided code snippet includes necessary dependencies for implementing the `abort` function. Write a Python function `def abort(status_code: int, message: Optional[Union[str, bytes]] = None)` to solve the following problem:
Raise an exception based on SanicException. Returns the HTTP response message appropriate for the given status code, unless provided. :param status_code: The HTTP status code to return. :param message: The HTTP response body. Defaults to the messages in STATUS_CODES from sanic.helpers for the given status code.
Here is the function:
def abort(status_code: int, message: Optional[Union[str, bytes]] = None):
    """
    Raise an exception based on SanicException. Returns the HTTP response
    message appropriate for the given status code, unless provided.

    :param status_code: The HTTP status code to return.
    :param message: The HTTP response body. Defaults to the messages in
        STATUS_CODES from sanic.helpers for the given status code.
    :raises SanicException: always.
    """
    import warnings

    # Use DeprecationWarning — the conventional category for API
    # deprecations — and stacklevel=2 so the warning points at the caller
    # of abort() rather than at this line.
    warnings.warn(
        "sanic.exceptions.abort has been marked as deprecated, and will be "
        "removed in release 21.12.\n To migrate your code, simply replace "
        "abort(status_code, msg) with raise SanicException(msg, status_code), "
        "or even better, raise an appropriate SanicException subclass.",
        DeprecationWarning,
        stacklevel=2,
    )
    raise SanicException(message=message, status_code=status_code)
157,656 | from __future__ import annotations
import asyncio
from enum import Enum
from inspect import isawaitable
from typing import Any, Dict, List, Optional, Tuple, Union
from sanic_routing import BaseRouter, Route, RouteGroup
from sanic_routing.exceptions import NotFound
from sanic_routing.utils import path_to_parts
from sanic.exceptions import InvalidSignal
from sanic.log import error_logger, logger
from sanic.models.handler_types import SignalHandler
def _blank():
... | null |
157,657 | from __future__ import annotations
import re
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
from urllib.parse import unquote
from sanic.exceptions import InvalidHeader
from sanic.helpers import STATUS_CODES
class Accept(str):
    """One entry of an Accept header: the raw media-type string enriched
    with its parsed type/subtype and q-value.

    Subclasses ``str`` (the full ``type/subtype`` text), while the rich
    comparison operators compare *qvalues* so entries can be sorted by
    preference.
    """

    def __new__(cls, value: str, *args, **kwargs):
        # Only the raw string takes part in str construction; the other
        # arguments are consumed by __init__.
        return str.__new__(cls, value)

    def __init__(
        self,
        value: str,
        type_: MediaType,
        subtype: MediaType,
        *,
        q: str = "1.0",
        **kwargs: str,
    ):
        qvalue = float(q)
        if qvalue > 1 or qvalue < 0:
            raise InvalidHeader(
                f"Accept header qvalue must be between 0 and 1, not: {qvalue}"
            )
        self.value = value
        self.type_ = type_
        self.subtype = subtype
        self.qvalue = qvalue
        self.params = kwargs

    def _compare(self, other, method):
        # Comparisons are by qvalue; operands without a .qvalue yield
        # NotImplemented so Python can try the reflected operation.
        try:
            return method(self.qvalue, other.qvalue)
        except (AttributeError, TypeError):
            return NotImplemented

    def __lt__(self, other: Union[str, Accept]):
        return self._compare(other, lambda s, o: s < o)

    def __le__(self, other: Union[str, Accept]):
        return self._compare(other, lambda s, o: s <= o)

    def __eq__(self, other: Union[str, Accept]):  # type: ignore
        return self._compare(other, lambda s, o: s == o)

    # Bug fix: defining __eq__ implicitly sets __hash__ to None, which
    # made Accept instances unhashable (unusable in sets / as dict keys)
    # even though the class subclasses str.  Keep str's hashing; note
    # that hashing follows the string value while __eq__ compares
    # qvalues, matching how Accept is used as a plain str elsewhere.
    __hash__ = str.__hash__

    def __ge__(self, other: Union[str, Accept]):
        return self._compare(other, lambda s, o: s >= o)

    def __gt__(self, other: Union[str, Accept]):
        return self._compare(other, lambda s, o: s > o)

    def __ne__(self, other: Union[str, Accept]):  # type: ignore
        return self._compare(other, lambda s, o: s != o)

    def match(
        self,
        other,
        *,
        allow_type_wildcard: bool = True,
        allow_subtype_wildcard: bool = True,
    ) -> bool:
        """Return True when this media type matches ``other``.

        With wildcards allowed, plain equality of the type/subtype parts
        is used; with wildcards disallowed, both sides must match and
        neither may be a wildcard.
        """
        type_match = (
            self.type_ == other.type_
            if allow_type_wildcard
            else (
                self.type_.match(other.type_)
                and not self.type_.is_wildcard
                and not other.type_.is_wildcard
            )
        )
        subtype_match = (
            self.subtype == other.subtype
            if allow_subtype_wildcard
            else (
                self.subtype.match(other.subtype)
                and not self.subtype.is_wildcard
                and not other.subtype.is_wildcard
            )
        )

        return type_match and subtype_match
def parse(cls, raw: str) -> Accept:
    """Parse one raw Accept-header entry (e.g. ``text/html;q=0.9``).

    NOTE(review): written with a ``cls`` first argument — presumably a
    classmethod attached to ``Accept``; confirm the decorator at the
    definition site.

    :raises InvalidHeader: if the entry is not ``type/subtype``.
    """
    invalid = False
    mtype = raw.strip()
    try:
        media, *raw_params = mtype.split(";")
        type_, subtype = media.split("/")
    except ValueError:
        invalid = True
    # When invalid is True, type_/subtype may be unbound, but the
    # short-circuit ``invalid or ...`` never evaluates them in that case.
    if invalid or not type_ or not subtype:
        raise InvalidHeader(f"Header contains invalid Accept value: {raw}")
    # Remaining ";key=value" segments become string parameters (q, etc.).
    params = dict(
        [
            (key.strip(), value.strip())
            for key, value in (param.split("=", 1) for param in raw_params)
        ]
    )
    return cls(mtype, MediaType(type_), MediaType(subtype), **params)
def parse_arg_as_accept(f):
    """Decorator: coerce a truthy non-``Accept`` argument through
    ``Accept.parse`` before invoking the wrapped method."""

    def func(self, other, *args, **kwargs):
        if other and not isinstance(other, Accept):
            other = Accept.parse(other)
        return f(self, other, *args, **kwargs)

    return func
157,658 | from __future__ import annotations
import re
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
from urllib.parse import unquote
from sanic.exceptions import InvalidHeader
from sanic.helpers import STATUS_CODES
Options = Dict[str, Union[int, str]]
_rparam = re.compile(f"(?:{_token}|{_quoted})={_token}\\s*($|[;,])", re.ASCII)
def fwd_normalize(fwd: OptionsIterable) -> Options:
    """Normalize and convert values extracted from forwarded headers.

    Addresses ("by"/"for") are canonicalized, host/proto lowercased,
    ports converted to int and paths unquoted; keys with invalid values
    are silently dropped.
    """
    normalized: Dict[str, Union[int, str]] = {}
    for key, val in fwd:
        if val is None:
            continue
        try:
            if key in ("by", "for"):
                normalized[key] = fwd_normalize_address(val)
            elif key in ("host", "proto"):
                normalized[key] = val.lower()
            elif key == "port":
                normalized[key] = int(val)
            elif key == "path":
                normalized[key] = unquote(val)
            else:
                normalized[key] = val
        except ValueError:
            pass
    return normalized
The provided code snippet includes necessary dependencies for implementing the `parse_forwarded` function. Write a Python function `def parse_forwarded(headers, config) -> Optional[Options]` to solve the following problem:
Parse RFC 7239 Forwarded headers. The value of `by` or `secret` must match `config.FORWARDED_SECRET` :return: dict with keys and values, or None if nothing matched
Here is the function:
def parse_forwarded(headers, config) -> Optional[Options]:
    """Parse RFC 7239 Forwarded headers.

    The value of `by` or `secret` must match `config.FORWARDED_SECRET`
    :return: dict with keys and values, or None if nothing matched
    """
    header = headers.getall("forwarded", None)
    secret = config.FORWARDED_SECRET
    if header is None or not secret:
        return None
    header = ",".join(header)  # Join multiple header lines
    if secret not in header:
        return None
    # Loop over <separator><key>=<value> elements from right to left.
    # The header string is scanned reversed (header[::-1]), so extracted
    # keys/values come out reversed and are flipped back with [::-1].
    sep = pos = None
    options: List[Tuple[str, str]] = []
    found = False
    for m in _rparam.finditer(header[::-1]):
        # Start of new element? (on parser skips and non-semicolon right sep)
        if m.start() != pos or sep != ";":
            # Was the previous element (from right) what we wanted?
            if found:
                break
            # Clear values and parse as new element
            del options[:]
        pos = m.end()
        val_token, val_quoted, key, sep = m.groups()
        key = key.lower()[::-1]
        val = (val_token or val_quoted.replace('"\\', '"'))[::-1]
        options.append((key, val))
        if key in ("secret", "by") and val == secret:
            found = True
        # Check if we would return on next round, to avoid useless parse
        if found and sep != ";":
            break
    # If secret was found, return the matching options in left-to-right order
    return fwd_normalize(reversed(options)) if found else None
157,659 | from __future__ import annotations
import re
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
from urllib.parse import unquote
from sanic.exceptions import InvalidHeader
from sanic.helpers import STATUS_CODES
Options = Dict[str, Union[int, str]]
def fwd_normalize(fwd: OptionsIterable) -> Options:
    """Normalize and convert values extracted from forwarded headers."""
    ret: Dict[str, Union[int, str]] = {}
    for key, val in fwd:
        if val is not None:
            try:
                if key in ("by", "for"):
                    # Canonicalize the node identifier (IP/unix path).
                    ret[key] = fwd_normalize_address(val)
                elif key in ("host", "proto"):
                    ret[key] = val.lower()
                elif key == "port":
                    ret[key] = int(val)
                elif key == "path":
                    ret[key] = unquote(val)
                else:
                    ret[key] = val
            except ValueError:
                # Malformed value (e.g. non-numeric port): drop this key
                # rather than failing the whole header.
                pass
    return ret
The provided code snippet includes necessary dependencies for implementing the `parse_xforwarded` function. Write a Python function `def parse_xforwarded(headers, config) -> Optional[Options]` to solve the following problem:
Parse traditional proxy headers.
Here is the function:
def parse_xforwarded(headers, config) -> Optional[Options]:
    """Parse traditional proxy headers.

    Resolves the client address from REAL_IP_HEADER or, failing that,
    from the PROXIES_COUNT-th-from-last entry of the X-Forwarded-For
    chain; then collects the related X-Forwarded-* headers.

    :return: normalized options dict, or None when no address was found
    """
    real_ip_header = config.REAL_IP_HEADER
    proxies_count = config.PROXIES_COUNT
    addr = real_ip_header and headers.getone(real_ip_header, None)
    if not addr and proxies_count:
        assert proxies_count > 0
        try:
            # Combine, split and filter multiple headers' entries
            forwarded_for = headers.getall(config.FORWARDED_FOR_HEADER)
            proxies = [
                p
                for p in (
                    p.strip() for h in forwarded_for for p in h.split(",")
                )
                if p
            ]
            # Trust only the entry added by the nearest configured proxy.
            addr = proxies[-proxies_count]
        except (KeyError, IndexError):
            pass
    # No processing of other headers if no address is found
    if not addr:
        return None

    def options():
        yield "for", addr
        for key, header in (
            ("proto", "x-scheme"),
            ("proto", "x-forwarded-proto"),  # Overrides X-Scheme if present
            ("host", "x-forwarded-host"),
            ("port", "x-forwarded-port"),
            ("path", "x-forwarded-path"),
        ):
            yield key, headers.getone(header, None)

    return fwd_normalize(options())
157,660 | from __future__ import annotations
import re
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
from urllib.parse import unquote
from sanic.exceptions import InvalidHeader
from sanic.helpers import STATUS_CODES
_host_re = re.compile(
r"((?:\[" + _ipv6 + r"\])|[a-zA-Z0-9.\-]{1,253})(?::(\d{1,5}))?"
)
The provided code snippet includes necessary dependencies for implementing the `parse_host` function. Write a Python function `def parse_host(host: str) -> Tuple[Optional[str], Optional[int]]` to solve the following problem:
Split host:port into hostname and port. :return: None in place of missing elements
Here is the function:
def parse_host(host: str) -> Tuple[Optional[str], Optional[int]]:
    """Split a ``host[:port]`` value into (hostname, port).

    :return: ``(None, None)`` when the value does not parse; the port is
        None when absent.
    """
    match = _host_re.fullmatch(host)
    if not match:
        return None, None
    name, port = match.groups()
    return name.lower(), None if port is None else int(port)
157,661 | from __future__ import annotations
import re
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
from urllib.parse import unquote
from sanic.exceptions import InvalidHeader
from sanic.helpers import STATUS_CODES
HeaderBytesIterable = Iterable[Tuple[bytes, bytes]]
_HTTP1_STATUSLINES = [
b"HTTP/1.1 %d %b\r\n" % (status, STATUS_CODES.get(status, b"UNKNOWN"))
for status in range(1000)
]
The provided code snippet includes necessary dependencies for implementing the `format_http1_response` function. Write a Python function `def format_http1_response(status: int, headers: HeaderBytesIterable) -> bytes` to solve the following problem:
Format a HTTP/1.1 response header.
Here is the function:
def format_http1_response(status: int, headers: HeaderBytesIterable) -> bytes:
    """Format a HTTP/1.1 response header block (status line + headers)."""
    # Plain bytes concatenation is kept deliberately: benchmarks showed it
    # beats bytearray, b"".join() and %-formatting here. %timeit changes.
    out = _HTTP1_STATUSLINES[status]
    for name_value in headers:
        out += b"%b: %b\r\n" % name_value
    out += b"\r\n"
    return out
157,662 | from __future__ import annotations
import re
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
from urllib.parse import unquote
from sanic.exceptions import InvalidHeader
from sanic.helpers import STATUS_CODES
class Accept(str):
    # One entry of an Accept header: the raw media-type string enriched
    # with parsed type/subtype and q-value.  Rich comparisons compare
    # qvalues so entries can be ordered by preference.
    #
    # NOTE(review): defining __eq__ without __hash__ sets __hash__ to
    # None, making instances unhashable despite subclassing str — confirm
    # whether set/dict usage is intended (fix: __hash__ = str.__hash__).

    def __new__(cls, value: str, *args, **kwargs):
        # Only the raw string participates in str construction; the
        # remaining arguments are consumed by __init__.
        return str.__new__(cls, value)

    def __init__(
        self,
        value: str,
        type_: MediaType,
        subtype: MediaType,
        *,
        q: str = "1.0",
        **kwargs: str,
    ):
        qvalue = float(q)
        if qvalue > 1 or qvalue < 0:
            raise InvalidHeader(
                f"Accept header qvalue must be between 0 and 1, not: {qvalue}"
            )
        self.value = value
        self.type_ = type_
        self.subtype = subtype
        self.qvalue = qvalue
        self.params = kwargs

    def _compare(self, other, method):
        # Compare by qvalue; operands without .qvalue yield NotImplemented
        # so Python can try the reflected operation.
        try:
            return method(self.qvalue, other.qvalue)
        except (AttributeError, TypeError):
            return NotImplemented

    def __lt__(self, other: Union[str, Accept]):
        return self._compare(other, lambda s, o: s < o)

    def __le__(self, other: Union[str, Accept]):
        return self._compare(other, lambda s, o: s <= o)

    def __eq__(self, other: Union[str, Accept]):  # type: ignore
        return self._compare(other, lambda s, o: s == o)

    def __ge__(self, other: Union[str, Accept]):
        return self._compare(other, lambda s, o: s >= o)

    def __gt__(self, other: Union[str, Accept]):
        return self._compare(other, lambda s, o: s > o)

    def __ne__(self, other: Union[str, Accept]):  # type: ignore
        return self._compare(other, lambda s, o: s != o)

    def match(
        self,
        other,
        *,
        allow_type_wildcard: bool = True,
        allow_subtype_wildcard: bool = True,
    ) -> bool:
        # With wildcards allowed a plain equality test is used; otherwise
        # both sides must match concretely and neither may be a wildcard.
        type_match = (
            self.type_ == other.type_
            if allow_type_wildcard
            else (
                self.type_.match(other.type_)
                and not self.type_.is_wildcard
                and not other.type_.is_wildcard
            )
        )
        subtype_match = (
            self.subtype == other.subtype
            if allow_subtype_wildcard
            else (
                self.subtype.match(other.subtype)
                and not self.subtype.is_wildcard
                and not other.subtype.is_wildcard
            )
        )

        return type_match and subtype_match
def parse(cls, raw: str) -> Accept:
    """Build an ``Accept`` from one raw entry, e.g. ``text/html;q=0.9``.

    :raises InvalidHeader: if the entry is not ``type/subtype``.
    """
    mtype = raw.strip()
    invalid = False
    try:
        media, *raw_params = mtype.split(";")
        type_, subtype = media.split("/")
    except ValueError:
        invalid = True
    if invalid or not type_ or not subtype:
        raise InvalidHeader(f"Header contains invalid Accept value: {raw}")
    params = {
        key.strip(): value.strip()
        for key, value in (param.split("=", 1) for param in raw_params)
    }
    return cls(mtype, MediaType(type_), MediaType(subtype), **params)
class AcceptContainer(list):
    """A list of Accept entries with media-type aware membership.

    Both ``in`` and :meth:`match` report whether *any* contained entry
    matches the operand.
    """

    def __contains__(self, o: object) -> bool:
        for item in self:
            if item.match(o):
                return True
        return False

    def match(
        self,
        o: object,
        *,
        allow_type_wildcard: bool = True,
        allow_subtype_wildcard: bool = True,
    ) -> bool:
        """Return True when any entry matches ``o`` under the given
        wildcard rules."""
        for item in self:
            if item.match(
                o,
                allow_type_wildcard=allow_type_wildcard,
                allow_subtype_wildcard=allow_subtype_wildcard,
            ):
                return True
        return False
def _sort_accept_value(accept: Accept):
return (
accept.qvalue,
len(accept.params),
accept.subtype != "*",
accept.type_ != "*",
)
The provided code snippet includes necessary dependencies for implementing the `parse_accept` function. Write a Python function `def parse_accept(accept: str) -> AcceptContainer` to solve the following problem:
Parse an Accept header and order the acceptable media types according to RFC 7231, Section 5.3.2 https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.2
Here is the function:
def parse_accept(accept: str) -> AcceptContainer:
    """Parse an Accept header and order the acceptable media types
    according to RFC 7231, s. 5.3.2
    https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.2
    """
    # Empty segments (e.g. from trailing commas) are skipped.
    parsed = [
        Accept.parse(mtype) for mtype in accept.split(",") if mtype
    ]
    parsed.sort(key=_sort_accept_value, reverse=True)
    return AcceptContainer(parsed)
157,663 | import re
import sys
from os import environ
BASE_LOGO = """
Sanic
Build Fast. Run Fast.
"""
COFFEE_LOGO = """\033[48;2;255;13;104m \033[0m
\033[38;2;255;255;255;48;2;255;13;104m ▄████████▄ \033[0m
\033[38;2;255;255;255;48;2;255;13;104m ██ ██▀▀▄ \033[0m
\033[38;2;255;255;255;48;2;255;13;104m ███████████ █ \033[0m
\033[38;2;255;255;255;48;2;255;13;104m ███████████▄▄▀ \033[0m
\033[38;2;255;255;255;48;2;255;13;104m ▀███████▀ \033[0m
\033[48;2;255;13;104m \033[0m
Dark roast. No sugar."""
COLOR_LOGO = """\033[48;2;255;13;104m \033[0m
\033[38;2;255;255;255;48;2;255;13;104m ▄███ █████ ██ \033[0m
\033[38;2;255;255;255;48;2;255;13;104m ██ \033[0m
\033[38;2;255;255;255;48;2;255;13;104m ▀███████ ███▄ \033[0m
\033[38;2;255;255;255;48;2;255;13;104m ██ \033[0m
\033[38;2;255;255;255;48;2;255;13;104m ████ ████████▀ \033[0m
\033[48;2;255;13;104m \033[0m
Build Fast. Run Fast."""
FULL_COLOR_LOGO = """
\033[38;2;255;13;104m ▄███ █████ ██ \033[0m ▄█▄ ██ █ █ ▄██████████
\033[38;2;255;13;104m ██ \033[0m █ █ █ ██ █ █ ██
\033[38;2;255;13;104m ▀███████ ███▄ \033[0m ▀ █ █ ██ ▄ █ ██
\033[38;2;255;13;104m ██\033[0m █████████ █ ██ █ █ ▄▄
\033[38;2;255;13;104m ████ ████████▀ \033[0m █ █ █ ██ █ ▀██ ███████
"""
ansi_pattern = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
def get_logo(full=False, coffee=False):
    """Pick the startup logo variant.

    Color art is used only on a TTY (``full`` wins over ``coffee``);
    ANSI escapes are stripped for Apple Terminal on macOS, which does
    not render the 24-bit color codes.
    """
    if not sys.stdout.isatty():
        logo = BASE_LOGO
    elif full:
        logo = FULL_COLOR_LOGO
    elif coffee:
        logo = COFFEE_LOGO
    else:
        logo = COLOR_LOGO

    if (
        sys.platform == "darwin"
        and environ.get("TERM_PROGRAM") == "Apple_Terminal"
    ):
        logo = ansi_pattern.sub("", logo)

    return logo
157,664 | from importlib import import_module
from inspect import ismodule
from typing import Dict
The provided code snippet includes necessary dependencies for implementing the `has_message_body` function. Write a Python function `def has_message_body(status)` to solve the following problem:
According to the following RFC message body and length SHOULD NOT be included in responses status 1XX, 204 and 304. https://tools.ietf.org/html/rfc2616#section-4.4 https://tools.ietf.org/html/rfc2616#section-4.3
Here is the function:
def has_message_body(status):
    """
    According to RFC 2616 (sections 4.3 and 4.4), responses with status
    1xx, 204 and 304 SHOULD NOT include a message body or length.
    https://tools.ietf.org/html/rfc2616#section-4.4
    https://tools.ietf.org/html/rfc2616#section-4.3

    :return: True when a body is permitted for ``status``
    """
    is_informational = 100 <= status < 200
    return not is_informational and status not in (204, 304)
157,665 | from importlib import import_module
from inspect import ismodule
from typing import Dict
# Hop-by-hop headers are meaningful only for a single transport-level
# connection and must not be forwarded by proxies (RFC 2616 section 13.5.1).
_HOP_BY_HOP_HEADERS = frozenset(
    {
        "connection",
        "keep-alive",
        "proxy-authenticate",
        "proxy-authorization",
        "te",
        "trailers",
        "transfer-encoding",
        "upgrade",
    }
)


def is_hop_by_hop_header(header):
    """Checks if the given header is a Hop By Hop header"""
    name = header.lower()
    return name in _HOP_BY_HOP_HEADERS
157,666 | from importlib import import_module
from inspect import ismodule
from typing import Dict
def is_entity_header(header):
    """Checks if the given header is an Entity Header"""
    # Case-insensitive membership test against the module-level
    # _ENTITY_HEADERS frozenset (defined elsewhere in this module).
    return header.lower() in _ENTITY_HEADERS
The provided code snippet includes necessary dependencies for implementing the `remove_entity_headers` function. Write a Python function `def remove_entity_headers(headers, allowed=("content-location", "expires"))` to solve the following problem:
Removes all the entity headers present in the headers given. According to RFC 2616 Section 10.3.5, Content-Location and Expires are allowed as for the "strong cache validator". https://tools.ietf.org/html/rfc2616#section-10.3.5 returns the headers without the entity headers
Here is the function:
def remove_entity_headers(headers, allowed=("content-location", "expires")):
    """
    Remove all entity headers present in ``headers``.

    According to RFC 2616 Section 10.3.5, Content-Location and Expires
    are allowed as "strong cache validators" and are therefore kept by
    default.
    https://tools.ietf.org/html/rfc2616#section-10.3.5

    :return: a new dict without the (non-allowed) entity headers
    """
    keep = {name.lower() for name in allowed}
    return {
        header: value
        for header, value in headers.items()
        if not is_entity_header(header) or header.lower() in keep
    }
157,667 | import asyncio
import os
import signal
from sys import argv
from multidict import CIMultiDict
def enable_windows_color_support():
    """Enable ANSI escape-sequence processing in the Windows console."""
    # -11 is STD_OUTPUT_HANDLE; mode 7 combines ENABLE_PROCESSED_OUTPUT,
    # ENABLE_WRAP_AT_EOL_OUTPUT and ENABLE_VIRTUAL_TERMINAL_PROCESSING.
    # Windows-only: ctypes.windll does not exist on other platforms.
    import ctypes

    kernel = ctypes.windll.kernel32
    kernel.SetConsoleMode(kernel.GetStdHandle(-11), 7)
157,668 | import asyncio
import os
import signal
from sys import argv
from multidict import CIMultiDict
def stat_async(path):
    # Trio-based async stat: trio.Path(...).stat() returns an awaitable.
    # NOTE(review): requires trio to be importable — presumably only
    # selected when the trio compatibility layer is active; confirm.
    return trio.Path(path).stat()
157,669 | from __future__ import annotations
from ssl import SSLContext
from typing import TYPE_CHECKING, Dict, Optional, Type, Union
from sanic.config import Config
from sanic.server.events import trigger_events
import asyncio
import multiprocessing
import os
import socket
from functools import partial
from signal import SIG_IGN, SIGINT, SIGTERM, Signals
from signal import signal as signal_func
from sanic.compat import OS_IS_WINDOWS, ctrlc_workaround_for_windows
from sanic.log import error_logger, logger
from sanic.models.server_types import Signal
from sanic.server.async_server import AsyncioServer
from sanic.server.protocols.http_protocol import HttpProtocol
from sanic.server.socket import (
bind_socket,
bind_unix_socket,
remove_unix_socket,
)
def serve(
    host,
    port,
    app: Sanic,
    ssl: Optional[SSLContext] = None,
    sock: Optional[socket.socket] = None,
    unix: Optional[str] = None,
    reuse_port: bool = False,
    loop=None,
    protocol: Type[asyncio.Protocol] = HttpProtocol,
    backlog: int = 100,
    register_sys_signals: bool = True,
    run_multiple: bool = False,
    run_async: bool = False,
    connections=None,
    signal=None,
    state=None,
    asyncio_server_kwargs=None,
):
    """Start asynchronous HTTP Server on an individual process.

    :param host: Address to host on
    :param port: Port to host on
    :param app: Sanic application instance to serve
    :param ssl: SSLContext
    :param sock: Socket for the server to accept connections from
    :param unix: Unix socket to listen on instead of TCP port
    :param reuse_port: `True` for multiple workers
    :param loop: asyncio compatible event loop
    :param protocol: asyncio protocol class used for each connection
    :param backlog: maximum number of queued connections
    :param register_sys_signals: register SIGINT/SIGTERM for graceful stop
    :param run_multiple: `True` when started as one of several workers
    :param run_async: bool: Do not create a new event loop for the server,
        and return an AsyncServer object rather than running it
    :param connections: shared set used to track open connections
    :param signal: shared Signal object used to flag shutdown; a fresh one
        is created when not supplied
    :param state: optional worker state handed to the protocol
    :param asyncio_server_kwargs: key-value args for asyncio/uvloop
        create_server method
    :return: Nothing, or an AsyncioServer when ``run_async`` is True
    """
    if not run_async and not loop:
        # create new event_loop after fork
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
    if app.debug:
        loop.set_debug(app.debug)
    app.asgi = False
    connections = connections if connections is not None else set()
    # NOTE: `signal=Signal()` used to be a mutable default argument shared
    # across every call; create a fresh instance per call instead.
    signal = signal if signal is not None else Signal()
    protocol_kwargs = _build_protocol_kwargs(protocol, app.config)
    server = partial(
        protocol,
        loop=loop,
        connections=connections,
        signal=signal,
        app=app,
        state=state,
        unix=unix,
        **protocol_kwargs,
    )
    asyncio_server_kwargs = (
        asyncio_server_kwargs if asyncio_server_kwargs else {}
    )
    # UNIX sockets are always bound by us (to preserve semantics between modes)
    if unix:
        sock = bind_unix_socket(unix, backlog=backlog)
    server_coroutine = loop.create_server(
        server,
        None if sock else host,
        None if sock else port,
        ssl=ssl,
        reuse_port=reuse_port,
        sock=sock,
        backlog=backlog,
        **asyncio_server_kwargs,
    )
    if run_async:
        # Caller drives the lifecycle; hand back an un-started server wrapper
        return AsyncioServer(
            app=app,
            loop=loop,
            serve_coro=server_coroutine,
            connections=connections,
        )
    loop.run_until_complete(app._startup())
    loop.run_until_complete(app._server_event("init", "before"))
    try:
        http_server = loop.run_until_complete(server_coroutine)
    except BaseException:
        error_logger.exception("Unable to start server")
        return
    # Ignore SIGINT when run_multiple; the main process forwards SIGTERM
    if run_multiple:
        signal_func(SIGINT, SIG_IGN)
        os.environ["SANIC_WORKER_PROCESS"] = "true"
    # Register signals for graceful termination
    if register_sys_signals:
        if OS_IS_WINDOWS:
            ctrlc_workaround_for_windows(app)
        else:
            for _signal in [SIGTERM] if run_multiple else [SIGINT, SIGTERM]:
                loop.add_signal_handler(_signal, app.stop)
    loop.run_until_complete(app._server_event("init", "after"))
    pid = os.getpid()
    try:
        logger.info("Starting worker [%s]", pid)
        loop.run_forever()
    finally:
        logger.info("Stopping worker [%s]", pid)
        # Run the on_stop function if provided
        loop.run_until_complete(app._server_event("shutdown", "before"))
        # Wait for event loop to finish and all connections to drain
        http_server.close()
        loop.run_until_complete(http_server.wait_closed())
        # Complete all tasks on the loop
        signal.stopped = True
        for connection in connections:
            connection.close_if_idle()
        # Gracefully shutdown timeout.
        # We should provide graceful_shutdown_timeout,
        # instead of letting connection hangs forever.
        # Let's roughly calculate time.
        graceful = app.config.GRACEFUL_SHUTDOWN_TIMEOUT
        start_shutdown: float = 0
        while connections and (start_shutdown < graceful):
            loop.run_until_complete(asyncio.sleep(0.1))
            start_shutdown = start_shutdown + 0.1
        # Force close non-idle connection after waiting for
        # graceful_shutdown_timeout
        for conn in connections:
            if hasattr(conn, "websocket") and conn.websocket:
                conn.websocket.fail_connection(code=1001)
            else:
                conn.abort()
        loop.run_until_complete(app._server_event("shutdown", "after"))
        remove_unix_socket(unix)
def trigger_events(events: Optional[Iterable[Callable[..., Any]]], loop):
    """
    Trigger event callbacks (functions or async)

    :param events: one or more sync or async functions to execute
    :param loop: event loop
    """
    if not events:
        return
    for callback in events:
        outcome = callback(loop)
        # Async callbacks are driven to completion on the given loop
        if isawaitable(outcome):
            loop.run_until_complete(outcome)
def serve_single(server_settings):
    """Run a single-process server, firing main_start/main_stop listeners."""
    on_main_start = server_settings.pop("main_start", None)
    on_main_stop = server_settings.pop("main_stop", None)
    if not server_settings.get("run_async"):
        # create new event_loop after fork
        event_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(event_loop)
        server_settings["loop"] = event_loop
    trigger_events(on_main_start, server_settings["loop"])
    serve(**server_settings)
    trigger_events(on_main_stop, server_settings["loop"])
    server_settings["loop"].close()
157,670 | from __future__ import annotations
from ssl import SSLContext
from typing import TYPE_CHECKING, Dict, Optional, Type, Union
from sanic.config import Config
from sanic.server.events import trigger_events
import asyncio
import multiprocessing
import os
import socket
from functools import partial
from signal import SIG_IGN, SIGINT, SIGTERM, Signals
from signal import signal as signal_func
from sanic.compat import OS_IS_WINDOWS, ctrlc_workaround_for_windows
from sanic.log import error_logger, logger
from sanic.models.server_types import Signal
from sanic.server.async_server import AsyncioServer
from sanic.server.protocols.http_protocol import HttpProtocol
from sanic.server.socket import (
bind_socket,
bind_unix_socket,
remove_unix_socket,
)
def serve(
    host,
    port,
    app: Sanic,
    ssl: Optional[SSLContext] = None,
    sock: Optional[socket.socket] = None,
    unix: Optional[str] = None,
    reuse_port: bool = False,
    loop=None,
    protocol: Type[asyncio.Protocol] = HttpProtocol,
    backlog: int = 100,
    register_sys_signals: bool = True,
    run_multiple: bool = False,
    run_async: bool = False,
    connections=None,
    signal=None,
    state=None,
    asyncio_server_kwargs=None,
):
    """Start asynchronous HTTP Server on an individual process.

    :param host: Address to host on
    :param port: Port to host on
    :param app: Sanic application instance to serve
    :param ssl: SSLContext
    :param sock: Socket for the server to accept connections from
    :param unix: Unix socket to listen on instead of TCP port
    :param reuse_port: `True` for multiple workers
    :param loop: asyncio compatible event loop
    :param protocol: asyncio protocol class used for each connection
    :param backlog: maximum number of queued connections
    :param register_sys_signals: register SIGINT/SIGTERM for graceful stop
    :param run_multiple: `True` when started as one of several workers
    :param run_async: bool: Do not create a new event loop for the server,
        and return an AsyncServer object rather than running it
    :param connections: shared set used to track open connections
    :param signal: shared Signal object used to flag shutdown; a fresh one
        is created when not supplied
    :param state: optional worker state handed to the protocol
    :param asyncio_server_kwargs: key-value args for asyncio/uvloop
        create_server method
    :return: Nothing, or an AsyncioServer when ``run_async`` is True
    """
    if not run_async and not loop:
        # create new event_loop after fork
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
    if app.debug:
        loop.set_debug(app.debug)
    app.asgi = False
    connections = connections if connections is not None else set()
    # NOTE: `signal=Signal()` used to be a mutable default argument shared
    # across every call; create a fresh instance per call instead.
    signal = signal if signal is not None else Signal()
    protocol_kwargs = _build_protocol_kwargs(protocol, app.config)
    server = partial(
        protocol,
        loop=loop,
        connections=connections,
        signal=signal,
        app=app,
        state=state,
        unix=unix,
        **protocol_kwargs,
    )
    asyncio_server_kwargs = (
        asyncio_server_kwargs if asyncio_server_kwargs else {}
    )
    # UNIX sockets are always bound by us (to preserve semantics between modes)
    if unix:
        sock = bind_unix_socket(unix, backlog=backlog)
    server_coroutine = loop.create_server(
        server,
        None if sock else host,
        None if sock else port,
        ssl=ssl,
        reuse_port=reuse_port,
        sock=sock,
        backlog=backlog,
        **asyncio_server_kwargs,
    )
    if run_async:
        # Caller drives the lifecycle; hand back an un-started server wrapper
        return AsyncioServer(
            app=app,
            loop=loop,
            serve_coro=server_coroutine,
            connections=connections,
        )
    loop.run_until_complete(app._startup())
    loop.run_until_complete(app._server_event("init", "before"))
    try:
        http_server = loop.run_until_complete(server_coroutine)
    except BaseException:
        error_logger.exception("Unable to start server")
        return
    # Ignore SIGINT when run_multiple; the main process forwards SIGTERM
    if run_multiple:
        signal_func(SIGINT, SIG_IGN)
        os.environ["SANIC_WORKER_PROCESS"] = "true"
    # Register signals for graceful termination
    if register_sys_signals:
        if OS_IS_WINDOWS:
            ctrlc_workaround_for_windows(app)
        else:
            for _signal in [SIGTERM] if run_multiple else [SIGINT, SIGTERM]:
                loop.add_signal_handler(_signal, app.stop)
    loop.run_until_complete(app._server_event("init", "after"))
    pid = os.getpid()
    try:
        logger.info("Starting worker [%s]", pid)
        loop.run_forever()
    finally:
        logger.info("Stopping worker [%s]", pid)
        # Run the on_stop function if provided
        loop.run_until_complete(app._server_event("shutdown", "before"))
        # Wait for event loop to finish and all connections to drain
        http_server.close()
        loop.run_until_complete(http_server.wait_closed())
        # Complete all tasks on the loop
        signal.stopped = True
        for connection in connections:
            connection.close_if_idle()
        # Gracefully shutdown timeout.
        # We should provide graceful_shutdown_timeout,
        # instead of letting connection hangs forever.
        # Let's roughly calculate time.
        graceful = app.config.GRACEFUL_SHUTDOWN_TIMEOUT
        start_shutdown: float = 0
        while connections and (start_shutdown < graceful):
            loop.run_until_complete(asyncio.sleep(0.1))
            start_shutdown = start_shutdown + 0.1
        # Force close non-idle connection after waiting for
        # graceful_shutdown_timeout
        for conn in connections:
            if hasattr(conn, "websocket") and conn.websocket:
                conn.websocket.fail_connection(code=1001)
            else:
                conn.abort()
        loop.run_until_complete(app._server_event("shutdown", "after"))
        remove_unix_socket(unix)
def trigger_events(events: Optional[Iterable[Callable[..., Any]]], loop):
    """
    Trigger event callbacks (functions or async)

    :param events: one or more sync or async functions to execute
    :param loop: event loop
    """
    if not events:
        return
    for callback in events:
        outcome = callback(loop)
        # Async callbacks are driven to completion on the given loop
        if isawaitable(outcome):
            loop.run_until_complete(outcome)
# Module-level logger; used by the serving helpers below for lifecycle logs
logger = logging.getLogger("sanic.root")
def bind_socket(host: str, port: int, *, backlog=100) -> socket.socket:
    """Create TCP server socket.

    :param host: IPv4, IPv6 or hostname may be specified
    :param port: TCP port number (0 selects an ephemeral port)
    :param backlog: Maximum number of connections to queue
    :return: a listening socket.socket object
    """
    try:  # IP address: family must be specified for IPv6 at least
        ip = ip_address(host)
        host = str(ip)
        sock = socket.socket(
            socket.AF_INET6 if ip.version == 6 else socket.AF_INET
        )
    except ValueError:  # Hostname, may become AF_INET or AF_INET6
        sock = socket.socket()
    try:
        # Allow quick rebinding after a restart (skip TIME_WAIT errors)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((host, port))
        sock.listen(backlog)
    except OSError:
        # Don't leak the file descriptor when bind/listen fails
        sock.close()
        raise
    return sock
def bind_unix_socket(path: str, *, mode=0o666, backlog=100) -> socket.socket:
    """Open or atomically replace an existing unix socket with zero downtime.

    :param path: filesystem path for the socket
    :param mode: permission bits applied to the socket file
    :param backlog: Maximum number of connections to queue
    :return: a listening socket.socket object
    :raises FileNotFoundError: if the containing folder does not exist
    :raises FileExistsError: if *path* exists but is not a socket
    """
    # Sanitise and pre-verify socket path
    path = os.path.abspath(path)
    folder = os.path.dirname(path)
    if not os.path.isdir(folder):
        raise FileNotFoundError(f"Socket folder does not exist: {folder}")
    try:
        if not stat.S_ISSOCK(os.stat(path, follow_symlinks=False).st_mode):
            raise FileExistsError(f"Existing file is not a socket: {path}")
    except FileNotFoundError:
        pass
    # Create new socket with a random temporary name
    tmp_path = f"{path}.{secrets.token_urlsafe()}"
    sock = socket.socket(socket.AF_UNIX)
    try:
        # Critical section begins (filename races)
        sock.bind(tmp_path)
        try:
            os.chmod(tmp_path, mode)
            # Start listening before rename to avoid connection failures
            sock.listen(backlog)
            os.rename(tmp_path, path)
        except:  # noqa: E722
            try:
                os.unlink(tmp_path)
            finally:
                raise
    except:  # noqa: E722
        try:
            sock.close()
        finally:
            raise
    return sock
def remove_unix_socket(path: Optional[str]) -> None:
    """Remove dead unix socket during server exit (no-op when absent)."""
    if not path:
        return
    try:
        file_mode = os.stat(path, follow_symlinks=False).st_mode
        if stat.S_ISSOCK(file_mode):
            # Only unlink when no live server instance is accepting on it
            with socket.socket(socket.AF_UNIX) as probe:
                try:
                    probe.connect(path)
                except ConnectionRefusedError:
                    os.unlink(path)
    except FileNotFoundError:
        pass
The provided code snippet includes necessary dependencies for implementing the `serve_multiple` function. Write a Python function `def serve_multiple(server_settings, workers)` to solve the following problem:
Start multiple server processes simultaneously. Stop on interrupt and terminate signals, and drain connections when complete. :param server_settings: kw arguments to be passed to the serve function :param workers: number of workers to launch :return:
Here is the function:
def serve_multiple(server_settings, workers):
    """Start multiple server processes simultaneously. Stop on interrupt
    and terminate signals, and drain connections when complete.

    :param server_settings: kw arguments to be passed to the serve function
    :param workers: number of workers to launch
    :return:
    """
    server_settings["reuse_port"] = True
    server_settings["run_multiple"] = True
    main_start = server_settings.pop("main_start", None)
    main_stop = server_settings.pop("main_stop", None)
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    trigger_events(main_start, loop)
    # Create a listening socket or use the one in settings
    sock = server_settings.get("sock")
    unix = server_settings["unix"]
    backlog = server_settings["backlog"]
    if unix:
        sock = bind_unix_socket(unix, backlog=backlog)
        server_settings["unix"] = unix
    if sock is None:
        sock = bind_socket(
            server_settings["host"], server_settings["port"], backlog=backlog
        )
    # Workers must inherit the listening socket across fork
    sock.set_inheritable(True)
    server_settings["sock"] = sock
    server_settings["host"] = None
    server_settings["port"] = None
    processes = []

    def sig_handler(signal, frame):
        logger.info("Received signal %s. Shutting down.", Signals(signal).name)
        for process in processes:
            os.kill(process.pid, SIGTERM)

    # Forward termination signals to the workers (handlers registered
    # directly; the previous lambda wrappers were redundant)
    signal_func(SIGINT, sig_handler)
    signal_func(SIGTERM, sig_handler)
    mp = multiprocessing.get_context("fork")
    for _ in range(workers):
        process = mp.Process(
            target=serve,
            kwargs=server_settings,
        )
        process.daemon = True
        process.start()
        processes.append(process)
    for process in processes:
        process.join()
    # the above processes will block this until they're stopped
    for process in processes:
        process.terminate()
    trigger_events(main_stop, loop)
    sock.close()
    loop.close()
    remove_unix_socket(unix)
157,671 | import sys
import typing as t
from functools import partial
from traceback import extract_tb
from sanic.exceptions import InvalidUsage, SanicException
from sanic.helpers import STATUS_CODES
from sanic.request import Request
from sanic.response import HTTPResponse, html, json, text
The provided code snippet includes necessary dependencies for implementing the `escape` function. Write a Python function `def escape(text)` to solve the following problem:
Minimal HTML escaping, not for attribute values (unlike html.escape).
Here is the function:
def escape(text):
    """
    Minimal HTML escaping, not for attribute values (unlike html.escape).
    """
    # Escape "&" first so the "&lt;" produced for "<" is not double-escaped.
    # (The original replacements were no-ops — the entities had been lost.)
    return f"{text}".replace("&", "&amp;").replace("<", "&lt;")
157,672 | import sys
import typing as t
from functools import partial
from traceback import extract_tb
from sanic.exceptions import InvalidUsage, SanicException
from sanic.helpers import STATUS_CODES
from sanic.request import Request
from sanic.response import HTTPResponse, html, json, text
# Maps the FALLBACK_ERROR_FORMAT config value to a renderer class.
# NOTE(review): this references HTMLRenderer/JSONRenderer/TextRenderer which
# appear later in this file — confirm the definition order at import time.
RENDERERS_BY_CONFIG = {
    "html": HTMLRenderer,
    "json": JSONRenderer,
    "text": TextRenderer,
}
class SanicException(Exception):
    """Base exception for Sanic errors (status code, context, extra data)."""

    def __init__(
        self,
        message: Optional[Union[str, bytes]] = None,
        status_code: Optional[int] = None,
        quiet: Optional[bool] = None,
        context: Optional[Dict[str, Any]] = None,
        extra: Optional[Dict[str, Any]] = None,
    ) -> None:
        # NOTE(review): as shown, __init__ only defines this helper and never
        # stores its arguments — the body looks truncated by extraction;
        # confirm against the upstream source before relying on it.
        def check_error_format(format):
            # Reject any format that is neither a known renderer nor "auto"
            if format not in RENDERERS_BY_CONFIG and format != "auto":
                raise SanicException(f"Unknown format: {format}")
157,673 | import sys
import typing as t
from functools import partial
from traceback import extract_tb
from sanic.exceptions import InvalidUsage, SanicException
from sanic.helpers import STATUS_CODES
from sanic.request import Request
from sanic.response import HTTPResponse, html, json, text
class BaseRenderer:
    """
    Base class that all renderers must inherit from.

    Subclasses implement ``full`` and ``minimal`` to turn an exception into
    an :class:`HTTPResponse`, with or without debug detail.
    """
    # JSON serializer used by subclasses for structured output
    dumps = staticmethod(dumps)
    def __init__(self, request, exception, debug):
        # request: the Request being handled when the exception occurred
        # exception: the exception instance to render
        # debug: True to include full tracebacks in the output
        self.request = request
        self.exception = exception
        self.debug = debug
    # NOTE(review): headers/status/text/title read like computed attributes —
    # render() uses `self.full` unbound and title() uses `self.status` as a
    # value — they appear to have lost @property decorators in extraction;
    # confirm against the upstream source.
    def headers(self):
        # Custom headers are honoured only for Sanic's own exception types
        if isinstance(self.exception, SanicException):
            return getattr(self.exception, "headers", {})
        return {}
    def status(self):
        # HTTP status for the response; non-Sanic exceptions use the fallback
        if isinstance(self.exception, SanicException):
            return getattr(self.exception, "status_code", FALLBACK_STATUS)
        return FALLBACK_STATUS
    def text(self):
        # Expose the exception message only in debug or for expected errors
        if self.debug or isinstance(self.exception, SanicException):
            return str(self.exception)
        return FALLBACK_TEXT
    def title(self):
        # e.g. "500 — Internal Server Error"
        status_text = STATUS_CODES.get(self.status, b"Error Occurred").decode()
        return f"{self.status} — {status_text}"
    def render(self) -> HTTPResponse:
        """
        Outputs the exception as a :class:`HTTPResponse`.

        :return: The formatted exception
        :rtype: str
        """
        # Full detail only in debug mode and when the exception is not "quiet"
        output = (
            self.full
            if self.debug and not getattr(self.exception, "quiet", False)
            else self.minimal
        )
        return output()
    def minimal(self) -> HTTPResponse:  # noqa
        """
        Provide a formatted message that is meant to not show any sensitive
        data or details.
        """
        raise NotImplementedError
    def full(self) -> HTTPResponse:  # noqa
        """
        Provide a formatted message that has all details and is meant to be
        used primarily for debugging and non-production environments.
        """
        raise NotImplementedError
class HTMLRenderer(BaseRenderer):
    """
    Render an exception as HTML.

    The default fallback type.
    """
    # Inline stylesheet for the debug error page
    TRACEBACK_STYLE = """
        html { font-family: sans-serif }
        h2 { color: #888; }
        .tb-wrapper p, dl, dd { margin: 0 }
        .frame-border { margin: 1rem }
        .frame-line > *, dt, dd { padding: 0.3rem 0.6rem }
        .frame-line, dl { margin-bottom: 0.3rem }
        .frame-code, dd { font-size: 16px; padding-left: 4ch }
        .tb-wrapper, dl { border: 1px solid #eee }
        .tb-header,.obj-header {
            background: #eee; padding: 0.3rem; font-weight: bold
        }
        .frame-descriptor, dt { background: #e2eafb; font-size: 14px }
    """
    # HTML fragments assembled by the _generate_* helpers below
    TRACEBACK_WRAPPER_HTML = (
        "<div class=tb-header>{exc_name}: {exc_value}</div>"
        "<div class=tb-wrapper>{frame_html}</div>"
    )
    TRACEBACK_BORDER = (
        "<div class=frame-border>"
        "The above exception was the direct cause of the following exception:"
        "</div>"
    )
    TRACEBACK_LINE_HTML = (
        "<div class=frame-line>"
        "<p class=frame-descriptor>"
        "File {0.filename}, line <i>{0.lineno}</i>, "
        "in <code><b>{0.name}</b></code>"
        "<p class=frame-code><code>{0.line}</code>"
        "</div>"
    )
    OBJECT_WRAPPER_HTML = (
        "<div class=obj-header>{title}</div>"
        "<dl class={obj_type}>{display_html}</dl>"
    )
    OBJECT_DISPLAY_HTML = "<dt>{key}</dt><dd><code>{value}</code></dd>"
    OUTPUT_HTML = (
        "<!DOCTYPE html><html lang=en>"
        "<meta charset=UTF-8><title>{title}</title>\n"
        "<style>{style}</style>\n"
        "<h1>{title}</h1><p>{text}\n"
        "{body}"
    )
    def full(self) -> HTTPResponse:
        # Debug variant: includes the traceback body
        return html(
            self.OUTPUT_HTML.format(
                title=self.title,
                text=self.text,
                style=self.TRACEBACK_STYLE,
                body=self._generate_body(full=True),
            ),
            status=self.status,
        )
    def minimal(self) -> HTTPResponse:
        # Production variant: no traceback, custom headers honoured
        return html(
            self.OUTPUT_HTML.format(
                title=self.title,
                text=self.text,
                style=self.TRACEBACK_STYLE,
                body=self._generate_body(full=False),
            ),
            status=self.status,
            headers=self.headers,
        )
    # NOTE(review): text/title appear to have lost @property decorators in
    # extraction (they call super().text / super().title as values); confirm
    # against the upstream source.
    def text(self):
        return escape(super().text)
    def title(self):
        return escape(f"⚠️ {super().title}")
    def _generate_body(self, *, full):
        # Builds the page body; with full=True includes the chained tracebacks
        lines = []
        if full:
            _, exc_value, __ = sys.exc_info()
            exceptions = []
            # Walk the __cause__ chain so chained exceptions are all shown
            while exc_value:
                exceptions.append(self._format_exc(exc_value))
                exc_value = exc_value.__cause__
            traceback_html = self.TRACEBACK_BORDER.join(reversed(exceptions))
            appname = escape(self.request.app.name)
            name = escape(self.exception.__class__.__name__)
            value = escape(self.exception)
            path = escape(self.request.path)
            lines += [
                f"<h2>Traceback of {appname} " "(most recent call last):</h2>",
                f"{traceback_html}",
                "<div class=summary><p>",
                f"<b>{name}: {value}</b> "
                f"while handling path <code>{path}</code>",
                "</div>",
            ]
        # "context" is always shown; "extra" only in the full (debug) view
        for attr, display in (("context", True), ("extra", bool(full))):
            info = getattr(self.exception, attr, None)
            if info and display:
                lines.append(self._generate_object_display(info, attr))
        return "\n".join(lines)
    def _generate_object_display(
        self, obj: t.Dict[str, t.Any], descriptor: str
    ) -> str:
        # Renders a dict (context/extra) as an HTML definition list
        display = "".join(
            self.OBJECT_DISPLAY_HTML.format(key=key, value=value)
            for key, value in obj.items()
        )
        return self.OBJECT_WRAPPER_HTML.format(
            title=descriptor.title(),
            display_html=display,
            obj_type=descriptor.lower(),
        )
    def _format_exc(self, exc):
        # One exception's traceback frames rendered as HTML
        frames = extract_tb(exc.__traceback__)
        frame_html = "".join(
            self.TRACEBACK_LINE_HTML.format(frame) for frame in frames
        )
        return self.TRACEBACK_WRAPPER_HTML.format(
            exc_name=escape(exc.__class__.__name__),
            exc_value=escape(exc),
            frame_html=frame_html,
        )
class TextRenderer(BaseRenderer):
    """
    Render an exception as plain text.
    """
    OUTPUT_TEXT = "{title}\n{bar}\n{text}\n\n{body}"
    SPACER = "  "
    def full(self) -> HTTPResponse:
        # Debug variant: includes the traceback body
        return text(
            self.OUTPUT_TEXT.format(
                title=self.title,
                text=self.text,
                bar=("=" * len(self.title)),
                body=self._generate_body(full=True),
            ),
            status=self.status,
        )
    def minimal(self) -> HTTPResponse:
        # Production variant: no traceback, custom headers honoured
        return text(
            self.OUTPUT_TEXT.format(
                title=self.title,
                text=self.text,
                bar=("=" * len(self.title)),
                body=self._generate_body(full=False),
            ),
            status=self.status,
            headers=self.headers,
        )
    # NOTE(review): title appears to have lost its @property decorator in
    # extraction (it uses super().title as a value); confirm upstream.
    def title(self):
        return f"⚠️ {super().title}"
    def _generate_body(self, *, full):
        # Builds the text body; with full=True includes chained tracebacks
        lines = []
        if full:
            _, exc_value, __ = sys.exc_info()
            exceptions = []
            lines += [
                f"{self.exception.__class__.__name__}: {self.exception} while "
                f"handling path {self.request.path}",
                f"Traceback of {self.request.app.name} "
                "(most recent call last):\n",
            ]
            # Walk the __cause__ chain, oldest cause rendered first
            while exc_value:
                exceptions.append(self._format_exc(exc_value))
                exc_value = exc_value.__cause__
            lines += exceptions[::-1]
        # "context" is always shown; "extra" only in the full (debug) view
        for attr, display in (("context", True), ("extra", bool(full))):
            info = getattr(self.exception, attr, None)
            if info and display:
                lines += self._generate_object_display_list(info, attr)
        return "\n".join(lines)
    def _format_exc(self, exc):
        # One exception's traceback frames rendered as indented text
        frames = "\n\n".join(
            [
                f"{self.SPACER * 2}File {frame.filename}, "
                f"line {frame.lineno}, in "
                f"{frame.name}\n{self.SPACER * 2}{frame.line}"
                for frame in extract_tb(exc.__traceback__)
            ]
        )
        return f"{self.SPACER}{exc.__class__.__name__}: {exc}\n{frames}"
    def _generate_object_display_list(self, obj, descriptor):
        # Renders a dict (context/extra) as "key: value" lines
        lines = [f"\n{descriptor.title()}"]
        for key, value in obj.items():
            display = self.dumps(value)
            lines.append(f"{self.SPACER * 2}{key}: {display}")
        return lines
class JSONRenderer(BaseRenderer):
    """
    Render an exception as JSON.
    """
    def full(self) -> HTTPResponse:
        # Debug variant: includes path, args and the traceback frames
        output = self._generate_output(full=True)
        return json(output, status=self.status, dumps=self.dumps)
    def minimal(self) -> HTTPResponse:
        # Production variant: description/status/message only (plus context)
        output = self._generate_output(full=False)
        return json(output, status=self.status, dumps=self.dumps)
    def _generate_output(self, *, full):
        # Builds the JSON-serializable payload describing the exception
        output = {
            "description": self.title,
            "status": self.status,
            "message": self.text,
        }
        # "context" is always shown; "extra" only in the full (debug) view
        for attr, display in (("context", True), ("extra", bool(full))):
            info = getattr(self.exception, attr, None)
            if info and display:
                output[attr] = info
        if full:
            _, exc_value, __ = sys.exc_info()
            exceptions = []
            # Walk the __cause__ chain; most recent exception listed last
            while exc_value:
                exceptions.append(
                    {
                        "type": exc_value.__class__.__name__,
                        "exception": str(exc_value),
                        "frames": [
                            {
                                "file": frame.filename,
                                "line": frame.lineno,
                                "name": frame.name,
                                "src": frame.line,
                            }
                            for frame in extract_tb(exc_value.__traceback__)
                        ],
                    }
                )
                exc_value = exc_value.__cause__
            output["path"] = self.request.path
            output["args"] = self.request.args
            output["exceptions"] = exceptions[::-1]
        return output
    # NOTE(review): title appears to have lost its @property decorator in
    # extraction, and reads self.status as a value; confirm upstream.
    def title(self):
        return STATUS_CODES.get(self.status, b"Error Occurred").decode()
# Maps the FALLBACK_ERROR_FORMAT config value to a renderer class
RENDERERS_BY_CONFIG = {
    "html": HTMLRenderer,
    "json": JSONRenderer,
    "text": TextRenderer,
}
# Maps a request content type to the renderer used for its error response
RENDERERS_BY_CONTENT_TYPE = {
    "text/plain": TextRenderer,
    "application/json": JSONRenderer,
    "multipart/form-data": HTMLRenderer,
    "text/html": HTMLRenderer,
}
# Reverse mapping: renderer class -> the content type it emits
CONTENT_TYPE_BY_RENDERERS = {
    v: k for k, v in RENDERERS_BY_CONTENT_TYPE.items()
}
class InvalidUsage(SanicException):
    """
    **Status**: 400 Bad Request
    """

    status_code = 400  # HTTP status used when this exception is rendered
    quiet = True  # expected client error: render minimal output even in debug
class Request:
"""
Properties of an HTTP request such as URL, headers, etc.
"""
__slots__ = (
"__weakref__",
"_cookies",
"_id",
"_ip",
"_parsed_url",
"_port",
"_protocol",
"_remote_addr",
"_socket",
"_match_info",
"_name",
"app",
"body",
"conn_info",
"ctx",
"head",
"headers",
"method",
"parsed_accept",
"parsed_args",
"parsed_not_grouped_args",
"parsed_files",
"parsed_form",
"parsed_json",
"parsed_forwarded",
"raw_url",
"request_middleware_started",
"route",
"stream",
"transport",
"version",
)
def __init__(
self,
url_bytes: bytes,
headers: Header,
version: str,
method: str,
transport: TransportProtocol,
app: Sanic,
head: bytes = b"",
):
self.raw_url = url_bytes
# TODO: Content-Encoding detection
self._parsed_url = parse_url(url_bytes)
self._id: Optional[Union[uuid.UUID, str, int]] = None
self._name: Optional[str] = None
self.app = app
self.headers = Header(headers)
self.version = version
self.method = method
self.transport = transport
self.head = head
# Init but do not inhale
self.body = b""
self.conn_info: Optional[ConnInfo] = None
self.ctx = SimpleNamespace()
self.parsed_forwarded: Optional[Options] = None
self.parsed_accept: Optional[AcceptContainer] = None
self.parsed_json = None
self.parsed_form = None
self.parsed_files = None
self.parsed_args: DefaultDict[
Tuple[bool, bool, str, str], RequestParameters
] = defaultdict(RequestParameters)
self.parsed_not_grouped_args: DefaultDict[
Tuple[bool, bool, str, str], List[Tuple[str, str]]
] = defaultdict(list)
self.request_middleware_started = False
self._cookies: Optional[Dict[str, str]] = None
self._match_info: Dict[str, Any] = {}
self.stream: Optional[Http] = None
self.route: Optional[Route] = None
self._protocol = None
def __repr__(self):
class_name = self.__class__.__name__
return f"<{class_name}: {self.method} {self.path}>"
def generate_id(*_):
return uuid.uuid4()
async def respond(
self,
response: Optional[BaseHTTPResponse] = None,
*,
status: int = 200,
headers: Optional[Union[Header, Dict[str, str]]] = None,
content_type: Optional[str] = None,
):
# This logic of determining which response to use is subject to change
if response is None:
response = (self.stream and self.stream.response) or HTTPResponse(
status=status,
headers=headers,
content_type=content_type,
)
# Connect the response
if isinstance(response, BaseHTTPResponse) and self.stream:
response = self.stream.respond(response)
# Run response middleware
try:
response = await self.app._run_response_middleware(
self, response, request_name=self.name
)
except CancelledErrors:
raise
except Exception:
error_logger.exception(
"Exception occurred in one of response middleware handlers"
)
return response
async def receive_body(self):
"""Receive request.body, if not already received.
Streaming handlers may call this to receive the full body. Sanic calls
this function before running any handlers of non-streaming routes.
Custom request classes can override this for custom handling of both
streaming and non-streaming routes.
"""
if not self.body:
self.body = b"".join([data async for data in self.stream])
def name(self):
if self._name:
return self._name
elif self.route:
return self.route.name
return None
def endpoint(self):
return self.name
def uri_template(self):
return f"/{self.route.path}"
def protocol(self):
if not self._protocol:
self._protocol = self.transport.get_protocol()
return self._protocol
def raw_headers(self):
_, headers = self.head.split(b"\r\n", 1)
return bytes(headers)
def request_line(self):
reqline, _ = self.head.split(b"\r\n", 1)
return bytes(reqline)
def id(self) -> Optional[Union[uuid.UUID, str, int]]:
"""
A request ID passed from the client, or generated from the backend.
By default, this will look in a request header defined at:
``self.app.config.REQUEST_ID_HEADER``. It defaults to
``X-Request-ID``. Sanic will try to cast the ID into a ``UUID`` or an
``int``. If there is not a UUID from the client, then Sanic will try
to generate an ID by calling ``Request.generate_id()``. The default
behavior is to generate a ``UUID``. You can customize this behavior
by subclassing ``Request``.
.. code-block:: python
from sanic import Request, Sanic
from itertools import count
class IntRequest(Request):
counter = count()
def generate_id(self):
return next(self.counter)
app = Sanic("MyApp", request_class=IntRequest)
"""
if not self._id:
self._id = self.headers.getone(
self.app.config.REQUEST_ID_HEADER,
self.__class__.generate_id(self), # type: ignore
)
# Try casting to a UUID or an integer
if isinstance(self._id, str):
try:
self._id = uuid.UUID(self._id)
except ValueError:
try:
self._id = int(self._id) # type: ignore
except ValueError:
...
return self._id # type: ignore
def json(self):
if self.parsed_json is None:
self.load_json()
return self.parsed_json
def load_json(self, loads=json_loads):
try:
self.parsed_json = loads(self.body)
except Exception:
if not self.body:
return None
raise InvalidUsage("Failed when parsing body as json")
return self.parsed_json
def accept(self) -> AcceptContainer:
if self.parsed_accept is None:
accept_header = self.headers.getone("accept", "")
self.parsed_accept = parse_accept(accept_header)
return self.parsed_accept
def token(self):
"""Attempt to return the auth header token.
:return: token related to request
"""
prefixes = ("Bearer", "Token")
auth_header = self.headers.getone("authorization", None)
if auth_header is not None:
for prefix in prefixes:
if prefix in auth_header:
return auth_header.partition(prefix)[-1].strip()
return auth_header
def form(self):
if self.parsed_form is None:
self.parsed_form = RequestParameters()
self.parsed_files = RequestParameters()
content_type = self.headers.getone(
"content-type", DEFAULT_HTTP_CONTENT_TYPE
)
content_type, parameters = parse_content_header(content_type)
try:
if content_type == "application/x-www-form-urlencoded":
self.parsed_form = RequestParameters(
parse_qs(self.body.decode("utf-8"))
)
elif content_type == "multipart/form-data":
# TODO: Stream this instead of reading to/from memory
boundary = parameters["boundary"].encode("utf-8")
self.parsed_form, self.parsed_files = parse_multipart_form(
self.body, boundary
)
except Exception:
error_logger.exception("Failed when parsing form")
return self.parsed_form
def files(self):
if self.parsed_files is None:
self.form # compute form to get files
return self.parsed_files
def get_args(
self,
keep_blank_values: bool = False,
strict_parsing: bool = False,
encoding: str = "utf-8",
errors: str = "replace",
) -> RequestParameters:
"""
Method to parse `query_string` using `urllib.parse.parse_qs`.
This methods is used by `args` property.
Can be used directly if you need to change default parameters.
:param keep_blank_values:
flag indicating whether blank values in
percent-encoded queries should be treated as blank strings.
A true value indicates that blanks should be retained as blank
strings. The default false value indicates that blank values
are to be ignored and treated as if they were not included.
:type keep_blank_values: bool
:param strict_parsing:
flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored. If true,
errors raise a ValueError exception.
:type strict_parsing: bool
:param encoding:
specify how to decode percent-encoded sequences
into Unicode characters, as accepted by the bytes.decode() method.
:type encoding: str
:param errors:
specify how to decode percent-encoded sequences
into Unicode characters, as accepted by the bytes.decode() method.
:type errors: str
:return: RequestParameters
"""
if (
keep_blank_values,
strict_parsing,
encoding,
errors,
) not in self.parsed_args:
if self.query_string:
self.parsed_args[
(keep_blank_values, strict_parsing, encoding, errors)
] = RequestParameters(
parse_qs(
qs=self.query_string,
keep_blank_values=keep_blank_values,
strict_parsing=strict_parsing,
encoding=encoding,
errors=errors,
)
)
return self.parsed_args[
(keep_blank_values, strict_parsing, encoding, errors)
]
args = property(get_args)
def get_query_args(
self,
keep_blank_values: bool = False,
strict_parsing: bool = False,
encoding: str = "utf-8",
errors: str = "replace",
) -> list:
"""
Method to parse `query_string` using `urllib.parse.parse_qsl`.
This methods is used by `query_args` property.
Can be used directly if you need to change default parameters.
:param keep_blank_values:
flag indicating whether blank values in
percent-encoded queries should be treated as blank strings.
A true value indicates that blanks should be retained as blank
strings. The default false value indicates that blank values
are to be ignored and treated as if they were not included.
:type keep_blank_values: bool
:param strict_parsing:
flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored. If true,
errors raise a ValueError exception.
:type strict_parsing: bool
:param encoding:
specify how to decode percent-encoded sequences
into Unicode characters, as accepted by the bytes.decode() method.
:type encoding: str
:param errors:
specify how to decode percent-encoded sequences
into Unicode characters, as accepted by the bytes.decode() method.
:type errors: str
:return: list
"""
if (
keep_blank_values,
strict_parsing,
encoding,
errors,
) not in self.parsed_not_grouped_args:
if self.query_string:
self.parsed_not_grouped_args[
(keep_blank_values, strict_parsing, encoding, errors)
] = parse_qsl(
qs=self.query_string,
keep_blank_values=keep_blank_values,
strict_parsing=strict_parsing,
encoding=encoding,
errors=errors,
)
return self.parsed_not_grouped_args[
(keep_blank_values, strict_parsing, encoding, errors)
]
query_args = property(get_query_args)
"""
Convenience property to access :meth:`Request.get_query_args` with
default values.
"""
def cookies(self) -> Dict[str, str]:
    """
    :return: Incoming cookies on the request
    :rtype: Dict[str, str]

    NOTE(review): upstream this is a @property; decorator likely
    stripped during extraction.
    """
    if self._cookies is None:
        cookie = self.headers.getone("cookie", None)
        if cookie is not None:
            # SimpleCookie handles quoting/escaping of cookie values.
            cookies: SimpleCookie = SimpleCookie()
            cookies.load(cookie)
            # The comprehension's ``cookie`` (a Morsel) deliberately
            # shadows the header string above.
            self._cookies = {
                name: cookie.value for name, cookie in cookies.items()
            }
        else:
            self._cookies = {}
    return self._cookies
def content_type(self) -> str:
"""
:return: Content-Type header form the request
:rtype: str
"""
return self.headers.getone("content-type", DEFAULT_HTTP_CONTENT_TYPE)
def match_info(self):
    """
    :return: matched info after resolving route
    """
    return self._match_info

def match_info(self, value):
    # NOTE(review): two same-named defs -- upstream these are a @property
    # getter and its @match_info.setter; the decorators were lost in
    # extraction.  As written here, this setter definition silently
    # replaces the getter above.  Restore the decorators when
    # reassembling the file.
    self._match_info = value
# Transport properties (obtained from local interface only)
def ip(self) -> str:
"""
:return: peer ip of the socket
:rtype: str
"""
return self.conn_info.client_ip if self.conn_info else ""
def port(self) -> int:
"""
:return: peer port of the socket
:rtype: int
"""
return self.conn_info.client_port if self.conn_info else 0
def socket(self):
return self.conn_info.peername if self.conn_info else (None, None)
def path(self) -> str:
"""
:return: path of the local HTTP request
:rtype: str
"""
return self._parsed_url.path.decode("utf-8")
# Proxy properties (using SERVER_NAME/forwarded/request/transport info)
def forwarded(self) -> Options:
"""
Active proxy information obtained from request headers, as specified in
Sanic configuration.
Field names by, for, proto, host, port and path are normalized.
- for and by IPv6 addresses are bracketed
- port (int) is only set by port headers, not from host.
- path is url-unencoded
Additional values may be available from new style Forwarded headers.
:return: forwarded address info
:rtype: Dict[str, str]
"""
if self.parsed_forwarded is None:
self.parsed_forwarded = (
parse_forwarded(self.headers, self.app.config)
or parse_xforwarded(self.headers, self.app.config)
or {}
)
return self.parsed_forwarded
def remote_addr(self) -> str:
"""
Client IP address, if available.
1. proxied remote address `self.forwarded['for']`
2. local remote address `self.ip`
:return: IPv4, bracketed IPv6, UNIX socket name or arbitrary string
:rtype: str
"""
if not hasattr(self, "_remote_addr"):
self._remote_addr = str(
self.forwarded.get("for", "")
) # or self.ip
return self._remote_addr
def scheme(self) -> str:
"""
Determine request scheme.
1. `config.SERVER_NAME` if in full URL format
2. proxied proto/scheme
3. local connection protocol
:return: http|https|ws|wss or arbitrary value given by the headers.
:rtype: str
"""
if "//" in self.app.config.get("SERVER_NAME", ""):
return self.app.config.SERVER_NAME.split("//")[0]
if "proto" in self.forwarded:
return str(self.forwarded["proto"])
if (
self.app.websocket_enabled
and self.headers.getone("upgrade", "").lower() == "websocket"
):
scheme = "ws"
else:
scheme = "http"
if self.transport.get_extra_info("sslcontext"):
scheme += "s"
return scheme
def host(self) -> str:
"""
The currently effective server 'host' (hostname or hostname:port).
1. `config.SERVER_NAME` overrides any client headers
2. proxied host of original request
3. request host header
hostname and port may be separated by
`sanic.headers.parse_host(request.host)`.
:return: the first matching host found, or empty string
:rtype: str
"""
server_name = self.app.config.get("SERVER_NAME")
if server_name:
return server_name.split("//", 1)[-1].split("/", 1)[0]
return str(
self.forwarded.get("host") or self.headers.getone("host", "")
)
def server_name(self) -> str:
"""
:return: hostname the client connected to, by ``request.host``
:rtype: str
"""
return parse_host(self.host)[0] or ""
def server_port(self) -> int:
"""
The port the client connected to, by forwarded ``port`` or
``request.host``.
Default port is returned as 80 and 443 based on ``request.scheme``.
:return: port number
:rtype: int
"""
port = self.forwarded.get("port") or parse_host(self.host)[1]
return int(port or (80 if self.scheme in ("http", "ws") else 443))
def server_path(self) -> str:
"""
:return: full path of current URL; uses proxied or local path
:rtype: str
"""
return str(self.forwarded.get("path") or self.path)
def query_string(self) -> str:
"""
:return: representation of the requested query
:rtype: str
"""
if self._parsed_url.query:
return self._parsed_url.query.decode("utf-8")
else:
return ""
def url(self) -> str:
"""
:return: the URL
:rtype: str
"""
return urlunparse(
(self.scheme, self.host, self.path, None, self.query_string, None)
)
def url_for(self, view_name: str, **kwargs) -> str:
"""
Same as :func:`sanic.Sanic.url_for`, but automatically determine
`scheme` and `netloc` base on the request. Since this method is aiming
to generate correct schema & netloc, `_external` is implied.
:param kwargs: takes same parameters as in :func:`sanic.Sanic.url_for`
:return: an absolute url to the given view
:rtype: str
"""
# Full URL SERVER_NAME can only be handled in app.url_for
try:
if "//" in self.app.config.SERVER_NAME:
return self.app.url_for(view_name, _external=True, **kwargs)
except AttributeError:
pass
scheme = self.scheme
host = self.server_name
port = self.server_port
if (scheme.lower() in ("http", "ws") and port == 80) or (
scheme.lower() in ("https", "wss") and port == 443
):
netloc = host
else:
netloc = f"{host}:{port}"
return self.app.url_for(
view_name, _external=True, _scheme=scheme, _server=netloc, **kwargs
)
class HTTPResponse(BaseHTTPResponse):
    """
    HTTP response to be sent back to the client.

    :param body: the body content to be returned
    :type body: Optional[bytes]
    :param status: HTTP response number. **Default=200**
    :type status: int
    :param headers: headers to be returned
    :type headers: Optional;
    :param content_type: content type to be returned (as a header)
    :type content_type: Optional[str]
    """

    __slots__ = ("body", "status", "content_type", "headers", "_cookies")

    def __init__(
        self,
        body: Optional[AnyStr] = None,
        status: int = 200,
        headers: Optional[Union[Header, Dict[str, str]]] = None,
        content_type: Optional[str] = None,
    ):
        super().__init__()
        # NOTE(review): content_type is assigned before body;
        # _encode_body (not shown here) may depend on it -- preserve
        # this ordering.
        self.content_type: Optional[str] = content_type
        self.body = self._encode_body(body)
        self.status = status
        self.headers = Header(headers or {})
        self._cookies = None

    async def eof(self):
        # Signal end-of-stream by sending an empty final chunk.
        await self.send("", True)

    async def __aenter__(self):
        # ``async with resp as send:`` hands back the send callable...
        return self.send

    async def __aexit__(self, *_):
        # ...and guarantees EOF is sent when the block exits.
        await self.eof()
def json(
    body: Any,
    status: int = 200,
    headers: Optional[Dict[str, str]] = None,
    content_type: str = "application/json",
    dumps: Optional[Callable[..., str]] = None,
    **kwargs,
) -> HTTPResponse:
    """
    Returns response object with body in json format.

    :param body: Response data to be serialized.
    :param status: Response code.
    :param headers: Custom Headers.
    :param content_type: the content type (string) of the response.
    :param dumps: JSON serializer; defaults to ``BaseHTTPResponse._dumps``.
    :param kwargs: Remaining arguments that are passed to the json encoder.
    """
    if not dumps:
        dumps = BaseHTTPResponse._dumps
    return HTTPResponse(
        dumps(body, **kwargs),
        headers=headers,
        status=status,
        content_type=content_type,
    )
The provided code snippet includes necessary dependencies for implementing the `exception_response` function. Write a Python function `def exception_response( request: Request, exception: Exception, debug: bool, fallback: str, base: t.Type[BaseRenderer], renderer: t.Type[t.Optional[BaseRenderer]] = None, ) -> HTTPResponse` to solve the following problem:
Render a response for the default FALLBACK exception handler.
Here is the function:
def exception_response(
request: Request,
exception: Exception,
debug: bool,
fallback: str,
base: t.Type[BaseRenderer],
renderer: t.Type[t.Optional[BaseRenderer]] = None,
) -> HTTPResponse:
"""
Render a response for the default FALLBACK exception handler.
"""
content_type = None
if not renderer:
# Make sure we have something set
renderer = base
render_format = fallback
if request:
# If there is a request, try and get the format
# from the route
if request.route:
try:
if request.route.ctx.error_format:
render_format = request.route.ctx.error_format
except AttributeError:
...
content_type = request.headers.getone("content-type", "").split(
";"
)[0]
acceptable = request.accept
# If the format is auto still, make a guess
if render_format == "auto":
# First, if there is an Accept header, check if text/html
# is the first option
# According to MDN Web Docs, all major browsers use text/html
# as the primary value in Accept (with the exception of IE 8,
# and, well, if you are supporting IE 8, then you have bigger
# problems to concern yourself with than what default exception
# renderer is used)
# Source:
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Content_negotiation/List_of_default_Accept_values
if acceptable and acceptable[0].match(
"text/html",
allow_type_wildcard=False,
allow_subtype_wildcard=False,
):
renderer = HTMLRenderer
# Second, if there is an Accept header, check if
# application/json is an option, or if the content-type
# is application/json
elif (
acceptable
and acceptable.match(
"application/json",
allow_type_wildcard=False,
allow_subtype_wildcard=False,
)
or content_type == "application/json"
):
renderer = JSONRenderer
# Third, if there is no Accept header, assume we want text.
# The likely use case here is a raw socket.
elif not acceptable:
renderer = TextRenderer
else:
# Fourth, look to see if there was a JSON body
# When in this situation, the request is probably coming
# from curl, an API client like Postman or Insomnia, or a
# package like requests or httpx
try:
# Give them the benefit of the doubt if they did:
# $ curl localhost:8000 -d '{"foo": "bar"}'
# And provide them with JSONRenderer
renderer = JSONRenderer if request.json else base
except InvalidUsage:
renderer = base
else:
renderer = RENDERERS_BY_CONFIG.get(render_format, renderer)
# Lastly, if there is an Accept header, make sure
# our choice is okay
if acceptable:
type_ = CONTENT_TYPE_BY_RENDERERS.get(renderer) # type: ignore
if type_ and type_ not in acceptable:
# If the renderer selected is not in the Accept header
# look through what is in the Accept header, and select
# the first option that matches. Otherwise, just drop back
# to the original default
for accept in acceptable:
mtype = f"{accept.type_}/{accept.subtype}"
maybe = RENDERERS_BY_CONTENT_TYPE.get(mtype)
if maybe:
renderer = maybe
break
else:
renderer = base
renderer = t.cast(t.Type[BaseRenderer], renderer)
return renderer(request, exception, debug).render() | Render a response for the default FALLBACK exception handler. |
157,674 | from os import path
import sys
def _main(
    draft,
    directory,
    project_name,
    project_version,
    project_date,
    answer_yes,
):
    """Thin wrapper delegating changelog generation to towncrier.

    NOTE(review): ``towncrier.__main`` is a private, version-dependent
    entry point -- pin the towncrier version or migrate to its public
    API.
    """
    return towncrier.__main(
        draft,
        directory,
        project_name,
        project_version,
        project_date,
        answer_yes,
    )
157,675 | from argparse import ArgumentParser, Namespace
from collections import OrderedDict
from configparser import RawConfigParser
from datetime import datetime
from json import dumps
from os import path, chdir
from subprocess import Popen, PIPE
from jinja2 import Environment, BaseLoader
from requests import patch
import sys
import towncrier
def _fetch_current_version(config_file: str) -> str:
def _get_new_version(
config_file: str = "./setup.cfg",
current_version: str = None,
micro_release: bool = False,
):
def _get_current_tag(git_command_name="get_tag"):
def _update_release_version_for_sanic(
current_version, new_version, config_file, generate_changelog
):
def _tag_release(new_version, current_version, milestone, release_name, token):
def release(args: Namespace):
    """Drive a release: verify tags, bump the version, optionally tag git.

    :param args: parsed CLI namespace; fields used here are ``config``,
        ``release_version``, ``micro_release``, ``generate_changelog``,
        ``tag_release``, ``milestone``, ``release_name`` and ``token``.
    """
    current_tag = _get_current_tag()
    current_version = _fetch_current_version(args.config)
    # Refuse to continue when git's latest tag disagrees with the
    # version recorded in the config file.
    if current_tag and current_version not in current_tag:
        print(
            "Tag mismatch between what's in git and what was provided by "
            "--current-version. Existing: {}, Give: {}".format(
                current_tag, current_version
            )
        )
        sys.exit(1)
    # An explicit --release-version wins over the computed bump.
    new_version = args.release_version or _get_new_version(
        args.config, current_version, args.micro_release
    )
    _update_release_version_for_sanic(
        current_version=current_version,
        new_version=new_version,
        config_file=args.config,
        generate_changelog=args.generate_changelog,
    )
    if args.tag_release:
        _tag_release(
            current_version=current_version,
            new_version=new_version,
            milestone=args.milestone,
            release_name=args.release_name,
            token=args.token,
        )
157,676 | import os
import sys
import types
def get_module_docstring(filepath):
    """Return the module-level docstring of the Python file at *filepath*.

    The source is compiled (never executed); if the first constant of the
    resulting code object is a string it is the docstring, otherwise
    ``None`` is returned.
    """
    with open(filepath) as handle:
        code = compile(handle.read(), filepath, "exec")
    consts = code.co_consts
    if consts and isinstance(consts[0], str):
        return consts[0]
    return None
def setup(app):
    """Sphinx extension hook: register a stub module for autodoc.

    Builds a synthetic module carrying the ``.pyi`` stub's docstring and
    installs it in ``sys.modules`` so autodoc can import it by name.
    """
    module_name = "autodoc_stub_file.loguru"
    # NOTE(review): relative path assumes the Sphinx build runs from the
    # docs/ directory -- confirm the build working directory.
    stub_path = os.path.join("..", "loguru", "__init__.pyi")
    docstring = get_module_docstring(stub_path)
    module = types.ModuleType(module_name, docstring)
    sys.modules[module_name] = module
157,677 | import os
import sys
def setup(app):
    """Sphinx extension hook: attach custom CSS/JS assets to the build."""
    app.add_css_file("css/loguru.css")
    # NOTE(review): "coppybutton.js" looks like a typo for
    # "copybutton.js" -- verify the actual asset name on disk before
    # renaming either side.
    app.add_js_file("js/coppybutton.js")
157,678 | import functools
import json
import multiprocessing
import os
from threading import Thread
from ._colorizer import Colorizer
from ._locks_machinery import create_handler_lock
class Colorizer:
def prepare_format(string):
tokens, messages_color_tokens = Colorizer._parse_without_formatting(string)
return ColoredFormat(tokens, messages_color_tokens)
def prepare_message(string, args=(), kwargs={}):
tokens = Colorizer._parse_with_formatting(string, args, kwargs)
return ColoredMessage(tokens)
def prepare_simple_message(string):
parser = AnsiParser()
parser.feed(string)
tokens = parser.done()
return ColoredMessage(tokens)
def ansify(text):
parser = AnsiParser()
parser.feed(text.strip())
tokens = parser.done(strict=False)
return AnsiParser.colorize(tokens, None)
def _parse_with_formatting(
string, args, kwargs, *, recursion_depth=2, auto_arg_index=0, recursive=False
):
# This function re-implements Formatter._vformat()
if recursion_depth < 0:
raise ValueError("Max string recursion exceeded")
formatter = Formatter()
parser = AnsiParser()
for literal_text, field_name, format_spec, conversion in formatter.parse(string):
parser.feed(literal_text, raw=recursive)
if field_name is not None:
if field_name == "":
if auto_arg_index is False:
raise ValueError(
"cannot switch from manual field "
"specification to automatic field "
"numbering"
)
field_name = str(auto_arg_index)
auto_arg_index += 1
elif field_name.isdigit():
if auto_arg_index:
raise ValueError(
"cannot switch from manual field "
"specification to automatic field "
"numbering"
)
auto_arg_index = False
obj, _ = formatter.get_field(field_name, args, kwargs)
obj = formatter.convert_field(obj, conversion)
format_spec, auto_arg_index = Colorizer._parse_with_formatting(
format_spec,
args,
kwargs,
recursion_depth=recursion_depth - 1,
auto_arg_index=auto_arg_index,
recursive=True,
)
formatted = formatter.format_field(obj, format_spec)
parser.feed(formatted, raw=True)
tokens = parser.done()
if recursive:
return AnsiParser.strip(tokens), auto_arg_index
return tokens
def _parse_without_formatting(string, *, recursion_depth=2, recursive=False):
if recursion_depth < 0:
raise ValueError("Max string recursion exceeded")
formatter = Formatter()
parser = AnsiParser()
messages_color_tokens = []
for literal_text, field_name, format_spec, conversion in formatter.parse(string):
if literal_text and literal_text[-1] in "{}":
literal_text += literal_text[-1]
parser.feed(literal_text, raw=recursive)
if field_name is not None:
if field_name == "message":
if recursive:
messages_color_tokens.append(None)
else:
color_tokens = parser.current_color_tokens()
messages_color_tokens.append(color_tokens)
field = "{%s" % field_name
if conversion:
field += "!%s" % conversion
if format_spec:
field += ":%s" % format_spec
field += "}"
parser.feed(field, raw=True)
_, color_tokens = Colorizer._parse_without_formatting(
format_spec, recursion_depth=recursion_depth - 1, recursive=True
)
messages_color_tokens.extend(color_tokens)
return parser.done(), messages_color_tokens
def prepare_colored_format(format_, ansi_level):
    """Parse *format_* once; return (colored template, rendering at level).

    :return: tuple of the parsed colored format and its colorization for
        ``ansi_level``.
    """
    colored = Colorizer.prepare_format(format_)
    return colored, colored.colorize(ansi_level)
157,679 | import functools
import json
import multiprocessing
import os
from threading import Thread
from ._colorizer import Colorizer
from ._locks_machinery import create_handler_lock
class Colorizer:
def prepare_format(string):
def prepare_message(string, args=(), kwargs={}):
def prepare_simple_message(string):
def ansify(text):
def _parse_with_formatting(
string, args, kwargs, *, recursion_depth=2, auto_arg_index=0, recursive=False
):
def _parse_without_formatting(string, *, recursion_depth=2, recursive=False):
def prepare_stripped_format(format_):
    """Parse *format_* and return it with all color markup removed."""
    colored = Colorizer.prepare_format(format_)
    return colored.strip()
157,680 | import functools
import json
import multiprocessing
import os
from threading import Thread
from ._colorizer import Colorizer
from ._locks_machinery import create_handler_lock
def memoize(function):
    """Wrap *function* with an LRU cache holding at most 64 entries."""
    cached = functools.lru_cache(maxsize=64)(function)
    return cached
157,681 | import re
from calendar import day_abbr, day_name, month_abbr, month_name
from datetime import datetime as datetime_
from datetime import timedelta, timezone
from time import localtime, strftime
class datetime(datetime_):
def __format__(self, spec):
if spec.endswith("!UTC"):
dt = self.astimezone(timezone.utc)
spec = spec[:-4]
else:
dt = self
if not spec:
spec = "%Y-%m-%dT%H:%M:%S.%f%z"
if "%" in spec:
return datetime_.__format__(dt, spec)
year, month, day, hour, minute, second, weekday, yearday, _ = dt.timetuple()
microsecond = dt.microsecond
timestamp = dt.timestamp()
tzinfo = dt.tzinfo or timezone(timedelta(seconds=0))
offset = tzinfo.utcoffset(dt).total_seconds()
sign = ("-", "+")[offset >= 0]
h, m = divmod(abs(offset // 60), 60)
rep = {
"YYYY": "%04d" % year,
"YY": "%02d" % (year % 100),
"Q": "%d" % ((month - 1) // 3 + 1),
"MMMM": month_name[month],
"MMM": month_abbr[month],
"MM": "%02d" % month,
"M": "%d" % month,
"DDDD": "%03d" % yearday,
"DDD": "%d" % yearday,
"DD": "%02d" % day,
"D": "%d" % day,
"dddd": day_name[weekday],
"ddd": day_abbr[weekday],
"d": "%d" % weekday,
"E": "%d" % (weekday + 1),
"HH": "%02d" % hour,
"H": "%d" % hour,
"hh": "%02d" % ((hour - 1) % 12 + 1),
"h": "%d" % ((hour - 1) % 12 + 1),
"mm": "%02d" % minute,
"m": "%d" % minute,
"ss": "%02d" % second,
"s": "%d" % second,
"S": "%d" % (microsecond // 100000),
"SS": "%02d" % (microsecond // 10000),
"SSS": "%03d" % (microsecond // 1000),
"SSSS": "%04d" % (microsecond // 100),
"SSSSS": "%05d" % (microsecond // 10),
"SSSSSS": "%06d" % microsecond,
"A": ("AM", "PM")[hour // 12],
"Z": "%s%02d:%02d" % (sign, h, m),
"ZZ": "%s%02d%02d" % (sign, h, m),
"zz": tzinfo.tzname(dt) or "",
"X": "%d" % timestamp,
"x": "%d" % (int(timestamp) * 1000000 + microsecond),
}
def get(m):
try:
return rep[m.group(0)]
except KeyError:
return m.group(0)[1:-1]
return pattern.sub(get, spec)
def aware_now():
    """Return the current local time as a timezone-aware ``datetime``.

    Uses ``tm_gmtoff``/``tm_zone`` when the platform provides them and
    otherwise derives the UTC offset from the difference between local
    and UTC interpretations of the same timestamp.
    """
    now = datetime.now()
    stamp = now.timestamp()
    local = localtime(stamp)
    try:
        offset_seconds, zone_name = local.tm_gmtoff, local.tm_zone
    except AttributeError:
        delta = datetime.fromtimestamp(stamp) - datetime.utcfromtimestamp(stamp)
        offset_seconds = delta.total_seconds()
        zone_name = strftime("%Z")
    tzinfo = timezone(timedelta(seconds=offset_seconds), zone_name)
    return now.replace(tzinfo=tzinfo)
157,682 | from os import environ
# NOTE(review): garbled extraction residue -- the original line read
# "P IHW#S", which is not valid Python; likely a mangled comment or an
# encoding artifact from the dump.
def env(key, type_, default=None):
    """Read environment variable *key* coerced to *type_* (str/bool/int).

    Returns *default* when the variable is unset.  Raises ``ValueError``
    for values that cannot be coerced to the requested type.
    """
    try:
        raw = environ[key]
    except KeyError:
        return default
    if type_ == str:
        return raw
    if type_ == bool:
        lowered = raw.lower()
        if lowered in ("1", "true", "yes", "y", "ok", "on"):
            return True
        if lowered in ("0", "false", "no", "n", "nok", "off"):
            return False
        raise ValueError(
            "Invalid environment variable '%s' (expected a boolean): '%s'" % (key, raw)
        )
    if type_ == int:
        try:
            return int(raw)
        except ValueError:
            raise ValueError(
                "Invalid environment variable '%s' (expected an integer): '%s'" % (key, raw)
            ) from None
157,683 | import os
if os.name == "nt":
import win32_setctime
elif hasattr(os.stat_result, "st_birthtime"):
elif hasattr(os, "getxattr") and hasattr(os, "setxattr"):
else:
def get_ctime(filepath):
    """Return the file creation time (on Windows, ``st_ctime`` is it)."""
    return os.stat(filepath).st_ctime
157,684 | import os
def set_ctime(filepath, timestamp):
    """Best-effort: set the Windows creation time of *filepath*.

    Silently does nothing when win32_setctime is unsupported or the
    call fails -- creation-time tracking is optional.
    """
    if not win32_setctime.SUPPORTED:
        return
    try:
        win32_setctime.setctime(filepath, timestamp)
    except (OSError, ValueError):
        pass
157,685 | import os
if os.name == "nt":
import win32_setctime
elif hasattr(os.stat_result, "st_birthtime"):
elif hasattr(os, "getxattr") and hasattr(os, "setxattr"):
else:
def get_ctime(filepath):
    """Return the file creation time via BSD/macOS ``st_birthtime``."""
    return os.stat(filepath).st_birthtime
157,686 | import os
if os.name == "nt":
import win32_setctime
elif hasattr(os.stat_result, "st_birthtime"):
elif hasattr(os, "getxattr") and hasattr(os, "setxattr"):
else:
def get_ctime(filepath):
    """Return creation time stored in the ``user.loguru_crtime`` xattr.

    Falls back to the modification time when the attribute is missing
    or the filesystem does not support extended attributes.
    """
    try:
        return float(os.getxattr(filepath, b"user.loguru_crtime"))
    except OSError:
        return os.stat(filepath).st_mtime
157,687 | import os
if os.name == "nt":
import win32_setctime
elif hasattr(os.stat_result, "st_birthtime"):
elif hasattr(os, "getxattr") and hasattr(os, "setxattr"):
else:
def set_ctime(filepath, timestamp):
    """Best-effort: record *timestamp* in an extended file attribute."""
    try:
        os.setxattr(filepath, b"user.loguru_crtime", str(timestamp).encode("ascii"))
    except OSError:
        # Filesystem without xattr support -- creation-time tracking is
        # optional, so fail silently.
        pass
157,688 | import os
if os.name == "nt":
import win32_setctime
elif hasattr(os.stat_result, "st_birthtime"):
elif hasattr(os, "getxattr") and hasattr(os, "setxattr"):
else:
def get_ctime(filepath):
    """Last-resort fallback: report modification time as creation time."""
    stat_result = os.stat(filepath)
    return stat_result.st_mtime
157,689 | import asyncio
import logging
import sys
import weakref
def get_loop(task):
    """Return the event loop the asyncio *task* is bound to (3.8+ API)."""
    return task.get_loop()
157,690 | import asyncio
import logging
import sys
import weakref
def get_loop(task):
    """Return the task's loop via the private ``_loop`` attribute.

    NOTE(review): fallback for Python versions predating
    ``Task.get_loop``; relies on a private CPython attribute.
    """
    return task._loop
157,691 | import os
import sys
def should_colorize(stream):
    """Decide whether ANSI colors should be emitted on *stream*.

    The real stdout/stderr get colors inside PyCharm's hosted console or
    a Windows terminal advertising TERM even though they are not ttys;
    otherwise defer to ``isatty()``, treating any failure as "no".
    """
    if stream is None:
        return False
    is_std = stream is sys.__stdout__ or stream is sys.__stderr__
    if is_std:
        if "PYCHARM_HOSTED" in os.environ or (
            os.name == "nt" and "TERM" in os.environ
        ):
            return True
    try:
        return stream.isatty()
    except Exception:
        return False
157,692 | import os
import sys
def should_wrap(stream):
    """Return True when *stream* needs colorama's Win32 ANSI emulation.

    Only the real stdout/stderr on Windows qualify, and only when the
    Win32 console API is actually usable.
    """
    is_std = stream is sys.__stdout__ or stream is sys.__stderr__
    if os.name != "nt" or not is_std:
        return False
    from colorama.win32 import winapi_test

    return winapi_test()
157,693 | import os
import sys
def wrap(stream):
    """Wrap *stream* with colorama so ANSI codes become Win32 calls.

    ``convert=True`` translates ANSI sequences, ``strip=False`` keeps
    unconvertible ones in the output, and reset is left to the caller.
    """
    from colorama import AnsiToWin32
    return AnsiToWin32(stream, convert=True, strip=False, autoreset=False).stream
157,694 | import re
from string import Formatter
def ansi_escape(codes):
    """Map each name in *codes* to its ``\\033[<code>m`` ANSI escape."""
    escapes = {}
    for name, code in codes.items():
        escapes[name] = "\033[%dm" % code
    return escapes
157,695 | import os
import threading
import weakref
def create_logger_lock():
    """Return a fresh lock guarding a logger's shared state.

    NOTE(review): unlike the registering variant later in this file,
    this one does not track the lock for at-fork handling -- presumably
    the no-``os.register_at_fork`` fallback; confirm.
    """
    return threading.Lock()
157,696 | import os
import threading
import weakref
def create_handler_lock():
    """Return a fresh lock guarding a handler's shared state."""
    return threading.Lock()
157,697 | import os
import threading
import weakref
def acquire_locks():
    """Acquire every registered logger and handler lock.

    NOTE(review): acquisition order (loggers before handlers) must
    mirror ``release_locks`` exactly to avoid deadlock around fork --
    confirm against the registration site before reordering.
    """
    for lock in logger_locks:
        lock.acquire()
    for lock in handler_locks:
        lock.acquire()
157,698 | import os
import threading
import weakref
def release_locks():
    """Release every registered logger and handler lock.

    Counterpart of ``acquire_locks``; iterates the same registries in
    the same order.
    """
    for lock in logger_locks:
        lock.release()
    for lock in handler_locks:
        lock.release()
157,699 | import os
import threading
import weakref
def create_logger_lock():
    """Create a logger lock and register it in the (weak) registry.

    Registered locks are the ones acquired/released around fork by
    ``acquire_locks``/``release_locks``.
    """
    lock = threading.Lock()
    logger_locks.add(lock)
    return lock
157,700 | import os
import threading
import weakref
def create_handler_lock():
    """Create a handler lock and register it in the (weak) registry."""
    lock = threading.Lock()
    handler_locks.add(lock)
    return lock
157,701 | import sys
from sys import exc_info
def get_frame_fallback(n):
    """Return the stack frame *n* levels above the caller.

    Pure-Python fallback for ``sys._getframe``: raise and catch an
    exception to obtain the current traceback, then walk ``f_back``.
    """
    try:
        raise Exception
    except Exception:
        frame = exc_info()[2].tb_frame.f_back
        while n > 0:
            frame = frame.f_back
            n -= 1
        return frame
157,702 | import datetime
import re
def parse_size(size):
    """Parse a human-readable size ("10 MB", "5 KiB", "8 kb") to a float.

    Returns ``None`` when the text does not look like a size at all and
    raises ``ValueError`` for a malformed numeric part.  A lowercase
    trailing "b" means bits (divide by 8); an "i" infix selects binary
    (1024-based) units.
    """
    pattern = re.compile(r"([e\+\-\.\d]+)\s*([kmgtpezy])?(i)?(b)", flags=re.I)
    match = pattern.fullmatch(size.strip())
    if match is None:
        return None
    number, prefix, binary, bits = match.groups()
    try:
        value = float(number)
    except ValueError as e:
        raise ValueError("Invalid float value while parsing size: '%s'" % number) from e
    exponent = "kmgtpezy".index(prefix.lower()) + 1 if prefix else 0
    base = 1024 if binary else 1000
    divisor = {"b": 8, "B": 1}[bits] if bits else 1
    return value * base ** exponent / divisor
157,703 | import datetime
import re
def parse_duration(duration):
    """Parse a human-readable duration ("3 days, 2h") to a ``timedelta``.

    Returns ``None`` when the string does not match the expected shape;
    raises ``ValueError`` for malformed numbers or unknown units.
    """
    token = r"(?:([e\+\-\.\d]+)\s*([a-z]+)[\s\,]*)"
    unit_table = [
        ("y|years?", 31536000),
        ("months?", 2628000),
        ("w|weeks?", 604800),
        ("d|days?", 86400),
        ("h|hours?", 3600),
        ("min(?:ute)?s?", 60),
        ("s|sec(?:ond)?s?", 1),
        ("ms|milliseconds?", 0.001),
        ("us|microseconds?", 0.000001),
    ]
    text = duration.strip()
    if re.fullmatch(token + "+", text, flags=re.I) is None:
        return None
    total_seconds = 0
    for number, unit in re.findall(token, text, flags=re.I):
        try:
            value = float(number)
        except ValueError as e:
            raise ValueError(
                "Invalid float value while parsing duration: '%s'" % number
            ) from e
        for unit_pattern, scale in unit_table:
            if re.fullmatch(unit_pattern, unit, flags=re.I):
                break
        else:
            raise ValueError(
                "Invalid unit value while parsing duration: '%s'" % unit
            ) from None
        total_seconds += value * scale
    return datetime.timedelta(seconds=total_seconds)
157,704 | import datetime
import re
class Frequencies:
    """Namespace of helpers mapping a datetime to the next rotation time."""

    def hourly(t):
        """Start of the hour following *t*."""
        return (t + datetime.timedelta(hours=1)).replace(
            minute=0, second=0, microsecond=0
        )

    def daily(t):
        """Midnight of the day following *t*."""
        return (t + datetime.timedelta(days=1)).replace(
            hour=0, minute=0, second=0, microsecond=0
        )

    def weekly(t):
        """Midnight of the Monday following *t*."""
        ahead = 7 - t.weekday()
        return (t + datetime.timedelta(days=ahead)).replace(
            hour=0, minute=0, second=0, microsecond=0
        )

    def monthly(t):
        """Midnight on the first day of the month following *t*."""
        if t.month == 12:
            return t.replace(
                year=t.year + 1, month=1, day=1,
                hour=0, minute=0, second=0, microsecond=0,
            )
        return t.replace(
            month=t.month + 1, day=1,
            hour=0, minute=0, second=0, microsecond=0,
        )

    def yearly(t):
        """Midnight on January 1st of the year following *t*."""
        return t.replace(
            year=t.year + 1, month=1, day=1,
            hour=0, minute=0, second=0, microsecond=0,
        )
def parse_frequency(frequency):
    """Map a frequency name ("daily", "weekly", ...) to its helper.

    Matching is case-insensitive and ignores surrounding whitespace;
    unrecognized names yield ``None``.
    """
    lookup = {
        "hourly": Frequencies.hourly,
        "daily": Frequencies.daily,
        "weekly": Frequencies.weekly,
        "monthly": Frequencies.monthly,
        "yearly": Frequencies.yearly,
    }
    return lookup.get(frequency.strip().lower())
157,705 | import datetime
import re
def parse_day(day):
def parse_time(time):
def parse_daytime(daytime):
    """Split a rotation spec like "monday at 12:00" into (day, time).

    Accepts a bare day, a bare time, or "<day> at <time>".  Returns
    ``None`` when neither part can be parsed, otherwise a ``(day, time)``
    tuple where either element may be ``None``.

    :raises ValueError: when an explicit "<day> at <time>" form has an
        unparseable day or time component.
    """
    daytime = daytime.strip()
    reg = re.compile(r"^(.*?)\s+at\s+(.*)$", flags=re.I)
    match = reg.match(daytime)
    if match:
        day, time = match.groups()
    else:
        # No "at" separator: the same text is tried as a day AND a time.
        day = time = daytime
    try:
        day = parse_day(day)
        # In the explicit form the day part must actually parse.
        if match and day is None:
            raise ValueError
    except ValueError as e:
        raise ValueError("Invalid day while parsing daytime: '%s'" % day) from e
    try:
        time = parse_time(time)
        if match and time is None:
            raise ValueError
    except ValueError as e:
        raise ValueError("Invalid time while parsing daytime: '%s'" % time) from e
    if day is None and time is None:
        return None
    return day, time
157,706 |
def filter_none(record):
    """Reject records whose module ``name`` could not be determined."""
    if record["name"] is None:
        return False
    return True
157,707 |
def filter_by_name(record, parent, length):
    """Keep records whose module is *parent* itself or a submodule of it.

    *parent* is a dotted prefix ending with "." and *length* is
    ``len(parent)``; comparing ``(name + ".")[:length]`` matches both
    the exact module and any of its children.
    """
    name = record["name"]
    if name is None:
        return False
    candidate = name + "."
    return candidate[:length] == parent
157,708 |
def filter_by_level(record, level_per_module):
    """Resolve the per-module level for *record* and test against it.

    Walks from the record's module up through its dotted ancestors and
    finally the "" root.  The first configured entry decides: ``False``
    blocks the record outright, a numeric level is compared against the
    record's level number.  An exhausted walk accepts the record.
    """
    name = record["name"]
    while True:
        configured = level_per_module.get(name)
        if configured is False:
            return False
        if configured is not None:
            return record["level"].no >= configured
        if not name:
            return True
        dot = name.rfind(".")
        name = "" if dot == -1 else name[:dot]
157,709 | import datetime as datetime_
import decimal
import glob
import locale
import numbers
import os
import shutil
import string
from functools import partial
from . import _string_parsers as string_parsers
from ._ctime_functions import get_ctime, set_ctime
from ._datetime import aware_now, datetime
class FileDateFormatter:
    """Format helper used to stamp rotated file names with a datetime.

    Wraps a datetime (defaulting to the current aware time) and renders
    it through ``__format__`` so it can be used directly in
    ``str.format`` patterns; the default spec is filename-safe.
    """

    def __init__(self, datetime=None):
        self.datetime = datetime or aware_now()

    def __format__(self, spec):
        pattern = spec if spec else "%Y-%m-%d_%H-%M-%S_%f"
        return self.datetime.__format__(pattern)
from datetime import datetime as datetime_
from datetime import timedelta, timezone
class datetime(datetime_):
def __format__(self, spec):
if spec.endswith("!UTC"):
dt = self.astimezone(timezone.utc)
spec = spec[:-4]
else:
dt = self
if not spec:
spec = "%Y-%m-%dT%H:%M:%S.%f%z"
if "%" in spec:
return datetime_.__format__(dt, spec)
year, month, day, hour, minute, second, weekday, yearday, _ = dt.timetuple()
microsecond = dt.microsecond
timestamp = dt.timestamp()
tzinfo = dt.tzinfo or timezone(timedelta(seconds=0))
offset = tzinfo.utcoffset(dt).total_seconds()
sign = ("-", "+")[offset >= 0]
h, m = divmod(abs(offset // 60), 60)
rep = {
"YYYY": "%04d" % year,
"YY": "%02d" % (year % 100),
"Q": "%d" % ((month - 1) // 3 + 1),
"MMMM": month_name[month],
"MMM": month_abbr[month],
"MM": "%02d" % month,
"M": "%d" % month,
"DDDD": "%03d" % yearday,
"DDD": "%d" % yearday,
"DD": "%02d" % day,
"D": "%d" % day,
"dddd": day_name[weekday],
"ddd": day_abbr[weekday],
"d": "%d" % weekday,
"E": "%d" % (weekday + 1),
"HH": "%02d" % hour,
"H": "%d" % hour,
"hh": "%02d" % ((hour - 1) % 12 + 1),
"h": "%d" % ((hour - 1) % 12 + 1),
"mm": "%02d" % minute,
"m": "%d" % minute,
"ss": "%02d" % second,
"s": "%d" % second,
"S": "%d" % (microsecond // 100000),
"SS": "%02d" % (microsecond // 10000),
"SSS": "%03d" % (microsecond // 1000),
"SSSS": "%04d" % (microsecond // 100),
"SSSSS": "%05d" % (microsecond // 10),
"SSSSSS": "%06d" % microsecond,
"A": ("AM", "PM")[hour // 12],
"Z": "%s%02d:%02d" % (sign, h, m),
"ZZ": "%s%02d%02d" % (sign, h, m),
"zz": tzinfo.tzname(dt) or "",
"X": "%d" % timestamp,
"x": "%d" % (int(timestamp) * 1000000 + microsecond),
}
def get(m):
try:
return rep[m.group(0)]
except KeyError:
return m.group(0)[1:-1]
return pattern.sub(get, spec)
def generate_rename_path(root, ext, creation_time):
    """Build a collision-free archive name "<root>.<date>[.N]<ext>".

    The creation date is embedded via ``FileDateFormatter``; when that
    name already exists on disk, an increasing counter is inserted until
    a free path is found.
    """
    creation_datetime = datetime.fromtimestamp(creation_time)
    date = FileDateFormatter(creation_datetime)
    candidate = "{}.{}{}".format(root, date, ext)
    counter = 1
    while os.path.exists(candidate):
        counter += 1
        candidate = "{}.{}.{}{}".format(root, date, counter, ext)
    return candidate
157,710 | import os
import re
import sphinx_rtd_theme
def read(*parts):
    """Join *parts* onto this file's directory and return the file's text.

    The target file is assumed to be UTF-8 encoded.
    """
    base = os.path.abspath(os.path.dirname(__file__))
    target = os.path.join(base, *parts)
    with open(target, encoding="utf-8") as handle:
        return handle.read()
The provided code snippet includes necessary dependencies for implementing the `find_version` function. Write a Python function `def find_version(*file_paths)` to solve the following problem:
Build a path from *file_paths* and search for a ``__version__`` string inside.
Here is the function:
def find_version(*file_paths):
    """
    Build a path from *file_paths* and search for a ``__version__``
    string inside.

    :raises RuntimeError: when no ``__version__`` assignment is found.
    """
    source = read(*file_paths)
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", source, re.M)
    if match is None:
        raise RuntimeError("Unable to find version string.")
    return match.group(1)
157,711 | import base64
import binascii
from typing import Any, Union
def force_bytes(value: Union[str, bytes]) -> bytes:
    """Coerce *value* to ``bytes``.

    ``str`` input is UTF-8 encoded; ``bytes`` pass through untouched.

    :raises TypeError: for any other type.
    """
    if isinstance(value, bytes):
        return value
    if isinstance(value, str):
        return value.encode("utf-8")
    raise TypeError("Expected a string value")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.