idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
225,600
def select_workers_to_close(scheduler, n_to_close):
    """Choose ``n_to_close`` workers to shut down, preferring idle low-memory ones.

    Returns the addresses of the selected workers.
    """
    workers = list(scheduler.workers.values())
    assert n_to_close <= len(workers)
    memory = lambda ws: ws.metrics['memory']
    # Idle workers with the least memory go first.
    to_close = set(sorted(scheduler.idle, key=memory)[:n_to_close])
    if len(to_close) < n_to_close:
        # Not enough idle workers: top up from the low-memory end of the
        # descending-sorted worker list.
        remaining = sorted(workers, key=memory, reverse=True)
        while len(to_close) < n_to_close:
            to_close.add(remaining.pop())
    return [ws.address for ws in to_close]
Select n workers to close from scheduler
152
8
225,601
def from_yaml(cls, yaml_path, **kwargs):
    """Create a cluster whose worker pod spec is loaded from a YAML file."""
    if not yaml:
        raise ImportError("PyYaml is required to use yaml functionality, please install it!")
    with open(yaml_path) as f:
        spec = yaml.safe_load(f)
    spec = dask.config.expand_environment_variables(spec)
    return cls.from_dict(spec, **kwargs)
Create cluster with worker pod spec defined by a YAML file
101
13
225,602
def pods(self):
    """Return the kubernetes pods matching this cluster's pod-template labels."""
    selector = format_labels(self.pod_template.metadata.labels)
    return self.core_api.list_namespaced_pod(self.namespace, label_selector=selector).items
A list of kubernetes pods corresponding to current workers
47
12
225,603
def logs(self, pod=None):
    """Fetch the log of one pod, or a mapping of pod IP to log for all pods."""
    if pod is None:
        return {p.status.pod_ip: self.logs(p) for p in self.pods()}
    return self.core_api.read_namespaced_pod_log(pod.metadata.name, pod.metadata.namespace)
Logs from a worker pod
68
6
225,604
def scale(self, n):
    """Scale the cluster to ``n`` workers, cancelling pending pods before
    closing live workers."""
    pods = self._cleanup_terminated_pods(self.pods())
    if n >= len(pods):
        return self.scale_up(n, pods=pods)
    n_to_delete = len(pods) - n
    # Before trying to close running workers, check if we can cancel
    # pending pods (in case the kubernetes cluster was too full to
    # provision those pods in the first place).
    running_workers = list(self.scheduler.workers.keys())
    running_ips = set(urlparse(worker).hostname for worker in running_workers)
    pending_pods = [p for p in pods if p.status.pod_ip not in running_ips]
    if pending_pods:
        pending_to_delete = pending_pods[:n_to_delete]
        logger.debug("Deleting pending pods: %s", pending_to_delete)
        self._delete_pods(pending_to_delete)
        n_to_delete -= len(pending_to_delete)
        if n_to_delete <= 0:
            return
    to_close = select_workers_to_close(self.scheduler, n_to_delete)
    logger.debug("Closing workers: %s", to_close)
    if len(to_close) < len(self.scheduler.workers):
        # Close workers cleanly to migrate any temporary results to
        # remaining workers.
        @gen.coroutine
        def f(to_close):
            yield self.scheduler.retire_workers(
                workers=to_close, remove=True, close_workers=True)
            yield offload(self.scale_down, to_close)

        self.scheduler.loop.add_callback(f, to_close)
        return
    # Terminate all pods without waiting for clean worker shutdown
    self.scale_down(to_close)
Scale cluster to n workers
418
5
225,605
def scale_up(self, n, pods=None, **kwargs):
    """Ensure ``n`` worker pods exist for this cluster, creating the missing ones.

    Creation is retried up to three times on kubernetes ServerTimeout errors.
    Returns the list of newly created pods.
    """
    maximum = dask.config.get('kubernetes.count.max')
    if maximum is not None and maximum < n:
        logger.info("Tried to scale beyond maximum number of workers %d > %d",
                    n, maximum)
        n = maximum
    pods = pods or self._cleanup_terminated_pods(self.pods())
    to_create = n - len(pods)
    new_pods = []
    for attempt in range(3):
        try:
            for _ in range(to_create):
                new_pods.append(
                    self.core_api.create_namespaced_pod(self.namespace, self.pod_template))
                to_create -= 1  # track remaining so a retry only creates what is left
            break
        except kubernetes.client.rest.ApiException as e:
            if e.status == 500 and 'ServerTimeout' in e.body:
                logger.info("Server timeout, retry #%d", attempt + 1)
                time.sleep(1)
                last_exception = e
                continue
            raise
    else:
        # All three attempts timed out.
        raise last_exception
    return new_pods
Make sure we have n dask - workers available for this cluster
253
13
225,606
def scale_down(self, workers, pods=None):
    """Delete the pods backing the given worker addresses."""
    # Get the existing worker pods
    pods = pods or self._cleanup_terminated_pods(self.pods())
    # Each worker is given as "tcp://<worker ip>:<port>"; collect the IPs.
    ips = {urlparse(worker).hostname for worker in workers}
    to_delete = [p for p in pods if p.status.pod_ip in ips]
    if not to_delete:
        return
    self._delete_pods(to_delete)
Remove the pods for the requested list of workers
144
9
225,607
def close(self, **kwargs):
    """Shut the cluster down after scaling its workers away."""
    self.scale_down(self.cluster.scheduler.workers)
    return self.cluster.close(**kwargs)
Close this cluster
40
3
225,608
def job_to_dict(job):
    """Convert an APScheduler job to an OrderedDict."""
    data = OrderedDict()
    data['id'] = job.id
    data['name'] = job.name
    data['func'] = job.func_ref
    data['args'] = job.args
    data['kwargs'] = job.kwargs
    data.update(trigger_to_dict(job.trigger))
    if not job.pending:
        # Runtime fields only exist once the job has actually been scheduled.
        data['misfire_grace_time'] = job.misfire_grace_time
        data['max_instances'] = job.max_instances
        data['next_run_time'] = None if job.next_run_time is None else job.next_run_time
    return data
Converts a job to an OrderedDict .
165
11
225,609
def pop_trigger(data):
    """Remove the trigger name and its arguments from *data*; return both.

    Raises ``Exception`` for an unsupported trigger name.
    """
    trigger_name = data.pop('trigger')
    arg_names_by_trigger = {
        'date': ('run_date', 'timezone'),
        'interval': ('weeks', 'days', 'hours', 'minutes', 'seconds',
                     'start_date', 'end_date', 'timezone'),
        'cron': ('year', 'month', 'day', 'week', 'day_of_week', 'hour',
                 'minute', 'second', 'start_date', 'end_date', 'timezone'),
    }
    if trigger_name not in arg_names_by_trigger:
        raise Exception('Trigger %s is not supported.' % trigger_name)
    trigger_args = {name: data.pop(name)
                    for name in arg_names_by_trigger[trigger_name]
                    if name in data}
    return trigger_name, trigger_args
Pops trigger and trigger args from a given dict .
242
11
225,610
def trigger_to_dict(trigger):
    """Convert an APScheduler trigger to an OrderedDict."""
    data = OrderedDict()
    if isinstance(trigger, DateTrigger):
        data['trigger'] = 'date'
        data['run_date'] = trigger.run_date
    elif isinstance(trigger, IntervalTrigger):
        data['trigger'] = 'interval'
        data['start_date'] = trigger.start_date
        if trigger.end_date:
            data['end_date'] = trigger.end_date
        weeks, days, hours, minutes, seconds = extract_timedelta(trigger.interval)
        # Only emit the non-zero interval components.
        for key, amount in (('weeks', weeks), ('days', days), ('hours', hours),
                            ('minutes', minutes), ('seconds', seconds)):
            if amount > 0:
                data[key] = amount
    elif isinstance(trigger, CronTrigger):
        data['trigger'] = 'cron'
        if trigger.start_date:
            data['start_date'] = trigger.start_date
        if trigger.end_date:
            data['end_date'] = trigger.end_date
        for field in trigger.fields:
            if not field.is_default:
                data[field.name] = str(field)
    else:
        # Unknown trigger type: fall back to its string representation.
        data['trigger'] = str(trigger)
    return data
Converts a trigger to an OrderedDict .
305
11
225,611
def fix_job_def(job_def):
    """Normalize a job definition in place: parse date strings into datetimes
    and flatten a dict-style trigger."""
    if six.PY2 and isinstance(job_def.get('func'), six.text_type):
        # when a job comes from the endpoint, strings are unicode
        # because that's how json package deserialize the bytes.
        # we had a case where APScheduler failed to import the func based
        # on its name because Py2 expected a str and not unicode on __import__().
        # a workaround is to convert the func to str.
        # full story: https://github.com/viniciuschiele/flask-apscheduler/issues/75
        job_def['func'] = str(job_def.get('func'))
    for key in ('start_date', 'end_date', 'run_date'):
        if isinstance(job_def.get(key), six.string_types):
            job_def[key] = dateutil.parser.parse(job_def.get(key))
    # it keeps compatibility backward
    if isinstance(job_def.get('trigger'), dict):
        trigger = job_def.pop('trigger')
        job_def['trigger'] = trigger.pop('type', 'date')
        job_def.update(trigger)
Replaces the datetime in string by datetime object .
410
12
225,612
def init_app(self, app):
    """Attach this scheduler to a Flask *app*, then load config, jobs and
    (optionally) the REST API."""
    self.app = app
    self.app.apscheduler = self
    self._load_config()
    self._load_jobs()
    if self.api_enabled:
        self._load_api()
Initialize the APScheduler with a Flask application instance .
56
13
225,613
def add_listener(self, callback, mask=EVENT_ALL):
    """Forward a scheduler-event listener registration to APScheduler."""
    self._scheduler.add_listener(callback, mask)
Add a listener for scheduler events .
33
8
225,614
def add_job(self, id, func, **kwargs):
    """Add a job to the scheduler; its name defaults to its id."""
    job_def = dict(kwargs)
    job_def['id'] = id
    job_def['func'] = func
    job_def['name'] = job_def.get('name') or id
    fix_job_def(job_def)
    return self._scheduler.add_job(**job_def)
Add the given job to the job list and wake up the scheduler if it is already running .
97
20
225,615
def delete_job(self, id, jobstore=None):
    """DEPRECATED: alias of ``remove_job`` kept for backward compatibility."""
    warnings.warn('delete_job has been deprecated, use remove_job instead.', DeprecationWarning)
    self.remove_job(id, jobstore)
DEPRECATED use remove_job instead .
50
9
225,616
def modify_job(self, id, jobstore=None, **changes):
    """Modify a job's properties; a changed trigger is applied by rescheduling."""
    fix_job_def(changes)
    if 'trigger' in changes:
        trigger, trigger_args = pop_trigger(changes)
        self._scheduler.reschedule_job(id, jobstore, trigger, **trigger_args)
    return self._scheduler.modify_job(id, jobstore, **changes)
Modify the properties of a single job . Modifications are passed to this method as extra keyword arguments .
93
21
225,617
def _load_config ( self ) : options = dict ( ) job_stores = self . app . config . get ( 'SCHEDULER_JOBSTORES' ) if job_stores : options [ 'jobstores' ] = job_stores executors = self . app . config . get ( 'SCHEDULER_EXECUTORS' ) if executors : options [ 'executors' ] = executors job_defaults = self . app . config . get ( 'SCHEDULER_JOB_DEFAULTS' ) if job_defaults : options [ 'job_defaults' ] = job_defaults timezone = self . app . config . get ( 'SCHEDULER_TIMEZONE' ) if timezone : options [ 'timezone' ] = timezone self . _scheduler . configure ( * * options ) self . auth = self . app . config . get ( 'SCHEDULER_AUTH' , self . auth ) self . api_enabled = self . app . config . get ( 'SCHEDULER_VIEWS_ENABLED' , self . api_enabled ) # for compatibility reason self . api_enabled = self . app . config . get ( 'SCHEDULER_API_ENABLED' , self . api_enabled ) self . api_prefix = self . app . config . get ( 'SCHEDULER_API_PREFIX' , self . api_prefix ) self . endpoint_prefix = self . app . config . get ( 'SCHEDULER_ENDPOINT_PREFIX' , self . endpoint_prefix ) self . allowed_hosts = self . app . config . get ( 'SCHEDULER_ALLOWED_HOSTS' , self . allowed_hosts )
Load the configuration from the Flask configuration .
402
8
225,618
def _load_jobs ( self ) : jobs = self . app . config . get ( 'SCHEDULER_JOBS' ) if not jobs : jobs = self . app . config . get ( 'JOBS' ) if jobs : for job in jobs : self . add_job ( * * job )
Load the job definitions from the Flask configuration .
67
9
225,619
def _load_api(self):
    """Register the REST API routes of the scheduler."""
    self._add_url_route('get_scheduler_info', '', api.get_scheduler_info, 'GET')
    self._add_url_route('add_job', '/jobs', api.add_job, 'POST')
    self._add_url_route('get_job', '/jobs/<job_id>', api.get_job, 'GET')
    self._add_url_route('get_jobs', '/jobs', api.get_jobs, 'GET')
    self._add_url_route('delete_job', '/jobs/<job_id>', api.delete_job, 'DELETE')
    self._add_url_route('update_job', '/jobs/<job_id>', api.update_job, 'PATCH')
    self._add_url_route('pause_job', '/jobs/<job_id>/pause', api.pause_job, 'POST')
    self._add_url_route('resume_job', '/jobs/<job_id>/resume', api.resume_job, 'POST')
    self._add_url_route('run_job', '/jobs/<job_id>/run', api.run_job, 'POST')
Add the routes for the scheduler API .
323
9
225,620
def _handle_authentication_error(self):
    """Build a 401 response carrying the WWW-Authenticate challenge."""
    response = make_response('Access Denied')
    response.headers['WWW-Authenticate'] = self.auth.get_authenticate_header()
    response.status_code = 401
    return response
Return an authentication error .
58
5
225,621
def get_scheduler_info():
    """Return host, allowed hosts and running state of the scheduler as JSON."""
    scheduler = current_app.apscheduler
    info = OrderedDict([
        ('current_host', scheduler.host_name),
        ('allowed_hosts', scheduler.allowed_hosts),
        ('running', scheduler.running),
    ])
    return jsonify(info)
Gets the scheduler info .
80
7
225,622
def add_job():
    """Create a new job from the JSON request body."""
    data = request.get_json(force=True)
    try:
        job = current_app.apscheduler.add_job(**data)
        return jsonify(job)
    except ConflictingIdError:
        return jsonify(dict(error_message='Job %s already exists.' % data.get('id')), status=409)
    except Exception as e:
        return jsonify(dict(error_message=str(e)), status=500)
Adds a new job .
110
5
225,623
def get_job(job_id):
    """Return a single job as JSON, or a 404 payload when it does not exist."""
    job = current_app.apscheduler.get_job(job_id)
    if not job:
        return jsonify(dict(error_message='Job %s not found' % job_id), status=404)
    return jsonify(job)
Gets a job .
66
5
225,624
def get_jobs():
    """Return every scheduled job as JSON."""
    jobs = current_app.apscheduler.get_jobs()
    job_states = list(jobs)
    return jsonify(job_states)
Gets all scheduled jobs .
50
6
225,625
def update_job(job_id):
    """Apply JSON-body modifications to a job and return the updated job."""
    data = request.get_json(force=True)
    try:
        current_app.apscheduler.modify_job(job_id, **data)
        job = current_app.apscheduler.get_job(job_id)
        return jsonify(job)
    except JobLookupError:
        return jsonify(dict(error_message='Job %s not found' % job_id), status=404)
    except Exception as e:
        return jsonify(dict(error_message=str(e)), status=500)
Updates a job .
128
5
225,626
def resume_job(job_id):
    """Resume a paused job and return its refreshed state as JSON."""
    try:
        current_app.apscheduler.resume_job(job_id)
        job = current_app.apscheduler.get_job(job_id)
        return jsonify(job)
    except JobLookupError:
        return jsonify(dict(error_message='Job %s not found' % job_id), status=404)
    except Exception as e:
        return jsonify(dict(error_message=str(e)), status=500)
Resumes a job .
112
5
225,627
def get_command(self, ctx: click.Context, name: str) -> click.Command:
    """Prefer a command from the loaded app's CLI, falling back to the base group."""
    info = ctx.ensure_object(ScriptInfo)
    command = None
    try:
        command = info.load_app().cli.get_command(ctx, name)
    except NoAppException:
        # No app could be loaded; use only the built-in commands.
        pass
    if command is None:
        command = super().get_command(ctx, name)
    return command
Return the relevant command given the context and name .
90
10
225,628
async def render_template(template_name_or_list: Union[str, List[str]], **context: Any) -> str:
    """Render the named (or first matching) template with the app-augmented context."""
    await current_app.update_template_context(context)
    template = current_app.jinja_env.get_or_select_template(template_name_or_list)
    return await _render(template, context)
Render the template with the context given .
84
8
225,629
async def render_template_string(source: str, **context: Any) -> str:
    """Render an inline template *source* with the app-augmented context."""
    await current_app.update_template_context(context)
    template = current_app.jinja_env.from_string(source)
    return await _render(template, context)
Render the template source with the context given .
62
9
225,630
def get_source(
        self,
        environment: Environment,
        template: str,
) -> Tuple[str, Optional[str], Callable]:
    """Return the first loader's source for *template*; raise when none has it."""
    for loader in self._loaders():
        try:
            return loader.get_source(environment, template)
        except TemplateNotFound:
            pass
    raise TemplateNotFound(template)
Returns the template source from the environment .
68
8
225,631
def list_templates(self) -> List[str]:
    """Return the de-duplicated template names known to every loader."""
    names = {str(template)
             for loader in self._loaders()
             for template in loader.list_templates()}
    return list(names)
Returns a list of all available templates in the environment .
56
12
225,632
def route(
        self,
        path: str,
        methods: Optional[List[str]] = None,
        endpoint: Optional[str] = None,
        defaults: Optional[dict] = None,
        host: Optional[str] = None,
        subdomain: Optional[str] = None,
        *,
        provide_automatic_options: Optional[bool] = None,
        strict_slashes: bool = True,
) -> Callable:
    """Decorator adding the wrapped function as a route on this blueprint."""
    def decorator(func: Callable) -> Callable:
        self.add_url_rule(
            path, endpoint, func, methods,
            defaults=defaults,
            host=host,
            subdomain=subdomain,
            provide_automatic_options=provide_automatic_options,
            strict_slashes=strict_slashes,
        )
        return func
    return decorator
Add a route to the blueprint .
158
7
225,633
def websocket(
        self,
        path: str,
        endpoint: Optional[str] = None,
        defaults: Optional[dict] = None,
        host: Optional[str] = None,
        subdomain: Optional[str] = None,
        *,
        strict_slashes: bool = True,
) -> Callable:
    """Decorator adding the wrapped function as a websocket on this blueprint."""
    def decorator(func: Callable) -> Callable:
        self.add_websocket(
            path, endpoint, func,
            defaults=defaults,
            host=host,
            subdomain=subdomain,
            strict_slashes=strict_slashes,
        )
        return func
    return decorator
Add a websocket to the blueprint .
120
8
225,634
def add_websocket(
        self,
        path: str,
        endpoint: Optional[str] = None,
        view_func: Optional[Callable] = None,
        defaults: Optional[dict] = None,
        host: Optional[str] = None,
        subdomain: Optional[str] = None,
        *,
        strict_slashes: bool = True,
) -> None:
    """Register a websocket rule by delegating to ``add_url_rule``."""
    return self.add_url_rule(
        path, endpoint, view_func, {'GET'},
        defaults=defaults,
        host=host,
        subdomain=subdomain,
        provide_automatic_options=False,
        is_websocket=True,
        strict_slashes=strict_slashes,
    )
Add a websocket rule to the blueprint .
141
9
225,635
def endpoint(self, endpoint: str) -> Callable:
    """Decorator binding *func* to *endpoint* when the blueprint is registered."""
    def decorator(func: Callable) -> Callable:
        def deferred(state):
            state.register_endpoint(endpoint, func)
        self.record_once(deferred)
        return func
    return decorator
Add an endpoint to the blueprint .
52
7
225,636
def before_request(self, func: Callable) -> Callable:
    """Register *func* to run before each request to this blueprint."""
    def deferred(state):
        state.app.before_request(func, self.name)
    self.record_once(deferred)
    return func
Add a before request function to the Blueprint .
42
9
225,637
def before_websocket(self, func: Callable) -> Callable:
    """Register *func* to run before each websocket to this blueprint."""
    def deferred(state):
        state.app.before_websocket(func, self.name)
    self.record_once(deferred)
    return func
Add a before request websocket to the Blueprint .
46
10
225,638
def before_app_request(self, func: Callable) -> Callable:
    """Register *func* to run before every request to the whole app."""
    def deferred(state):
        state.app.before_request(func)
    self.record_once(deferred)
    return func
Add a before request function to the app .
40
9
225,639
def before_app_websocket(self, func: Callable) -> Callable:
    """Register *func* to run before every websocket to the whole app."""
    def deferred(state):
        state.app.before_websocket(func)
    self.record_once(deferred)
    return func
Add a before request websocket to the App .
44
10
225,640
def before_app_first_request(self, func: Callable) -> Callable:
    """Register *func* to run before the app's very first request."""
    def deferred(state):
        state.app.before_first_request(func)
    self.record_once(deferred)
    return func
Add a before request first function to the app .
44
10
225,641
def after_request(self, func: Callable) -> Callable:
    """Register *func* to run after each request to this blueprint."""
    def deferred(state):
        state.app.after_request(func, self.name)
    self.record_once(deferred)
    return func
Add an after request function to the Blueprint .
42
9
225,642
def after_websocket(self, func: Callable) -> Callable:
    """Register *func* to run after each websocket to this blueprint."""
    def deferred(state):
        state.app.after_websocket(func, self.name)
    self.record_once(deferred)
    return func
Add an after websocket function to the Blueprint .
46
10
225,643
def after_app_request(self, func: Callable) -> Callable:
    """Register *func* to run after every request to the whole app."""
    def deferred(state):
        state.app.after_request(func)
    self.record_once(deferred)
    return func
Add a after request function to the app .
40
9
225,644
def after_app_websocket(self, func: Callable) -> Callable:
    """Register *func* to run after every websocket to the whole app."""
    def deferred(state):
        state.app.after_websocket(func)
    self.record_once(deferred)
    return func
Add an after websocket function to the App .
44
10
225,645
def teardown_request(self, func: Callable) -> Callable:
    """Register *func* to run at teardown of each request to this blueprint."""
    def deferred(state):
        state.app.teardown_request(func, self.name)
    self.record_once(deferred)
    return func
Add a teardown request function to the Blueprint .
46
11
225,646
def teardown_websocket(self, func: Callable) -> Callable:
    """Register *func* to run at teardown of each websocket to this blueprint."""
    def deferred(state):
        state.app.teardown_websocket(func, self.name)
    self.record_once(deferred)
    return func
Add a teardown websocket function to the Blueprint .
50
12
225,647
def teardown_app_request(self, func: Callable) -> Callable:
    """Register *func* to run at teardown of every request to the whole app."""
    def deferred(state):
        state.app.teardown_request(func)
    self.record_once(deferred)
    return func
Add a teardown request function to the app .
44
11
225,648
def errorhandler(self, error: Union[Type[Exception], int]) -> Callable:
    """Decorator form of ``register_error_handler`` for this blueprint."""
    def decorator(func: Callable) -> Callable:
        self.register_error_handler(error, func)
        return func
    return decorator
Add an error handler function to the Blueprint .
52
9
225,649
def app_errorhandler(self, error: Union[Type[Exception], int]) -> Callable:
    """Decorator registering an app-wide error handler from this blueprint."""
    def decorator(func: Callable) -> Callable:
        def deferred(state):
            state.app.register_error_handler(error, func)
        self.record_once(deferred)
        return func
    return decorator
Add an error handler function to the App .
66
9
225,650
def register_error_handler(self, error: Union[Type[Exception], int], func: Callable) -> None:
    """Defer registration of an error handler scoped to this blueprint."""
    def deferred(state):
        state.app.register_error_handler(error, func, self.name)
    self.record_once(deferred)
Add an error handler function to the blueprint .
57
9
225,651
def context_processor(self, func: Callable) -> Callable:
    """Register a template context processor scoped to this blueprint."""
    def deferred(state):
        state.app.context_processor(func, self.name)
    self.record_once(deferred)
    return func
Add a context processor function to this blueprint .
42
9
225,652
def app_context_processor(self, func: Callable) -> Callable:
    """Register an app-wide template context processor from this blueprint."""
    def deferred(state):
        state.app.context_processor(func)
    self.record_once(deferred)
    return func
Add a context processor function to the app .
40
9
225,653
def record_once(self, func: DeferedSetupFunction) -> None:
    """Record a deferred setup action that runs only on first registration."""
    def wrapper(state: 'BlueprintSetupState') -> None:
        if state.first_registration:
            func(state)
    self.record(update_wrapper(wrapper, func))
Used to register a deferred action that happens only once .
57
11
225,654
def register(
        self,
        app: 'Quart',
        first_registration: bool,
        *,
        url_prefix: Optional[str] = None,
) -> None:
    """Register this blueprint on *app* and run all deferred setup functions."""
    state = self.make_setup_state(app, first_registration, url_prefix=url_prefix)
    if self.has_static_folder:
        # Serve the blueprint's static files under its static URL path.
        state.add_url_rule(
            self.static_url_path + '/<path:filename>',
            view_func=self.send_static_file,
            endpoint='static',
        )
    for deferred in self.deferred_functions:
        deferred(state)
Register this blueprint on the app given .
127
8
225,655
def make_setup_state(
        self,
        app: 'Quart',
        first_registration: bool,
        *,
        url_prefix: Optional[str] = None,
) -> 'BlueprintSetupState':
    """Create the setup-state object used while registering on *app*."""
    return BlueprintSetupState(self, app, first_registration, url_prefix=url_prefix)
Return a blueprint setup state instance .
67
7
225,656
def to_dict(self, flat: bool = True) -> Dict[Any, Any]:
    """Return the contents as a plain dict.

    With ``flat=False`` every key maps to the list of all its values.
    """
    if flat:
        return dict(self.items())
    return {key: self.getall(key) for key in self}
Convert the multidict to a plain dictionary .
63
11
225,657
def save(self, destination: BinaryIO, buffer_size: int = 16384) -> None:
    """Copy the file's stream to *destination* (a path or binary file object).

    A destination opened here (from a path) is always closed again.
    """
    opened_here = isinstance(destination, str)
    if opened_here:
        destination = open(destination, 'wb')
    try:
        copyfileobj(self.stream, destination, buffer_size)
    finally:
        if opened_here:
            destination.close()
Save the file to the destination .
82
7
225,658
async def get_data(self, raw: bool = True) -> AnyStr:
    """Return the request body, as bytes or decoded text, honouring the body timeout."""
    try:
        body_future = asyncio.ensure_future(self.body)
        raw_data = await asyncio.wait_for(body_future, timeout=self.body_timeout)
    except asyncio.TimeoutError:
        body_future.cancel()
        from ..exceptions import RequestTimeout  # noqa Avoiding circular import
        raise RequestTimeout()
    return raw_data if raw else raw_data.decode(self.charset)
The request body data .
115
5
225,659
async def accept(
        self,
        headers: Optional[Union[dict, CIMultiDict, Headers]] = None,
        subprotocol: Optional[str] = None,
) -> None:
    """Manually accept the websocket connection, with optional extra headers."""
    final_headers = Headers() if headers is None else Headers(headers)
    await self._accept(final_headers, subprotocol)
Manually chose to accept the websocket connection .
79
10
225,660
def create_logger(app: 'Quart') -> Logger:
    """Return the ``quart.app`` logger, raised to DEBUG when the app is debugging."""
    logger = getLogger('quart.app')
    if app.debug and logger.level == NOTSET:
        logger.setLevel(DEBUG)
    logger.addHandler(default_handler)
    return logger
Create a logger for the app based on the app settings .
59
12
225,661
def create_serving_logger() -> Logger:
    """Return the ``quart.serving`` logger, defaulting its level to INFO."""
    logger = getLogger('quart.serving')
    if logger.level == NOTSET:
        logger.setLevel(INFO)
    logger.addHandler(serving_handler)
    return logger
Create a logger for serving .
51
6
225,662
def create_cookie(
        key: str,
        value: str = '',
        max_age: Optional[Union[int, timedelta]] = None,
        expires: Optional[Union[int, float, datetime]] = None,
        path: str = '/',
        domain: Optional[str] = None,
        secure: bool = False,
        httponly: bool = False,
) -> SimpleCookie:
    """Create a ``SimpleCookie`` holding *key* = *value* with the given attributes.

    Arguments:
        max_age: lifetime in seconds, as an int or a ``timedelta``.
        expires: absolute expiry, as a unix timestamp or a ``datetime``.
    """
    cookie = SimpleCookie()
    cookie[key] = value
    cookie[key]['path'] = path
    cookie[key]['httponly'] = httponly  # type: ignore
    cookie[key]['secure'] = secure  # type: ignore
    if isinstance(max_age, timedelta):
        # Bug fix: f"{max_age.total_seconds():d}" raised ValueError because
        # total_seconds() returns a float and ':d' only formats integers.
        # Truncate to whole seconds instead.
        cookie[key]['max-age'] = str(int(max_age.total_seconds()))
    if isinstance(max_age, int):
        cookie[key]['max-age'] = str(max_age)
    if expires is not None and isinstance(expires, (int, float)):
        cookie[key]['expires'] = format_date_time(int(expires))
    elif expires is not None and isinstance(expires, datetime):
        # NOTE(review): assumes a naive ``expires`` datetime is in UTC — confirm callers.
        cookie[key]['expires'] = format_date_time(
            expires.replace(tzinfo=timezone.utc).timestamp())
    if domain is not None:
        cookie[key]['domain'] = domain
    return cookie
Create a Cookie given the options set
317
7
225,663
def name(self) -> str:
    """The application's name, derived from ``__main__``'s filename when needed."""
    if self.import_name == '__main__':
        main_file = getattr(sys.modules['__main__'], '__file__', '__main__.py')
        return Path(main_file).stem
    return self.import_name
The name of this application .
64
6
225,664
def jinja_env(self) -> Environment:
    """The jinja environment, created lazily on first access."""
    if self._jinja_env is None:
        self._jinja_env = self.create_jinja_environment()
    return self._jinja_env
The jinja environment used to load templates .
49
10
225,665
def auto_find_instance_path(self) -> Path:
    """Locate the instance path when it was not explicitly provided."""
    prefix, package_path = find_package(self.import_name)
    if prefix is None:
        return package_path / "instance"
    return prefix / "var" / f"{self.name}-instance"
Locates the instance_path if it was not provided
60
12
225,666
def make_config(self, instance_relative: bool = False) -> Config:
    """Create the app configuration with environment-derived defaults."""
    root = self.instance_path if instance_relative else self.root_path
    config = self.config_class(root, DEFAULT_CONFIG)
    config['ENV'] = get_env()
    config['DEBUG'] = get_debug_flag()
    return config
Create and return the configuration with appropriate defaults .
78
9
225,667
def create_url_adapter(self, request: Optional[BaseRequestWebsocket]) -> Optional[MapAdapter]:
    """Bind the URL map to *request*, or to SERVER_NAME when no request exists."""
    if request is not None:
        return self.url_map.bind_to_request(
            request.scheme, request.host, request.method,
            request.path, request.query_string,
        )
    if self.config['SERVER_NAME'] is not None:
        return self.url_map.bind(
            self.config['PREFERRED_URL_SCHEME'], self.config['SERVER_NAME'],
        )
    return None
Create and return a URL adapter .
131
7
225,668
def create_jinja_environment(self) -> Environment:
    """Build the jinja environment with Quart globals and filters installed."""
    options = dict(self.jinja_options)
    options.setdefault('autoescape', self.select_jinja_autoescape)
    if 'auto_reload' not in options:
        options['auto_reload'] = self.config['TEMPLATES_AUTO_RELOAD'] or self.debug
    jinja_env = self.jinja_environment(self, **options)
    jinja_env.globals.update({
        'config': self.config,
        'g': g,
        'get_flashed_messages': get_flashed_messages,
        'request': request,
        'session': session,
        'url_for': url_for,
    })
    jinja_env.filters['tojson'] = tojson_filter
    return jinja_env
Create and return the jinja environment .
208
9
225,669
def select_jinja_autoescape(self, filename: str) -> bool:
    """Autoescape markup-like template files; unnamed templates are escaped too."""
    if filename is None:
        return True
    return Path(filename).suffix in {'.htm', '.html', '.xhtml', '.xml'}
Returns True if the filename indicates that it should be escaped .
52
12
225,670
async def update_template_context(self, context: dict) -> None:
    """Run the context processors, letting explicit *context* keys win."""
    processors = self.template_context_processors[None]
    if has_request_context():
        blueprint = _request_ctx_stack.top.request.blueprint
        if blueprint is not None and blueprint in self.template_context_processors:
            processors = chain(processors, self.template_context_processors[blueprint])  # type: ignore # noqa
    extra_context: dict = {}
    for processor in processors:
        extra_context.update(await processor())
    # Re-apply the caller's values so they always override processor output.
    original = context.copy()
    context.update(extra_context)
    context.update(original)
Update the provided template context .
141
6
225,671
def make_shell_context(self) -> dict:
    """Build the variable namespace for interactive shell usage."""
    context = {'app': self, 'g': g}
    for processor in self.shell_context_processors:
        context.update(processor())
    return context
Create a context for interactive shell usage .
49
8
225,672
def endpoint(self, endpoint: str) -> Callable:
    """Decorator binding a view function to *endpoint*."""
    def decorator(func: Callable) -> Callable:
        self.view_functions[endpoint] = ensure_coroutine(func)
        return func
    return decorator
Register a function as an endpoint .
51
7
225,673
def register_error_handler(
        self,
        error: Union[Type[Exception], int],
        func: Callable,
        name: AppOrBlueprintKey = None,
) -> None:
    """Register *func* to handle *error* (an exception class or HTTP status code)."""
    handler = ensure_coroutine(func)
    if isinstance(error, int):
        # Translate a status code into its HTTPException class.
        error = all_http_exceptions[error]
    self.error_handler_spec[name][error] = handler
Register a function as an error handler .
84
8
225,674
def context_processor(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    """Add a template context processor, coerced to a coroutine."""
    self.template_context_processors[name].append(ensure_coroutine(func))
    return func
Add a template context processor .
50
6
225,675
def shell_context_processor(self, func: Callable) -> Callable:
    """Add a shell context processor and return *func* unchanged."""
    self.shell_context_processors.append(func)
    return func
Add a shell context processor .
33
6
225,676
def inject_url_defaults(self, endpoint: str, values: dict) -> None:
    """Run the URL value preprocessors (global, then blueprint) on *values*."""
    functions = self.url_value_preprocessors[None]
    if '.' in endpoint:
        blueprint = endpoint.rsplit('.', 1)[0]
        functions = chain(functions, self.url_value_preprocessors[blueprint])  # type: ignore
    for function in functions:
        function(endpoint, values)
Injects default URL values into the passed values dict .
90
12
225,677
def handle_url_build_error(self, error: Exception, endpoint: str, values: dict) -> str:
    """Give each registered handler a chance to resolve a build error; else re-raise."""
    for handler in self.url_build_error_handlers:
        result = handler(error, endpoint, values)
        if result is not None:
            return result
    raise error
Handle a build error .
60
5
225,678
async def handle_http_exception(self, error: Exception) -> Response:
    """Invoke a registered handler for the HTTPException, else use its default response."""
    handler = self._find_exception_handler(error)
    if handler is None:
        return error.get_response()  # type: ignore
    return await handler(error)
Handle a HTTPException subclass error .
58
7
225,679
async def handle_user_exception(self, error: Exception) -> Response:
    """Dispatch a raised exception to its handler, re-raising when none exists."""
    if isinstance(error, HTTPException) and not self.trap_http_exception(error):
        return await self.handle_http_exception(error)
    handler = self._find_exception_handler(error)
    if handler is None:
        raise error
    return await handler(error)
Handle an exception that has been raised .
82
8
225,680
def before_request(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    """Register *func* to run before each request.

    Usable as a decorator; *func* is returned unchanged.
    """
    self.before_request_funcs[name].append(ensure_coroutine(func))
    return func
Add a before request function .
53
6
225,681
def before_websocket(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    """Register *func* to run before each websocket connection.

    Usable as a decorator; *func* is returned unchanged.
    """
    self.before_websocket_funcs[name].append(ensure_coroutine(func))
    return func
Add a before websocket function .
57
7
225,682
def before_serving(self, func: Callable) -> Callable:
    """Register *func* to run before the app starts serving.

    Usable as a decorator; *func* is returned unchanged.
    """
    self.before_serving_funcs.append(ensure_coroutine(func))
    return func
Add a before serving function .
40
6
225,683
def after_request(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    """Register *func* to run after each request.

    Usable as a decorator; *func* is returned unchanged.
    """
    self.after_request_funcs[name].append(ensure_coroutine(func))
    return func
Add an after request function .
53
6
225,684
def after_websocket(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    """Register *func* to run after each websocket connection.

    Usable as a decorator; *func* is returned unchanged.
    """
    self.after_websocket_funcs[name].append(ensure_coroutine(func))
    return func
Add an after websocket function .
57
7
225,685
def after_serving(self, func: Callable) -> Callable:
    """Register *func* to run after the app stops serving.

    Usable as a decorator; *func* is returned unchanged.
    """
    self.after_serving_funcs.append(ensure_coroutine(func))
    return func
Add an after serving function .
40
6
225,686
def teardown_request(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    """Register *func* to run during request teardown.

    Usable as a decorator; *func* is returned unchanged.
    """
    self.teardown_request_funcs[name].append(ensure_coroutine(func))
    return func
Add a teardown request function .
57
8
225,687
def teardown_websocket(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    """Register *func* to run during websocket teardown.

    Usable as a decorator; *func* is returned unchanged.
    """
    self.teardown_websocket_funcs[name].append(ensure_coroutine(func))
    return func
Add a teardown websocket function .
61
9
225,688
def register_blueprint(self, blueprint: Blueprint, url_prefix: Optional[str] = None) -> None:
    """Register *blueprint* on this app, optionally under *url_prefix*.

    Raises RuntimeError when a *different* blueprint is already
    registered under the same name; re-registering the same object is
    permitted.

    NOTE(review): ``first_registration`` is True on every successful
    call, including re-registration of the same object — the original's
    initial ``False`` was dead code. Confirm whether re-registration
    should pass False instead (Flask does).
    """
    registered = self.blueprints.get(blueprint.name)
    if registered is not None and registered is not blueprint:
        raise RuntimeError(
            f"Blueprint name '{blueprint.name}' "
            f"is already registered by {self.blueprints[blueprint.name]}. "
            "Blueprints must have unique names",
        )
    self.blueprints[blueprint.name] = blueprint
    blueprint.register(self, True, url_prefix=url_prefix)
Register a blueprint on the app .
140
7
225,689
async def open_session(self, request: BaseRequestWebsocket) -> Session:
    """Create and return a session for *request* via the session interface."""
    opener = ensure_coroutine(self.session_interface.open_session)
    return await opener(self, request)
Open and return a Session using the request .
42
9
225,690
async def save_session(self, session: Session, response: Response) -> None:
    """Persist *session* onto *response* via the session interface."""
    saver = ensure_coroutine(self.session_interface.save_session)
    await saver(self, session, response)
Saves the session to the response .
43
8
225,691
async def do_teardown_request(
    self,
    exc: Optional[BaseException],
    request_context: Optional[RequestContext] = None,
) -> None:
    """Run teardown-request callbacks, then emit ``request_tearing_down``.

    Uses the request from *request_context*, falling back to the active
    context; app-level callbacks run before the blueprint's.
    """
    request_ = (request_context or _request_ctx_stack.top).request
    callbacks = self.teardown_request_funcs[None]
    blueprint = request_.blueprint
    if blueprint is not None:
        callbacks = chain(callbacks, self.teardown_request_funcs[blueprint])  # type: ignore
    for callback in callbacks:
        await callback(exc=exc)
    await request_tearing_down.send(self, exc=exc)
Teardown the request calling the teardown functions .
135
12
225,692
async def do_teardown_websocket(
    self,
    exc: Optional[BaseException],
    websocket_context: Optional[WebsocketContext] = None,
) -> None:
    """Run teardown-websocket callbacks, then emit ``websocket_tearing_down``.

    Uses the websocket from *websocket_context*, falling back to the
    active context; app-level callbacks run before the blueprint's.
    """
    websocket_ = (websocket_context or _websocket_ctx_stack.top).websocket
    callbacks = self.teardown_websocket_funcs[None]
    blueprint = websocket_.blueprint
    if blueprint is not None:
        callbacks = chain(callbacks, self.teardown_websocket_funcs[blueprint])  # type: ignore
    for callback in callbacks:
        await callback(exc=exc)
    await websocket_tearing_down.send(self, exc=exc)
Teardown the websocket calling the teardown functions .
150
13
225,693
def run(
    self,
    host: str = '127.0.0.1',
    port: int = 5000,
    debug: Optional[bool] = None,
    use_reloader: bool = True,
    loop: Optional[asyncio.AbstractEventLoop] = None,
    ca_certs: Optional[str] = None,
    certfile: Optional[str] = None,
    keyfile: Optional[str] = None,
    **kwargs: Any,
) -> None:
    """Serve this application for development via Hypercorn.

    Unrecognised keyword arguments are ignored with a warning — this
    entry point is meant for development and debugging only.
    """
    if kwargs:
        warnings.warn(
            f"Additional arguments, {','.join(kwargs.keys())}, are not supported.\n"
            "They may be supported by Hypercorn, which is the ASGI server Quart "
            "uses by default. This method is meant for development and debugging."
        )

    # Build the Hypercorn serving configuration.
    config = HyperConfig()
    config.access_log_format = "%(h)s %(r)s %(s)s %(b)s %(D)s"
    config.access_logger = create_serving_logger()  # type: ignore
    config.bind = [f"{host}:{port}"]
    config.ca_certs = ca_certs
    config.certfile = certfile
    if debug is not None:
        self.debug = debug
    config.error_logger = config.access_logger  # type: ignore
    config.keyfile = keyfile
    config.use_reloader = use_reloader

    scheme = 'https' if config.ssl_enabled else 'http'
    print("Running on {}://{} (CTRL + C to quit)".format(scheme, config.bind[0]))  # noqa: T001

    if loop is not None:
        # Caller supplied a loop: serve on it directly.
        loop.set_debug(debug or False)
        loop.run_until_complete(serve(self, config))
    else:
        asyncio.run(serve(self, config), debug=config.debug)
Run this application .
405
4
225,694
async def try_trigger_before_first_request_functions(self) -> None:
    """Run the before-first-request callbacks exactly once.

    Also reverses the teardown registries so teardown callbacks fire in
    the opposite order to their registration.
    """
    if self._got_first_request:
        return

    # Flip each teardown registry once, before the first request runs.
    self.teardown_appcontext_funcs = list(reversed(self.teardown_appcontext_funcs))
    for registry in (self.teardown_request_funcs, self.teardown_websocket_funcs):
        for key, callbacks in registry.items():
            registry[key] = list(reversed(callbacks))

    async with self._first_request_lock:
        if self._got_first_request:  # lost the race — another task ran them
            return
        for callback in self.before_first_request_funcs:
            await callback()
        self._got_first_request = True
Trigger the before first request methods .
212
7
225,695
async def make_default_options_response(self) -> Response:
    """Answer an OPTIONS request with the methods the matched rule allows."""
    allowed = _request_ctx_stack.top.url_adapter.allowed_methods()
    return self.response_class('', headers={'Allow': ', '.join(allowed)})
This is the default route function for OPTIONS requests .
63
11
225,696
async def make_response(self, result: ResponseReturnValue) -> Response:
    """Convert a view function's return value into a Response.

    *result* may be a Response instance, a plain value, or a tuple of
    ``(value,)``, ``(value, status)``, ``(value, headers)`` or
    ``(value, status, headers)``.

    Raises TypeError when the value part is None.
    """
    status_or_headers = None
    headers = None
    status = None
    if isinstance(result, tuple):
        # Pad short tuples with None so the 3-way unpack always works.
        value, status_or_headers, headers = result + (None,) * (3 - len(result))
    else:
        value = result
    if value is None:
        raise TypeError('The response value returned by the view function cannot be None')
    if isinstance(status_or_headers, (dict, list)):
        # Two-tuple form: the second element is headers, not a status.
        headers = status_or_headers
        status = None
    elif status_or_headers is not None:
        status = status_or_headers
    if not isinstance(value, Response):
        # Wrap plain values in the app's response class.
        response = self.response_class(  # type: ignore
            value,
            timeout=self.config['RESPONSE_TIMEOUT'],
        )
    else:
        response = value
    if status is not None:
        response.status_code = status  # type: ignore
    if headers is not None:
        response.headers.update(headers)  # type: ignore
    return response
Make a Response from the result of the route handler .
222
11
225,697
async def full_dispatch_request(
    self,
    request_context: Optional[RequestContext] = None,
) -> Response:
    """Dispatch the request with full pre- and post-processing.

    Triggers the before-first-request hooks and the ``request_started``
    signal, runs the before-request callbacks, dispatches the view, and
    routes any exception through the user error handlers before
    finalising the response.
    """
    await self.try_trigger_before_first_request_functions()
    await request_started.send(self)
    try:
        result = await self.preprocess_request(request_context)
        # A non-None preprocess result short-circuits the view.
        result = result if result is not None else await self.dispatch_request(request_context)
    except Exception as exc:
        result = await self.handle_user_exception(exc)
    return await self.finalize_request(result, request_context)
Adds pre and post processing to the request dispatching .
122
11
225,698
async def preprocess_request(
    self,
    request_context: Optional[RequestContext] = None,
) -> Optional[ResponseReturnValue]:
    """Run URL value preprocessors and before-request callbacks.

    Returns the first non-None before-request result (which
    short-circuits normal dispatch), otherwise None.

    Fix: consistently use the request taken from *request_context*
    (falling back to the active context) — the original read the global
    ``request`` proxy for endpoint/view_args, silently ignoring an
    explicitly supplied context.
    """
    request_ = (request_context or _request_ctx_stack.top).request
    blueprint = request_.blueprint

    # App-level processors first, then the blueprint's.
    processors = self.url_value_preprocessors[None]
    if blueprint is not None:
        processors = chain(processors, self.url_value_preprocessors[blueprint])  # type: ignore
    for processor in processors:
        processor(request_.endpoint, request_.view_args)

    functions = self.before_request_funcs[None]
    if blueprint is not None:
        functions = chain(functions, self.before_request_funcs[blueprint])  # type: ignore
    for function in functions:
        result = await function()
        if result is not None:
            return result
    return None
Preprocess the request i . e . call before_request functions .
178
14
225,699
async def dispatch_request(
    self,
    request_context: Optional[RequestContext] = None,
) -> ResponseReturnValue:
    """Invoke the view function matched to the request.

    Re-raises any routing error recorded on the request, and answers
    OPTIONS automatically when the matched rule allows it.
    """
    request_ = (request_context or _request_ctx_stack.top).request
    if request_.routing_exception is not None:
        raise request_.routing_exception

    if request_.method == 'OPTIONS' and request_.url_rule.provide_automatic_options:
        return await self.make_default_options_response()

    view = self.view_functions[request_.url_rule.endpoint]
    return await view(**request_.view_args)
Dispatch the request to the view function .
135
8