idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
26,200 | def collect ( since , to , top = DEFAULT_TOP ) : summary = CspReportSummary ( since , to , top = top ) queryset = CSPReport . objects . filter ( created__range = ( since , to ) ) valid_queryset = queryset . filter ( is_valid = True ) invalid_queryset = queryset . filter ( is_valid = False ) summary . total_count = quer... | Collect the CSP report . |
26,201 | def append ( self , report ) : assert report not in self . examples self . count += 1 if len ( self . examples ) < self . top : self . examples . append ( report ) | Append a new CSP report . |
26,202 | def render ( self ) : engine = Engine ( ) return engine . from_string ( SUMMARY_TEMPLATE ) . render ( Context ( self . __dict__ ) ) | Render the summary . |
26,203 | def _parse_date_input ( date_input , default_offset = 0 ) : if date_input : try : return parse_date_input ( date_input ) except ValueError as err : raise CommandError ( force_text ( err ) ) else : return get_midnight ( ) - timedelta ( days = default_offset ) | Parses a date input . |
26,204 | def nice_report ( self ) : if not self . json : return '[no CSP report data]' try : data = json . loads ( self . json ) except ValueError : return "Invalid CSP report: '{}'" . format ( self . json ) if 'csp-report' not in data : return 'Invalid CSP report: ' + json . dumps ( data , indent = 4 , sort_keys = True , separ... | Return a nicely formatted original report . |
26,205 | def from_message ( cls , message ) : self = cls ( json = message ) try : decoded_data = json . loads ( message ) except ValueError : return self try : report_data = decoded_data [ 'csp-report' ] except KeyError : return self for report_name , field_name in REQUIRED_FIELD_MAP + OPTIONAL_FIELD_MAP : setattr ( self , fiel... | Creates an instance from CSP report message . |
26,206 | def data ( self ) : try : data = self . _data except AttributeError : data = self . _data = json . loads ( self . json ) return data | Returns self . json loaded as a python object . |
26,207 | def json_as_html ( self ) : from cspreports import utils formatted_json = utils . format_report ( self . json ) return mark_safe ( "<pre>\n%s</pre>" % escape ( formatted_json ) ) | Print out self . json in a nice way . |
26,208 | def process_report ( request ) : if config . EMAIL_ADMINS : email_admins ( request ) if config . LOG : log_report ( request ) if config . SAVE : save_report ( request ) if config . ADDITIONAL_HANDLERS : run_additional_handlers ( request ) | Given the HTTP request of a CSP violation report log it in the required ways . |
26,209 | def get_additional_handlers ( ) : global _additional_handlers if not isinstance ( _additional_handlers , list ) : handlers = [ ] for name in config . ADDITIONAL_HANDLERS : module_name , function_name = name . rsplit ( '.' , 1 ) function = getattr ( import_module ( module_name ) , function_name ) handlers . append ( fun... | Returns the actual functions from the dotted paths specified in ADDITIONAL_HANDLERS . |
26,210 | def parse_date_input ( value ) : try : limit = parse_date ( value ) except ValueError : limit = None if limit is None : raise ValueError ( "'{}' is not a valid date." . format ( value ) ) limit = datetime ( limit . year , limit . month , limit . day ) if settings . USE_TZ : limit = make_aware ( limit ) return limit | Return datetime based on the user's input . |
26,211 | def get_midnight ( ) : limit = now ( ) if settings . USE_TZ : limit = localtime ( limit ) return limit . replace ( hour = 0 , minute = 0 , second = 0 , microsecond = 0 ) | Return last midnight in localtime as datetime . |
26,212 | def python_job ( self , function , parameters = None ) : if not callable ( function ) : raise utils . StimelaCabRuntimeError ( 'Object given as function is not callable' ) if self . name is None : self . name = function . __name__ self . job = { 'function' : function , 'parameters' : parameters , } return 0 | Run python function |
26,213 | def pull ( image , store_path , docker = True ) : if docker : fp = "docker://{0:s}" . format ( image ) else : fp = image utils . xrun ( "singularity" , [ "pull" , "--force" , "--name" , store_path , fp ] ) return 0 | pull an image |
26,214 | def start ( self , * args ) : if self . volumes : volumes = " --bind " + " --bind " . join ( self . volumes ) else : volumes = "" self . _print ( "Instantiating container [{0:s}]. Timeout set to {1:d}. The container ID is printed below." . format ( self . name , self . time_out ) ) utils . xrun ( "singularity instance.... | Create a singularity container instance |
26,215 | def run ( self , * args ) : if self . volumes : volumes = " --bind " + " --bind " . join ( self . volumes ) else : volumes = "" self . _print ( "Starting container [{0:s}]. Timeout set to {1:d}. The container ID is printed below." . format ( self . name , self . time_out ) ) utils . xrun ( "singularity run" , [ "instan... | Run a singularity container instance |
26,216 | def stop ( self , * args ) : if self . volumes : volumes = " --bind " + " --bind " . join ( self . volumes ) else : volumes = "" self . _print ( "Stopping container [{}]. The container ID is printed below." . format ( self . name ) ) utils . xrun ( "singularity" , [ "instance.stop {0:s}" . format ( self . name ) ] ) se... | Stop a singularity container instance |
26,217 | def build ( image , build_path , tag = None , build_args = None , fromline = None , args = [ ] ) : if tag : image = ":" . join ( [ image , tag ] ) bdir = tempfile . mkdtemp ( ) os . system ( 'cp -r {0:s}/* {1:s}' . format ( build_path , bdir ) ) if build_args : stdw = tempfile . NamedTemporaryFile ( dir = bdir , mode =... | build a docker image |
26,218 | def pull ( image , tag = None ) : if tag : image = ":" . join ( [ image , tag ] ) utils . xrun ( "docker pull" , [ image ] ) | pull a docker image |
26,219 | def info ( cabdir , header = False ) : pfile = "{}/parameters.json" . format ( cabdir ) if not os . path . exists ( pfile ) : raise RuntimeError ( "Cab could not be found at : {}" . format ( cabdir ) ) cab_definition = cab . CabDefinition ( parameter_file = pfile ) cab_definition . display ( header ) | prints out help information about a cab |
26,220 | def xrun ( command , options , log = None , _log_container_as_started = False , logfile = None , timeout = - 1 , kill_callback = None ) : cmd = " " . join ( [ command ] + list ( map ( str , options ) ) ) def _print_info ( msg ) : if msg is None : return if log : log . info ( msg ) else : print ( msg ) def _print_warn (... | Run something on command line . |
26,221 | def sumcols ( msname , col1 = None , col2 = None , outcol = None , cols = None , suntract = False ) : from pyrap . tables import table tab = table ( msname , readonly = False ) if cols : data = 0 for col in cols : data += tab . getcol ( col ) else : if subtract : data = tab . getcol ( col1 ) - tab . getcol ( col2 ) els... | add col1 to col2 or sum columns in cols list . If subtract subtract col2 from col1 |
26,222 | def compute_vis_noise ( msname , sefd , spw_id = 0 ) : from pyrap . tables import table tab = table ( msname ) spwtab = table ( msname + "/SPECTRAL_WINDOW" ) freq0 = spwtab . getcol ( "CHAN_FREQ" ) [ spw_id , 0 ] wavelength = 300e+6 / freq0 bw = spwtab . getcol ( "CHAN_WIDTH" ) [ spw_id , 0 ] dt = tab . getcol ( "EXPOS... | Computes nominal per - visibility noise |
26,223 | def fitsInfo ( fitsname = None ) : hdu = pyfits . open ( fitsname ) hdr = hdu [ 0 ] . header ra = hdr [ 'CRVAL1' ] dra = abs ( hdr [ 'CDELT1' ] ) raPix = hdr [ 'CRPIX1' ] dec = hdr [ 'CRVAL2' ] ddec = abs ( hdr [ 'CDELT2' ] ) decPix = hdr [ 'CRPIX2' ] freq0 = 0 for i in range ( 1 , hdr [ 'NAXIS' ] + 1 ) : if hdr [ 'CTY... | Get fits info |
26,224 | def sky2px ( wcs , ra , dec , dra , ddec , cell , beam ) : dra = beam if dra < beam else dra ddec = beam if ddec < beam else ddec offsetDec = int ( ( ddec / 2. ) / cell ) offsetRA = int ( ( dra / 2. ) / cell ) if offsetDec % 2 == 1 : offsetDec += 1 if offsetRA % 2 == 1 : offsetRA += 1 raPix , decPix = map ( int , wcs .... | convert a sky region to pixel positions |
26,225 | def get_components ( self , root = 'C' , visible = False ) : root_val = note_to_val ( root ) components = [ v + root_val for v in self . components ] if visible : components = [ val_to_note ( c , scale = root ) for c in components ] return components | Get components of chord quality |
26,226 | def append_on_chord ( self , on_chord , root ) : root_val = note_to_val ( root ) on_chord_val = note_to_val ( on_chord ) - root_val list_ = list ( self . components ) for idx , val in enumerate ( list_ ) : if val % 12 == on_chord_val : self . components . remove ( val ) break if on_chord_val > root_val : on_chord_val -... | Append on chord |
26,227 | def append_note ( self , note , root , scale = 0 ) : root_val = note_to_val ( root ) note_val = note_to_val ( note ) - root_val + scale * 12 if note_val not in self . components : self . components . append ( note_val ) self . components . sort ( ) | Append a note to quality |
26,228 | def append_notes ( self , notes , root , scale = 0 ) : for note in notes : self . append_note ( note , root , scale ) | Append notes to quality |
26,229 | def insert ( self , index , chord ) : self . _chords . insert ( index , as_chord ( chord ) ) | Insert a chord to chord progressions |
26,230 | def as_chord ( chord ) : if isinstance ( chord , Chord ) : return chord elif isinstance ( chord , str ) : return Chord ( chord ) else : raise TypeError ( "input type should be str or Chord instance." ) | convert from str to Chord instance if input is str |
26,231 | def transpose ( self , trans , scale = "C" ) : if not isinstance ( trans , int ) : raise TypeError ( "Expected integers, not {}" . format ( type ( trans ) ) ) self . _root = transpose_note ( self . _root , trans , scale ) if self . _on : self . _on = transpose_note ( self . _on , trans , scale ) self . _reconfigure_cho... | Transpose the chord |
26,232 | def components ( self , visible = True ) : if self . _on : self . _quality . append_on_chord ( self . on , self . root ) return self . _quality . get_components ( root = self . _root , visible = visible ) | Return the component notes of chord |
26,233 | def _parse ( self , chord ) : root , quality , appended , on = parse ( chord ) self . _root = root self . _quality = quality self . _appended = appended self . _on = on | parse a chord |
26,234 | def transpose_note ( note , transpose , scale = "C" ) : val = note_to_val ( note ) val += transpose return val_to_note ( val , scale ) | Transpose a note |
26,235 | def parse ( chord ) : if len ( chord ) > 1 and chord [ 1 ] in ( "b" , "#" ) : root = chord [ : 2 ] rest = chord [ 2 : ] else : root = chord [ : 1 ] rest = chord [ 1 : ] check_note ( root , chord ) on_chord_idx = rest . find ( "/" ) if on_chord_idx >= 0 : on = rest [ on_chord_idx + 1 : ] rest = rest [ : on_chord_idx ] c... | Parse a string to get chord component |
26,236 | def check_note ( note , chord ) : if note not in NOTE_VAL_DICT : raise ValueError ( "Invalid chord {}: Unknown note {}" . format ( chord , note ) ) return True | Return True if the note is valid . |
26,237 | def note_to_chord ( notes ) : if not notes : raise ValueError ( "Please specify notes which consist a chord." ) root = notes [ 0 ] root_and_positions = [ ] for rotated_notes in get_all_rotated_notes ( notes ) : rotated_root = rotated_notes [ 0 ] root_and_positions . append ( [ rotated_root , notes_to_positions ( rotate... | Convert note list to chord list |
26,238 | def notes_to_positions ( notes , root ) : root_pos = note_to_val ( root ) current_pos = root_pos positions = [ ] for note in notes : note_pos = note_to_val ( note ) if note_pos < current_pos : note_pos += 12 * ( ( current_pos - note_pos ) // 12 + 1 ) positions . append ( note_pos - root_pos ) current_pos = note_pos ret... | Get notes positions . |
26,239 | def get_all_rotated_notes ( notes ) : notes_list = [ ] for x in range ( len ( notes ) ) : notes_list . append ( notes [ x : ] + notes [ : x ] ) return notes_list | Get all rotated notes |
26,240 | def find_quality ( positions ) : for q , p in QUALITY_DICT . items ( ) : if positions == list ( p ) : return q return None | Find a quality consists of positions |
26,241 | def configure ( self , ns , mappings = None , ** kwargs ) : if mappings is None : mappings = dict ( ) mappings . update ( kwargs ) for operation , definition in mappings . items ( ) : try : configure_func = self . _find_func ( operation ) except AttributeError : pass else : configure_func ( ns , self . _make_definition... | Apply mappings to a namespace . |
26,242 | def _find_func ( self , operation ) : if isinstance ( operation , Operation ) : operation_name = operation . name . lower ( ) else : operation_name = operation . lower ( ) return getattr ( self , "configure_{}" . format ( operation_name ) ) | Find the function to use to configure the given operation . |
26,243 | def _make_definition ( self , definition ) : if not definition : return EndpointDefinition ( ) if isinstance ( definition , EndpointDefinition ) : return definition elif len ( definition ) == 1 : return EndpointDefinition ( func = definition [ 0 ] , ) elif len ( definition ) == 2 : return EndpointDefinition ( func = de... | Generate a definition . |
26,244 | def iter_links ( operations , page ) : for operation , ns , rule , func in operations : yield Link . for_ ( operation = operation , ns = ns , type = ns . subject_name , qs = page . to_items ( ) , ) | Generate links for an iterable of operations on a starting page . |
26,245 | def configure_discovery ( graph ) : ns = Namespace ( subject = graph . config . discovery_convention . name , ) convention = DiscoveryConvention ( graph ) convention . configure ( ns , discover = tuple ( ) ) return ns . subject | Build a singleton endpoint that provides a link to all search endpoints . |
26,246 | def configure_discover ( self , ns , definition ) : page_schema = OffsetLimitPageSchema ( ) @ self . add_route ( "/" , Operation . Discover , ns ) def discover ( ) : page = OffsetLimitPage . from_query_string ( page_schema ) page . offset = 0 response_data = dict ( _links = Links ( { "self" : Link . for_ ( Operation . ... | Register a discovery endpoint for a set of operations . |
26,247 | def nested ( * contexts ) : with ExitStack ( ) as stack : results = [ stack . enter_context ( context ) for context in contexts ] yield results | Reimplementation of nested in python 3 . |
26,248 | def temporary_upload ( name , fileobj ) : tempdir = mkdtemp ( ) filename = secure_filename ( fileobj . filename ) filepath = join ( tempdir , filename ) fileobj . save ( filepath ) try : yield name , filepath , fileobj . filename finally : rmtree ( tempdir ) | Upload a file to a temporary location . |
26,249 | def configure_upload ( graph , ns , mappings , exclude_func = None ) : convention = UploadConvention ( graph , exclude_func ) convention . configure ( ns , mappings ) | Register Upload endpoints for a resource object . |
26,250 | def configure_upload ( self , ns , definition ) : upload = self . create_upload_func ( ns , definition , ns . collection_path , Operation . Upload ) upload . __doc__ = "Upload a {}" . format ( ns . subject_name ) | Register an upload endpoint . |
26,251 | def configure_uploadfor ( self , ns , definition ) : upload_for = self . create_upload_func ( ns , definition , ns . relation_path , Operation . UploadFor ) upload_for . __doc__ = "Upload a {} for a {}" . format ( ns . subject_name , ns . object_name ) | Register an upload - for relation endpoint . |
26,252 | def build_etag ( self , response , include_etag = True , ** kwargs ) : if not include_etag : return if not spooky : response . add_etag ( ) return response . headers [ "ETag" ] = quote_etag ( hexlify ( spooky . hash128 ( response . get_data ( ) , ) . to_bytes ( 16 , "little" ) , ) . decode ( "utf-8" ) , ) | Add an etag to the response body . |
26,253 | def update_and_reencrypt ( self , ** kwargs ) : encrypted_field_name = self . store . model_class . __plaintext__ id_ = kwargs [ self . identifier_key ] current_model = self . store . retrieve ( id_ ) current_value = current_model . plaintext null_update = ( encrypted_field_name in kwargs and kwargs . get ( encrypted_f... | Support re - encryption by enforcing that every update triggers a new encryption call even if the original call does not update the encrypted field . |
26,254 | def unflatten ( self , obj ) : obj . substitutions = [ dict ( from_id = key , to_id = value ) for key , value in getattr ( obj , "substitutions" , { } ) . items ( ) ] | Translate substitutions dictionary into objects . |
26,255 | def clone ( self , substitutions , commit = True , ** kwargs ) : return self . store . clone ( substitutions , ** kwargs ) | Clone a DAG optionally skipping the commit . |
26,256 | def configure_createfor ( self , ns , definition ) : @ self . add_route ( ns . relation_path , Operation . CreateFor , ns ) @ request ( definition . request_schema ) @ response ( definition . response_schema ) @ wraps ( definition . func ) def create ( ** path_data ) : request_data = load_request_data ( definition . re... | Register a create - for relation endpoint . |
26,257 | def configure_deletefor ( self , ns , definition ) : @ self . add_route ( ns . relation_path , Operation . DeleteFor , ns ) @ wraps ( definition . func ) def delete ( ** path_data ) : headers = dict ( ) response_data = dict ( ) require_response_data ( definition . func ( ** path_data ) ) definition . header_func ( head... | Register a delete - for relation endpoint . |
26,258 | def configure_replacefor ( self , ns , definition ) : @ self . add_route ( ns . relation_path , Operation . ReplaceFor , ns ) @ request ( definition . request_schema ) @ response ( definition . response_schema ) @ wraps ( definition . func ) def replace ( ** path_data ) : headers = dict ( ) request_data = load_request_... | Register a replace - for relation endpoint . |
26,259 | def build ( self , field : Field ) -> Mapping [ str , Any ] : builder_types = self . builder_types ( ) + [ self . default_builder_type ( ) ] builders : List [ ParameterBuilder ] = [ builder_type ( build_parameter = self . build , ) for builder_type in builder_types ] builder = next ( builder for builder in builders if ... | Build a swagger parameter from a marshmallow field . |
26,260 | def builder_types ( cls ) -> List [ Type [ ParameterBuilder ] ] : return [ entry_point . load ( ) for entry_point in iter_entry_points ( ENTRY_POINT ) ] | Define the available builder types . |
26,261 | def configure_crud ( graph , ns , mappings ) : convention = CRUDConvention ( graph ) convention . configure ( ns , mappings ) | Register CRUD endpoints for a resource object . |
26,262 | def configure_count ( self , ns , definition ) : @ self . add_route ( ns . collection_path , Operation . Count , ns ) @ qs ( definition . request_schema ) @ wraps ( definition . func ) def count ( ** path_data ) : request_data = load_query_string_data ( definition . request_schema ) response_data = dict ( ) count = def... | Register a count endpoint . |
26,263 | def configure_create ( self , ns , definition ) : @ self . add_route ( ns . collection_path , Operation . Create , ns ) @ request ( definition . request_schema ) @ response ( definition . response_schema ) @ wraps ( definition . func ) def create ( ** path_data ) : request_data = load_request_data ( definition . reques... | Register a create endpoint . |
26,264 | def configure_updatebatch ( self , ns , definition ) : operation = Operation . UpdateBatch @ self . add_route ( ns . collection_path , operation , ns ) @ request ( definition . request_schema ) @ response ( definition . response_schema ) @ wraps ( definition . func ) def update_batch ( ** path_data ) : headers = dict (... | Register an update batch endpoint . |
26,265 | def configure_retrieve ( self , ns , definition ) : request_schema = definition . request_schema or Schema ( ) @ self . add_route ( ns . instance_path , Operation . Retrieve , ns ) @ qs ( request_schema ) @ response ( definition . response_schema ) @ wraps ( definition . func ) def retrieve ( ** path_data ) : headers =... | Register a retrieve endpoint . |
26,266 | def configure_delete ( self , ns , definition ) : request_schema = definition . request_schema or Schema ( ) @ self . add_route ( ns . instance_path , Operation . Delete , ns ) @ qs ( request_schema ) @ wraps ( definition . func ) def delete ( ** path_data ) : headers = dict ( ) request_data = load_query_string_data ( ... | Register a delete endpoint . |
26,267 | def configure_replace ( self , ns , definition ) : @ self . add_route ( ns . instance_path , Operation . Replace , ns ) @ request ( definition . request_schema ) @ response ( definition . response_schema ) @ wraps ( definition . func ) def replace ( ** path_data ) : headers = dict ( ) request_data = load_request_data (... | Register a replace endpoint . |
26,268 | def configure_update ( self , ns , definition ) : @ self . add_route ( ns . instance_path , Operation . Update , ns ) @ request ( definition . request_schema ) @ response ( definition . response_schema ) @ wraps ( definition . func ) def update ( ** path_data ) : headers = dict ( ) request_data = load_request_data ( de... | Register an update endpoint . |
26,269 | def configure_createcollection ( self , ns , definition ) : paginated_list_schema = self . page_cls . make_paginated_list_schema_class ( ns , definition . response_schema , ) ( ) @ self . add_route ( ns . collection_path , Operation . CreateCollection , ns ) @ request ( definition . request_schema ) @ response ( pagina... | Register create collection endpoint . |
26,270 | def parse_ref ( self , field : Field ) -> str : ref_name = type_name ( name_for ( field . schema ) ) return f"#/definitions/{ref_name}" | Parse the reference type for nested fields if any . |
26,271 | def configure_build_info ( graph ) : ns = Namespace ( subject = BuildInfo , ) convention = BuildInfoConvention ( graph ) convention . configure ( ns , retrieve = tuple ( ) ) return convention . build_info | Configure the build info endpoint . |
26,272 | def build_logger_tree ( ) : cache = { } tree = make_logger_node ( "" , root ) for name , logger in sorted ( root . manager . loggerDict . items ( ) ) : if "." in name : parent_name = "." . join ( name . split ( "." ) [ : - 1 ] ) parent = cache [ parent_name ] else : parent = tree cache [ name ] = make_logger_node ( nam... | Build a DFS tree representing the logger layout . |
26,273 | def build ( self , field : Field ) -> Mapping [ str , Any ] : return dict ( self . iter_parsed_values ( field ) ) | Build a parameter . |
26,274 | def iter_parsed_values ( self , field : Field ) -> Iterable [ Tuple [ str , Any ] ] : for key , func in self . parsers . items ( ) : value = func ( field ) if not value : continue yield key , value | Walk the dictionary of parsers and emit all non - null values . |
26,275 | def object_ns ( self ) : return Namespace ( subject = self . object_ , object_ = None , prefix = self . prefix , qualifier = self . qualifier , version = self . version , ) | Create a new namespace for the current namespace s object value . |
26,276 | def url_for ( self , operation , _external = True , ** kwargs ) : return url_for ( self . endpoint_for ( operation ) , _external = _external , ** kwargs ) | Construct a URL for an operation against a resource . |
26,277 | def href_for ( self , operation , qs = None , ** kwargs ) : url = urljoin ( request . url_root , self . url_for ( operation , ** kwargs ) ) qs_character = "?" if url . find ( "?" ) == - 1 else "&" return "{}{}" . format ( url , "{}{}" . format ( qs_character , urlencode ( qs ) ) if qs else "" , ) | Construct a full href for an operation against a resource . |
26,278 | def configure_swagger ( graph ) : ns = Namespace ( subject = graph . config . swagger_convention . name , version = graph . config . swagger_convention . version , ) convention = SwaggerConvention ( graph ) convention . configure ( ns , discover = tuple ( ) ) return ns . subject | Build a singleton endpoint that provides swagger definitions for all operations . |
26,279 | def configure_discover ( self , ns , definition ) : @ self . add_route ( ns . singleton_path , Operation . Discover , ns ) def discover ( ) : swagger = build_swagger ( self . graph , ns , self . find_matching_endpoints ( ns ) ) g . hide_body = True return make_response ( swagger ) | Register a swagger endpoint for a set of operations . |
26,280 | def parse_items ( self , field : Field ) -> Mapping [ str , Any ] : return self . build_parameter ( field . container ) | Parse the child item type for list fields if any . |
26,281 | def for_ ( cls , operation , ns , qs = None , type = None , allow_templates = False , ** kwargs ) : assert isinstance ( ns , Namespace ) try : href , templated = ns . href_for ( operation , qs = qs , ** kwargs ) , False except BuildError as error : if not allow_templates : raise uri_templates = { argument : "{{{}}}" . ... | Create a link to an operation on a resource object . |
26,282 | def build_parameter ( field : Field ) -> Mapping [ str , Any ] : builder = Parameters ( ) return builder . build ( field ) | Build JSON parameter from a marshmallow field . |
26,283 | def configure_savedsearch ( self , ns , definition ) : paginated_list_schema = self . page_cls . make_paginated_list_schema_class ( ns , definition . response_schema , ) ( ) @ self . add_route ( ns . collection_path , Operation . SavedSearch , ns ) @ request ( definition . request_schema ) @ response ( paginated_list_s... | Register a saved search endpoint . |
26,284 | def encode_basic_auth ( username , password ) : return "Basic {}" . format ( b64encode ( "{}:{}" . format ( username , password , ) . encode ( "utf-8" ) ) . decode ( "utf-8" ) ) | Encode basic auth credentials . |
26,285 | def configure_basic_auth_decorator ( graph ) : graph . config . setdefault ( "BASIC_AUTH_REALM" , graph . metadata . name ) return ConfigBasicAuth ( app = graph . flask , credentials = dict ( graph . config . basic_auth . credentials ) , ) | Configure a basic auth decorator . |
26,286 | def check_credentials ( self , username , password ) : return password is not None and self . credentials . get ( username , None ) == password | Override credential checking to use configured credentials . |
26,287 | def challenge ( self ) : response = super ( ConfigBasicAuth , self ) . challenge ( ) raise with_headers ( Unauthorized ( ) , response . headers ) | Override challenge to raise an exception that will trigger regular error handling . |
26,288 | def iter_fields ( self , schema : Schema ) -> Iterable [ Tuple [ str , Field ] ] : for name in sorted ( schema . fields . keys ( ) ) : field = schema . fields [ name ] yield field . dump_to or name , field | Iterate through marshmallow schema fields . |
26,289 | def links ( self ) : links = Links ( ) links [ "self" ] = Link . for_ ( self . _operation , self . _ns , qs = self . _page . to_items ( ) , ** self . _context ) return links | Include a self link . |
26,290 | def links ( self ) : links = super ( OffsetLimitPaginatedList , self ) . links if self . _page . offset + self . _page . limit < self . count : links [ "next" ] = Link . for_ ( self . _operation , self . _ns , qs = self . _page . next_page . to_items ( ) , ** self . _context ) if self . offset > 0 : links [ "prev" ] = ... | Include previous and next links . |
26,291 | def to_items ( self , func = str ) : return [ ( key , func ( self . kwargs [ key ] ) ) for key in sorted ( self . kwargs . keys ( ) ) ] | Construct a list of dictionary items . |
26,292 | def to_paginated_list ( self , result , _ns , _operation , ** kwargs ) : items , context = self . parse_result ( result ) headers = dict ( ) paginated_list = PaginatedList ( items = items , _page = self , _ns = _ns , _operation = _operation , _context = context , ) return paginated_list , headers | Convert a controller result to a paginated list . |
26,293 | def parse_result ( cls , result ) : if isinstance ( result , tuple ) == 2 : items , context = result else : context = { } items = result return items , context | Parse a simple items result . |
26,294 | def from_query_string ( cls , schema , qs = None ) : dct = load_query_string_data ( schema , qs ) return cls . from_dict ( dct ) | Extract a page from the current query string . |
26,295 | def make_paginated_list_schema_class ( cls , ns , item_schema ) : class PaginatedListSchema ( Schema ) : __alias__ = "{}_list" . format ( ns . subject_name ) items = fields . List ( fields . Nested ( item_schema ) , required = True ) _links = fields . Raw ( ) return PaginatedListSchema | Generate a schema class that represents a paginated list of items . |
26,296 | def parse_result ( cls , result ) : if len ( result ) == 3 : items , count , context = result else : context = { } items , count = result return items , count , context | Parse an items + count tuple result . |
26,297 | def name_for ( obj ) : if isinstance ( obj , str ) : return obj cls = obj if isclass ( obj ) else obj . __class__ if hasattr ( cls , "__alias__" ) : return underscore ( cls . __alias__ ) else : return underscore ( cls . __name__ ) | Get a name for something . |
26,298 | def instance_path_for ( name , identifier_type , identifier_key = None ) : return "/{}/<{}:{}>" . format ( name_for ( name ) , identifier_type , identifier_key or "{}_id" . format ( name_for ( name ) ) , ) | Get a path for thing . |
26,299 | def relation_path_for ( from_name , to_name , identifier_type , identifier_key = None ) : return "/{}/<{}:{}>/{}" . format ( name_for ( from_name ) , identifier_type , identifier_key or "{}_id" . format ( name_for ( from_name ) ) , name_for ( to_name ) , ) | Get a path relating a thing to another . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.