idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
45,500
def _debug_mode_responses ( self , request , response ) : if django . conf . settings . DEBUG_GMN : if 'pretty' in request . GET : response [ 'Content-Type' ] = d1_common . const . CONTENT_TYPE_TEXT if ( 'HTTP_VENDOR_PROFILE_SQL' in request . META or django . conf . settings . DEBUG_PROFILE_SQL ) : response_list = [ ] for query in django . db . connection . queries : response_list . append ( '{}\n{}' . format ( query [ 'time' ] , query [ 'sql' ] ) ) return django . http . HttpResponse ( '\n\n' . join ( response_list ) , d1_common . const . CONTENT_TYPE_TEXT ) return response
Extra functionality available in debug mode .
45,501
def _get_cache_key ( self , obj ) : if obj is not None : return '{}-{}' . format ( id ( self ) , obj . pk ) return "{}-None" . format ( id ( self ) )
Derive cache key for given object .
45,502
def set ( self , obj , build_kwargs ) : if build_kwargs is None : build_kwargs = { } cached = { } if 'queryset' in build_kwargs : cached = { 'model' : build_kwargs [ 'queryset' ] . model , 'pks' : list ( build_kwargs [ 'queryset' ] . values_list ( 'pk' , flat = True ) ) , } elif 'obj' in build_kwargs : cached = { 'obj' : build_kwargs [ 'obj' ] , } if not hasattr ( self . _thread_local , 'cache' ) : self . _thread_local . cache = { } self . _thread_local . cache [ self . _get_cache_key ( obj ) ] = cached
Set cached value .
45,503
def take ( self , obj ) : cached = self . _thread_local . cache [ self . _get_cache_key ( obj ) ] build_kwargs = { } if 'model' in cached and 'pks' in cached : build_kwargs [ 'queryset' ] = cached [ 'model' ] . objects . filter ( pk__in = cached [ 'pks' ] ) elif 'obj' in cached : if cached [ 'obj' ] . __class__ . objects . filter ( pk = cached [ 'obj' ] . pk ) . exists ( ) : build_kwargs [ 'obj' ] = cached [ 'obj' ] else : build_kwargs [ 'queryset' ] = cached [ 'obj' ] . __class__ . objects . none ( ) self . _clean_cache ( obj ) return build_kwargs
Get cached value and clean cache .
45,504
def connect ( self , signal , ** kwargs ) : signal . connect ( self , ** kwargs ) self . connections . append ( ( signal , kwargs ) )
Connect a specific signal type to this receiver .
45,505
def disconnect ( self ) : for signal , kwargs in self . connections : signal . disconnect ( self , ** kwargs )
Disconnect all connected signal types from this receiver .
45,506
def _filter ( self , objects , ** kwargs ) : for obj in objects : if self . filter ( obj , ** kwargs ) is False : return False return True
Determine if dependent object should be processed .
45,507
def _get_build_kwargs ( self , obj , pk_set = None , action = None , update_fields = None , reverse = None , ** kwargs ) : if action is None : if not self . _filter ( [ obj ] , update_fields = update_fields ) : return queryset = getattr ( obj , self . accessor ) . all ( ) if self . field . rel . model == self . field . rel . related_model : queryset = queryset . union ( getattr ( obj , self . field . rel . get_accessor_name ( ) ) . all ( ) ) return { 'queryset' : queryset } else : if self . field . rel . model == self . field . rel . related_model : pks = set ( ) if self . _filter ( self . model . objects . filter ( pk__in = pk_set ) ) : pks . add ( obj . pk ) if self . _filter ( self . model . objects . filter ( pk__in = [ obj . pk ] ) ) : pks . update ( pk_set ) return { 'queryset' : self . index . object_type . objects . filter ( pk__in = pks ) } elif isinstance ( obj , self . model ) : result = { 'queryset' : self . index . object_type . objects . filter ( pk__in = pk_set ) } pk_set = { obj . pk } else : result = { 'obj' : obj } if action != 'post_clear' : if not self . _filter ( self . model . objects . filter ( pk__in = pk_set ) ) : return return result
Prepare arguments for rebuilding indices .
45,508
def process_predelete ( self , obj , pk_set = None , action = None , update_fields = None , ** kwargs ) : build_kwargs = self . _get_build_kwargs ( obj , pk_set , action , update_fields , ** kwargs ) self . delete_cache . set ( obj , build_kwargs )
Render the queryset of influenced objects and cache it .
45,509
def process_delete ( self , obj , pk_set = None , action = None , update_fields = None , ** kwargs ) : build_kwargs = self . delete_cache . take ( obj ) if build_kwargs : self . index . build ( ** build_kwargs )
Recreate queryset from the index and rebuild the index .
45,510
def _process_m2m_through ( self , obj , action ) : source = getattr ( obj , self . field . rel . field . m2m_field_name ( ) ) target = getattr ( obj , self . field . rel . field . m2m_reverse_field_name ( ) ) pk_set = set ( ) if target : pk_set . add ( target . pk ) self . process_m2m ( source , pk_set , action = action , reverse = False , cache_key = obj )
Process custom M2M through model actions .
45,511
def process_m2m_through_save ( self , obj , created = False , ** kwargs ) : if not created : return self . _process_m2m_through ( obj , 'post_add' )
Process M2M post save for custom through model .
45,512
def _connect_signal ( self , index ) : post_save_signal = ElasticSignal ( index , 'build' ) post_save_signal . connect ( post_save , sender = index . object_type ) self . signals . append ( post_save_signal ) post_delete_signal = ElasticSignal ( index , 'remove_object' ) post_delete_signal . connect ( post_delete , sender = index . object_type ) self . signals . append ( post_delete_signal ) for dependency in index . get_dependencies ( ) : if isinstance ( dependency , ( models . ManyToManyField , ManyToManyDescriptor ) ) : dependency = ManyToManyDependency ( dependency ) elif not isinstance ( dependency , Dependency ) : raise TypeError ( "Unsupported dependency type: {}" . format ( repr ( dependency ) ) ) signal = dependency . connect ( index ) self . signals . extend ( signal )
Create signals for building indexes .
45,513
def register_signals ( self ) : for index in self . indexes : if index . object_type : self . _connect_signal ( index )
Register signals for all indexes .
45,514
def discover_indexes ( self ) : self . indexes = [ ] for app_config in apps . get_app_configs ( ) : indexes_path = '{}.elastic_indexes' . format ( app_config . name ) try : indexes_module = import_module ( indexes_path ) for attr_name in dir ( indexes_module ) : attr = getattr ( indexes_module , attr_name ) if inspect . isclass ( attr ) and issubclass ( attr , BaseIndex ) and attr is not BaseIndex : if is_testing ( ) : index = attr . document_class . _index . _name testing_postfix = '_test_{}_{}' . format ( TESTING_UUID , os . getpid ( ) ) if not index . endswith ( testing_postfix ) : if attr . testing_postfix : index = index [ : - len ( attr . testing_postfix ) ] index = index + testing_postfix attr . testing_postfix = testing_postfix attr . document_class . _index . _name = index index = attr ( ) for extension in composer . get_extensions ( attr ) : mapping = getattr ( extension , 'mapping' , { } ) index . mapping . update ( mapping ) self . indexes . append ( index ) except ImportError as ex : if not re . match ( 'No module named .*elastic_indexes.*' , str ( ex ) ) : raise
Save list of index builders into _index_builders .
45,515
def build ( self , obj = None , queryset = None , push = True ) : for index in self . indexes : index . build ( obj , queryset , push )
Trigger building of the indexes .
45,516
def delete ( self , skip_mapping = False ) : for index in self . indexes : index . destroy ( ) if not skip_mapping : index . create_mapping ( )
Delete all entries from ElasticSearch .
45,517
def destroy ( self ) : self . unregister_signals ( ) for index in self . indexes : index . destroy ( ) self . indexes = [ ]
Delete all indexes from Elasticsearch and index builder .
45,518
def _set_initial ( self , C_in , scale_in ) : r self . C_in = C_in self . scale_in = scale_in
r Set the initial values for parameters and Wilson coefficients at the scale scale_in .
45,519
def _rgevolve_leadinglog ( self , scale_out ) : self . _check_initial ( ) return rge . smeft_evolve_leadinglog ( C_in = self . C_in , scale_in = self . scale_in , scale_out = scale_out )
Compute the leading logarithmic approximation to the solution of the SMEFT RGEs from the initial scale to scale_out . Returns a dictionary with parameters and Wilson coefficients . Much faster but less precise that rgevolve .
45,520
def get_smpar ( self , accuracy = 'integrate' , scale_sm = 91.1876 ) : if accuracy == 'integrate' : C_out = self . _rgevolve ( scale_sm ) elif accuracy == 'leadinglog' : C_out = self . _rgevolve_leadinglog ( scale_sm ) else : raise ValueError ( "'{}' is not a valid value of 'accuracy' (must be either 'integrate' or 'leadinglog')." . format ( accuracy ) ) return smpar . smpar ( C_out )
Compute the SM MS - bar parameters at the electroweak scale .
45,521
def run_continuous ( self , scale ) : if scale == self . scale_in : raise ValueError ( "The scale must be different from the input scale" ) elif scale < self . scale_in : scale_min = scale scale_max = self . scale_in elif scale > self . scale_in : scale_max = scale scale_min = self . scale_in fun = rge . smeft_evolve_continuous ( C_in = self . C_in , scale_in = self . scale_in , scale_out = scale ) return wilson . classes . RGsolution ( fun , scale_min , scale_max )
Return a continuous solution to the RGE as RGsolution instance .
45,522
def deconstruct ( self ) : name , path , args , kwargs = super ( ) . deconstruct ( ) if self . populate_from is not None : kwargs [ 'populate_from' ] = self . populate_from if self . unique_with != ( ) : kwargs [ 'unique_with' ] = self . unique_with kwargs . pop ( 'unique' , None ) return name , path , args , kwargs
Deconstruct method .
45,523
def _get_unique_constraints ( self , instance ) : constraints_expression = [ ] constraints_values = { } for field_name in self . unique_with : if constants . LOOKUP_SEP in field_name : raise NotImplementedError ( '`unique_with` constraint does not support lookups by related models.' ) field = instance . _meta . get_field ( field_name ) field_value = getattr ( instance , field_name ) field_db_value = field . get_prep_value ( field_value ) constraint_key = 'unique_' + field_name constraints_expression . append ( "{} = %({})s" . format ( connection . ops . quote_name ( field . column ) , constraint_key ) ) constraints_values [ constraint_key ] = field_db_value if not constraints_expression : return '' , [ ] constraints_expression = 'AND ' + ' AND ' . join ( constraints_expression ) return constraints_expression , constraints_values
Return SQL filter for filtering by fields in unique_with attribute .
45,524
def _get_populate_from_value ( self , instance ) : if hasattr ( self . populate_from , '__call__' ) : return self . populate_from ( instance ) else : attr = getattr ( instance , self . populate_from ) return attr ( ) if callable ( attr ) else attr
Get the value from populate_from attribute .
45,525
def _evaluate_expressions ( self , expression_engine , step_id , values , context ) : if expression_engine is None : return values processed = { } for name , value in values . items ( ) : if isinstance ( value , str ) : value = value . strip ( ) try : expression = expression_engine . get_inline_expression ( value ) if expression is not None : value = expression_engine . evaluate_inline ( expression , context ) else : value = expression_engine . evaluate_block ( value , context ) except EvaluationError as error : raise ExecutionError ( 'Error while evaluating expression for step "{}":\n{}' . format ( step_id , error ) ) elif isinstance ( value , dict ) : value = self . _evaluate_expressions ( expression_engine , step_id , value , context ) processed [ name ] = value return processed
Recursively evaluate expressions in a dictionary of values .
45,526
async def init ( ) : global redis_conn conn = await aioredis . create_connection ( 'redis://{}:{}' . format ( SETTINGS . get ( 'FLOW_EXECUTOR' , { } ) . get ( 'REDIS_CONNECTION' , { } ) . get ( 'host' , 'localhost' ) , SETTINGS . get ( 'FLOW_EXECUTOR' , { } ) . get ( 'REDIS_CONNECTION' , { } ) . get ( 'port' , 56379 ) ) , db = int ( SETTINGS . get ( 'FLOW_EXECUTOR' , { } ) . get ( 'REDIS_CONNECTION' , { } ) . get ( 'db' , 1 ) ) ) redis_conn = aioredis . Redis ( conn )
Create a connection to the Redis server .
45,527
async def send_manager_command ( cmd , expect_reply = True , extra_fields = { } ) : packet = { ExecutorProtocol . DATA_ID : DATA [ 'id' ] , ExecutorProtocol . COMMAND : cmd , } packet . update ( extra_fields ) logger . debug ( "Sending command to listener: {}" . format ( json . dumps ( packet ) ) ) queue_channel = EXECUTOR_SETTINGS [ 'REDIS_CHANNEL_PAIR' ] [ 0 ] try : await redis_conn . rpush ( queue_channel , json . dumps ( packet ) ) except Exception : logger . error ( "Error sending command to manager:\n\n{}" . format ( traceback . format_exc ( ) ) ) raise if not expect_reply : return for _ in range ( _REDIS_RETRIES ) : response = await redis_conn . blpop ( QUEUE_RESPONSE_CHANNEL , timeout = 1 ) if response : break else : raise RuntimeError ( "No response from the manager after {} retries." . format ( _REDIS_RETRIES ) ) _ , item = response result = json . loads ( item . decode ( 'utf-8' ) ) [ ExecutorProtocol . RESULT ] assert result in [ ExecutorProtocol . RESULT_OK , ExecutorProtocol . RESULT_ERROR ] if result == ExecutorProtocol . RESULT_OK : return True return False
Send a properly formatted command to the manager .
45,528
def _annotate_query ( query , generate_dict ) : annotate_key_list = [ ] for field_name , annotate_dict in generate_dict . items ( ) : for annotate_name , annotate_func in annotate_dict [ "annotate_dict" ] . items ( ) : query = annotate_func ( query ) annotate_key_list . append ( annotate_name ) return query , annotate_key_list
Add annotations to the query to retrieve values required by field value generate functions .
45,529
def _value_list_to_sciobj_dict ( sciobj_value_list , lookup_list , lookup_dict , generate_dict ) : sciobj_dict = { } lookup_to_value_dict = { k : v for k , v in zip ( lookup_list , sciobj_value_list ) } for field_name , r_dict in lookup_dict . items ( ) : if r_dict [ "lookup_str" ] in lookup_to_value_dict . keys ( ) : sciobj_dict [ field_name ] = lookup_to_value_dict [ r_dict [ "lookup_str" ] ] for field_name , annotate_dict in generate_dict . items ( ) : for final_name , generate_func in annotate_dict [ "generate_dict" ] . items ( ) : sciobj_dict [ field_name ] = generate_func ( lookup_to_value_dict ) return sciobj_dict
Create a dict where the keys are the requested field names from the values returned by Django .
45,530
def _split_field_list ( field_list ) : lookup_dict = { } generate_dict = { } for field_name in field_list or FIELD_NAME_TO_EXTRACT_DICT . keys ( ) : try : extract_dict = FIELD_NAME_TO_EXTRACT_DICT [ field_name ] except KeyError : assert_invalid_field_list ( field_list ) else : if "lookup_str" in extract_dict : lookup_dict [ field_name ] = extract_dict else : generate_dict [ field_name ] = extract_dict return lookup_dict , generate_dict
Split the list of fields for which to extract values into lists by extraction methods .
45,531
def dataoneTypes ( request ) : if is_v1_api ( request ) : return d1_common . types . dataoneTypes_v1_1 elif is_v2_api ( request ) or is_diag_api ( request ) : return d1_common . types . dataoneTypes_v2_0 else : raise d1_common . types . exceptions . ServiceFailure ( 0 , 'Unknown version designator in URL. url="{}"' . format ( request . path ) )
Return the PyXB binding to use when handling a request .
45,532
def parse_and_normalize_url_date ( date_str ) : if date_str is None : return None try : return d1_common . date_time . dt_from_iso8601_str ( date_str ) except d1_common . date_time . iso8601 . ParseError as e : raise d1_common . types . exceptions . InvalidRequest ( 0 , 'Invalid date format for URL parameter. date="{}" error="{}"' . format ( date_str , str ( e ) ) , )
Parse a ISO 8601 date - time with optional timezone .
45,533
def get ( self , doc_id ) : resp_dict = self . _get_query ( q = 'id:{}' . format ( doc_id ) ) if resp_dict [ 'response' ] [ 'numFound' ] > 0 : return resp_dict [ 'response' ] [ 'docs' ] [ 0 ]
Retrieve the specified document .
45,534
def get_ids ( self , start = 0 , rows = 1000 , ** query_dict ) : resp_dict = self . _get_query ( start = start , rows = rows , ** query_dict ) return { 'matches' : resp_dict [ 'response' ] [ 'numFound' ] , 'start' : start , 'ids' : [ d [ 'id' ] for d in resp_dict [ 'response' ] [ 'docs' ] ] , }
Retrieve a list of identifiers for documents matching the query .
45,535
def get_field_values ( self , name , maxvalues = - 1 , sort = True , ** query_dict ) : param_dict = query_dict . copy ( ) param_dict . update ( { 'rows' : '0' , 'facet' : 'true' , 'facet.field' : name , 'facet.limit' : str ( maxvalues ) , 'facet.zeros' : 'false' , 'facet.sort' : str ( sort ) . lower ( ) , } ) resp_dict = self . _post_query ( ** param_dict ) result_dict = resp_dict [ 'facet_counts' ] [ 'facet_fields' ] result_dict [ 'numFound' ] = resp_dict [ 'response' ] [ 'numFound' ] return result_dict
Retrieve the unique values for a field along with their usage counts .
45,536
def _get_solr_type ( self , field ) : field_type = 'string' try : field_type = FIELD_TYPE_CONVERSION_MAP [ field ] return field_type except : pass fta = field . split ( '_' ) if len ( fta ) > 1 : ft = fta [ len ( fta ) - 1 ] try : field_type = FIELD_TYPE_CONVERSION_MAP [ ft ] FIELD_TYPE_CONVERSION_MAP [ field ] = field_type except : pass return field_type
Returns the Solr type of the specified field name .
45,537
def _get_query ( self , ** query_dict ) : param_dict = query_dict . copy ( ) return self . _send_query ( do_post = False , ** param_dict )
Perform a GET query against Solr and return the response as a Python dict .
45,538
def _post_query ( self , ** query_dict ) : param_dict = query_dict . copy ( ) return self . _send_query ( do_post = True , ** param_dict )
Perform a POST query against Solr and return the response as a Python dict .
45,539
def _send_query ( self , do_post = False , ** query_dict ) : param_dict = query_dict . copy ( ) param_dict . setdefault ( 'wt' , 'json' ) param_dict . setdefault ( 'q' , '*.*' ) param_dict . setdefault ( 'fl' , '*' ) return self . query ( 'solr' , '' , do_post = do_post , query = param_dict )
Perform a query against Solr and return the response as a Python dict .
45,540
def _escape_query_term ( self , term ) : term = term . replace ( '\\' , '\\\\' ) for c in RESERVED_CHAR_LIST : term = term . replace ( c , r'\{}' . format ( c ) ) return term
Escape a query term for inclusion in a query .
45,541
def migrate_flow_collection ( apps , schema_editor ) : Process = apps . get_model ( 'flow' , 'Process' ) DescriptorSchema = apps . get_model ( 'flow' , 'DescriptorSchema' ) for process in Process . objects . all ( ) : process . entity_type = process . flow_collection process . entity_descriptor_schema = process . flow_collection if ( process . entity_descriptor_schema is not None and not DescriptorSchema . objects . filter ( slug = process . entity_descriptor_schema ) . exists ( ) ) : raise LookupError ( "Descriptow schema '{}' referenced in 'entity_descriptor_schema' not " "found." . format ( process . entity_descriptor_schema ) ) process . save ( )
Migrate flow_collection field to entity_type .
45,542
def get_pyxb_binding_by_api_version ( api_major , api_minor = 0 ) : try : return VERSION_TO_BINDING_DICT [ api_major , api_minor ] except KeyError : raise ValueError ( 'Unknown DataONE API version: {}.{}' . format ( api_major , api_minor ) )
Map DataONE API version tag to PyXB binding .
45,543
def extract_version_tag_from_url ( url ) : m = re . match ( r'(/|^)(v\d)(/|$)' , url ) if not m : return None return m . group ( 2 )
Extract a DataONE API version tag from a MN or CN service endpoint URL .
45,544
def str_to_v1_str ( xml_str ) : if str_is_v1 ( xml_str ) : return xml_str etree_obj = str_to_etree ( xml_str ) strip_v2_elements ( etree_obj ) etree_replace_namespace ( etree_obj , d1_common . types . dataoneTypes_v1 . Namespace ) return etree_to_str ( etree_obj )
Convert a API v2 XML doc to v1 XML doc .
45,545
def str_to_v2_str ( xml_str ) : if str_is_v2 ( xml_str ) : return xml_str etree_obj = str_to_etree ( xml_str ) etree_replace_namespace ( etree_obj , d1_common . types . dataoneTypes_v2_0 . Namespace ) return etree_to_str ( etree_obj )
Convert a API v1 XML doc to v2 XML doc .
45,546
def str_to_etree ( xml_str , encoding = 'utf-8' ) : parser = xml . etree . ElementTree . XMLParser ( encoding = encoding ) return xml . etree . ElementTree . fromstring ( xml_str , parser = parser )
Deserialize API XML doc to an ElementTree .
45,547
def etree_replace_namespace ( etree_obj , ns_str ) : def _replace_recursive ( el , n ) : el . tag = re . sub ( r'{.*\}' , '{{{}}}' . format ( n ) , el . tag ) el . text = el . text . strip ( ) if el . text else None el . tail = el . tail . strip ( ) if el . tail else None for child_el in el : _replace_recursive ( child_el , n ) _replace_recursive ( etree_obj , ns_str )
In - place change the namespace of elements in an ElementTree .
45,548
def strip_v2_elements ( etree_obj ) : if etree_obj . tag == v2_0_tag ( 'logEntry' ) : strip_logEntry ( etree_obj ) elif etree_obj . tag == v2_0_tag ( 'log' ) : strip_log ( etree_obj ) elif etree_obj . tag == v2_0_tag ( 'node' ) : strip_node ( etree_obj ) elif etree_obj . tag == v2_0_tag ( 'nodeList' ) : strip_node_list ( etree_obj ) elif etree_obj . tag == v2_0_tag ( 'systemMetadata' ) : strip_system_metadata ( etree_obj ) else : raise ValueError ( 'Unknown root element. tag="{}"' . format ( etree_obj . tag ) )
In - place remove elements and attributes that are only valid in v2 types .
45,549
def strip_system_metadata ( etree_obj ) : for series_id_el in etree_obj . findall ( 'seriesId' ) : etree_obj . remove ( series_id_el ) for media_type_el in etree_obj . findall ( 'mediaType' ) : etree_obj . remove ( media_type_el ) for file_name_el in etree_obj . findall ( 'fileName' ) : etree_obj . remove ( file_name_el )
In - place remove elements and attributes that are only valid in v2 types from v1 System Metadata .
45,550
def _create_replica ( self , sysmeta_pyxb , sciobj_bytestream ) : pid = d1_common . xml . get_req_val ( sysmeta_pyxb . identifier ) self . _assert_is_pid_of_local_unprocessed_replica ( pid ) self . _check_and_create_replica_revision ( sysmeta_pyxb , "obsoletes" ) self . _check_and_create_replica_revision ( sysmeta_pyxb , "obsoletedBy" ) sciobj_url = d1_gmn . app . sciobj_store . get_rel_sciobj_file_url_by_pid ( pid ) sciobj_model = d1_gmn . app . sysmeta . create_or_update ( sysmeta_pyxb , sciobj_url ) self . _store_science_object_bytes ( pid , sciobj_bytestream ) d1_gmn . app . event_log . create_log_entry ( sciobj_model , "create" , "0.0.0.0" , "[replica]" , "[replica]" )
GMN handles replicas differently from native objects with the main differences being related to handling of restrictions related to revision chains and SIDs .
45,551
async def restricted_import ( self , async_client , node_type ) : item_task_name = "Importing objects" pid_path = self . options [ 'pid_path' ] if not os . path . exists ( pid_path ) : raise ConnectionError ( 'File does not exist: {}' . format ( pid_path ) ) with open ( pid_path , encoding = 'UTF-8' ) as pid_file : self . progress_logger . start_task_type ( item_task_name , len ( pid_file . readlines ( ) ) ) pid_file . seek ( 0 ) for pid in pid_file . readlines ( ) : pid = pid . strip ( ) self . progress_logger . start_task ( item_task_name ) if not pid : continue await self . import_aggregated ( async_client , pid ) self . progress_logger . end_task_type ( item_task_name )
Import only the Science Objects specified by a text file .
45,552
async def get_object_proxy_location ( self , client , pid ) : try : return ( await client . describe ( pid ) ) . get ( "DataONE-Proxy" ) except d1_common . types . exceptions . DataONEException : pass
If object is proxied return the proxy location URL .
45,553
async def probe_node_type_major ( self , client ) : try : node_pyxb = await self . get_node_doc ( client ) except d1_common . types . exceptions . DataONEException as e : raise django . core . management . base . CommandError ( "Could not find a functional CN or MN at the provided BaseURL. " 'base_url="{}" error="{}"' . format ( self . options [ "baseurl" ] , e . friendly_format ( ) ) ) is_cn = d1_common . type_conversions . pyxb_get_type_name ( node_pyxb ) == "NodeList" if is_cn : self . assert_is_known_node_id ( node_pyxb , django . conf . settings . NODE_IDENTIFIER ) self . _logger . info ( "Importing from CN: {}. filtered on MN: {}" . format ( d1_common . xml . get_req_val ( self . find_node ( node_pyxb , self . options [ "baseurl" ] ) . identifier ) , django . conf . settings . NODE_IDENTIFIER , ) ) return "cn" , "v2" else : self . _logger . info ( "Importing from MN: {}" . format ( d1_common . xml . get_req_val ( node_pyxb . identifier ) ) ) return "mn" , self . find_node_api_version ( node_pyxb )
Determine if import source node is a CN or MN and which major version API to use .
45,554
def assert_is_known_node_id ( self , node_list_pyxb , node_id ) : node_pyxb = self . find_node_by_id ( node_list_pyxb , node_id ) assert node_pyxb is not None , ( "The NodeID of this GMN instance is unknown to the CN at the provided BaseURL. " 'node_id="{}" base_url="{}"' . format ( node_id , self . options [ "baseurl" ] ) )
When importing from a CN ensure that the NodeID which the ObjectList will be filtered by is known to the CN .
45,555
def find_node_api_version ( self , node_pyxb ) : max_major = 0 for s in node_pyxb . services . service : max_major = max ( max_major , int ( s . version [ 1 : ] ) ) return max_major
Find the highest API major version supported by node .
45,556
def celery_run ( data_id , runtime_dir , argv ) : subprocess . Popen ( argv , cwd = runtime_dir , stdin = subprocess . DEVNULL ) . wait ( )
Run process executor .
45,557
def archive_sciobj ( pid ) : sciobj_model = d1_gmn . app . model_util . get_sci_model ( pid ) sciobj_model . is_archived = True sciobj_model . save ( ) _update_modified_timestamp ( sciobj_model )
Set the status of an object to archived .
45,558
def create_or_update ( sysmeta_pyxb , sciobj_url = None ) : pid = d1_common . xml . get_req_val ( sysmeta_pyxb . identifier ) if sciobj_url is None : sciobj_url = d1_gmn . app . sciobj_store . get_rel_sciobj_file_url_by_pid ( pid ) try : sci_model = d1_gmn . app . model_util . get_sci_model ( pid ) except d1_gmn . app . models . ScienceObject . DoesNotExist : sci_model = d1_gmn . app . models . ScienceObject ( ) sci_model . pid = d1_gmn . app . did . get_or_create_did ( pid ) sci_model . url = sciobj_url sci_model . serial_version = sysmeta_pyxb . serialVersion sci_model . uploaded_timestamp = d1_common . date_time . normalize_datetime_to_utc ( sysmeta_pyxb . dateUploaded ) _base_pyxb_to_model ( sci_model , sysmeta_pyxb ) sci_model . save ( ) if _has_media_type_pyxb ( sysmeta_pyxb ) : _media_type_pyxb_to_model ( sci_model , sysmeta_pyxb ) _access_policy_pyxb_to_model ( sci_model , sysmeta_pyxb ) if _has_replication_policy_pyxb ( sysmeta_pyxb ) : _replication_policy_pyxb_to_model ( sci_model , sysmeta_pyxb ) replica_pyxb_to_model ( sci_model , sysmeta_pyxb ) revision_pyxb_to_model ( sci_model , sysmeta_pyxb , pid ) sci_model . save ( ) return sci_model
Create or update database representation of a System Metadata object and closely related internal state .
45,559
def _access_policy_pyxb_to_model ( sci_model , sysmeta_pyxb ) : _delete_existing_access_policy ( sysmeta_pyxb ) allow_rights_holder = d1_common . types . dataoneTypes . AccessRule ( ) permission = d1_common . types . dataoneTypes . Permission ( d1_gmn . app . auth . CHANGEPERMISSION_STR ) allow_rights_holder . permission . append ( permission ) allow_rights_holder . subject . append ( d1_common . xml . get_req_val ( sysmeta_pyxb . rightsHolder ) ) top_level = _get_highest_level_action_for_rule ( allow_rights_holder ) _insert_permission_rows ( sci_model , allow_rights_holder , top_level ) if _has_access_policy_pyxb ( sysmeta_pyxb ) : for allow_rule in sysmeta_pyxb . accessPolicy . allow : top_level = _get_highest_level_action_for_rule ( allow_rule ) _insert_permission_rows ( sci_model , allow_rule , top_level )
Create or update the database representation of the sysmeta_pyxb access policy .
45,560
def deserialize_subject_info ( subject_info_xml_path ) : try : with open ( subject_info_xml_path ) as f : return d1_common . xml . deserialize ( f . read ( ) ) except ValueError as e : raise d1_common . types . exceptions . InvalidToken ( 0 , 'Could not deserialize SubjectInfo. subject_info="{}", error="{}"' . format ( subject_info_xml_path , str ( e ) ) , )
Deserialize a SubjectInfo XML file to a PyXB object .
45,561
def HEAD ( self , rest_path_list , ** kwargs ) : kwargs . setdefault ( "allow_redirects" , False ) return self . _request ( "HEAD" , rest_path_list , ** kwargs )
Send a HEAD request . See requests . sessions . request for optional parameters .
45,562
def get_curl_command_line ( self , method , url , ** kwargs ) : if kwargs . get ( "query" ) : url = "{}?{}" . format ( url , d1_common . url . urlencode ( kwargs [ "query" ] ) ) curl_list = [ "curl" ] if method . lower ( ) == "head" : curl_list . append ( "--head" ) else : curl_list . append ( "-X {}" . format ( method ) ) for k , v in sorted ( list ( kwargs [ "headers" ] . items ( ) ) ) : curl_list . append ( '-H "{}: {}"' . format ( k , v ) ) curl_list . append ( "{}" . format ( url ) ) return " " . join ( curl_list )
Get request as cURL command line for debugging .
45,563
def dump_request_and_response ( self , response ) : if response . reason is None : response . reason = "<unknown>" return d1_client . util . normalize_request_response_dump ( requests_toolbelt . utils . dump . dump_response ( response ) )
Return a string containing a nicely formatted representation of the request and response objects for logging and debugging .
45,564
def _timeout_to_float ( self , timeout ) : if timeout is not None : try : timeout_float = float ( timeout ) except ValueError : raise ValueError ( 'timeout_sec must be a valid number or None. timeout="{}"' . format ( timeout ) ) if timeout_float : return timeout_float
Convert timeout to float .
45,565
def main ( ) : parser = argparse . ArgumentParser ( description = __doc__ , formatter_class = argparse . RawDescriptionHelpFormatter ) parser . add_argument ( "path" , help = "Python module path" ) args = parser . parse_args ( ) r = d1_dev . util . redbaron_module_path_to_tree ( args . path ) print ( r . help ( True ) )
Print the RedBaron syntax tree for a Python module .
45,566
def abs_path_from_base ( base_path , rel_path ) : return os . path . abspath ( os . path . join ( os . path . dirname ( sys . _getframe ( 1 ) . f_code . co_filename ) , base_path , rel_path ) )
Join a base and a relative path and return an absolute path to the resulting location .
45,567
def abs_path ( rel_path ) : return os . path . abspath ( os . path . join ( os . path . dirname ( sys . _getframe ( 1 ) . f_code . co_filename ) , rel_path ) )
Convert a path that is relative to the module from which this function is called to an absolute path .
45,568
def get_attr_value ( self , attr_key , el_idx = 0 ) : return self . get_element_by_attr_key ( attr_key , el_idx ) . attrib [ attr_key ]
Return the value of the selected attribute in the selected element .
45,569
def set_attr_text ( self , attr_key , attr_val , el_idx = 0 ) : self . get_element_by_attr_key ( attr_key , el_idx ) . attrib [ attr_key ] = attr_val
Set the value of the selected attribute of the selected element .
45,570
def get_element_dt ( self , el_name , tz = None , el_idx = 0 ) : return iso8601 . parse_date ( self . get_element_by_name ( el_name , el_idx ) . text , tz )
Return the text of the selected element as a datetime . datetime object .
45,571
def set_element_dt ( self , el_name , dt , tz = None , el_idx = 0 ) : dt = d1_common . date_time . cast_naive_datetime_to_tz ( dt , tz ) self . get_element_by_name ( el_name , el_idx ) . text = dt . isoformat ( )
Set the text of the selected element to an ISO8601 formatted datetime .
45,572
def as_sql ( self , compiler , connection ) : sql , params = super ( ) . as_sql ( compiler , connection ) params . append ( self . path ) return sql , params
Compile SQL for this function .
45,573
def with_json_path ( self , path , field = None ) : if field is None : field = '_' . join ( [ 'json' ] + json_path_components ( path ) ) kwargs = { field : JsonGetPath ( 'json' , path ) } return self . defer ( 'json' ) . annotate ( ** kwargs )
Annotate Storage objects with a specific JSON path .
45,574
def get_json_path ( self , path ) : return self . with_json_path ( path , field = 'result' ) . values_list ( 'result' , flat = True )
Return only a specific JSON path of Storage objects .
45,575
def _get_storage ( self ) : if self . _json is None : self . _json = Storage . objects . get ( ** self . _kwargs ) . json
Load json field from Storage object .
45,576
def m2Lambda_to_vMh2 ( m2 , Lambda , C ) : try : v = ( sqrt ( 2 * m2 / Lambda ) + 3 * m2 ** ( 3 / 2 ) / ( sqrt ( 2 ) * Lambda ** ( 5 / 2 ) ) * C [ 'phi' ] ) except ValueError : v = 0 Mh2 = 2 * m2 * ( 1 - m2 / Lambda * ( 3 * C [ 'phi' ] - 4 * Lambda * C [ 'phiBox' ] + Lambda * C [ 'phiD' ] ) ) return { 'v' : v , 'Mh2' : Mh2 }
Function to numerically determine the physical Higgs VEV and mass given the parameters of the Higgs potential .
45,577
def smeftpar ( scale , C , basis ) : MW = p [ 'm_W' ] GF = p [ 'GF' ] Mh = p [ 'm_h' ] vb = sqrt ( 1 / sqrt ( 2 ) / GF ) v = vb _d = vMh2_to_m2Lambda ( v = v , Mh2 = Mh ** 2 , C = C ) m2 = _d [ 'm2' ] . real Lambda = _d [ 'Lambda' ] . real gsbar = sqrt ( 4 * pi * p [ 'alpha_s' ] ) gs = ( 1 - C [ 'phiG' ] * ( v ** 2 ) ) * gsbar gbar = 2 * MW / v g = gbar * ( 1 - C [ 'phiW' ] * ( v ** 2 ) ) ebar = sqrt ( 4 * pi * p [ 'alpha_e' ] ) gp = get_gpbar ( ebar , gbar , v , C ) c = { } c [ 'm2' ] = m2 c [ 'Lambda' ] = Lambda c [ 'g' ] = g c [ 'gp' ] = gp c [ 'gs' ] = gs K = ckmutil . ckm . ckm_tree ( p [ 'Vus' ] , p [ 'Vub' ] , p [ 'Vcb' ] , p [ 'delta' ] ) if basis == 'Warsaw' : Mu = K . conj ( ) . T @ np . diag ( [ p [ 'm_u' ] , p [ 'm_c' ] , p [ 'm_t' ] ] ) Md = np . diag ( [ p [ 'm_d' ] , p [ 'm_s' ] , p [ 'm_b' ] ] ) elif basis == 'Warsaw up' : Mu = np . diag ( [ p [ 'm_u' ] , p [ 'm_c' ] , p [ 'm_t' ] ] ) Md = K @ np . diag ( [ p [ 'm_d' ] , p [ 'm_s' ] , p [ 'm_b' ] ] ) else : raise ValueError ( "Basis '{}' not supported" . format ( basis ) ) Me = np . diag ( [ p [ 'm_e' ] , p [ 'm_mu' ] , p [ 'm_tau' ] ] ) c [ 'Gd' ] = Md / ( v / sqrt ( 2 ) ) + C [ 'dphi' ] * ( v ** 2 ) / 2 c [ 'Gu' ] = Mu / ( v / sqrt ( 2 ) ) + C [ 'uphi' ] * ( v ** 2 ) / 2 c [ 'Ge' ] = Me / ( v / sqrt ( 2 ) ) + C [ 'ephi' ] * ( v ** 2 ) / 2 return c
Get the running parameters in SMEFT .
45,578
def scale_8 ( b ) : a = np . array ( b , copy = True , dtype = complex ) for i in range ( 3 ) : a [ 0 , 0 , 1 , i ] = 1 / 2 * b [ 0 , 0 , 1 , i ] a [ 0 , 0 , 2 , i ] = 1 / 2 * b [ 0 , 0 , 2 , i ] a [ 0 , 1 , 1 , i ] = 1 / 2 * b [ 0 , 1 , 1 , i ] a [ 0 , 1 , 2 , i ] = 2 / 3 * b [ 0 , 1 , 2 , i ] - 1 / 6 * b [ 0 , 2 , 1 , i ] - 1 / 6 * b [ 1 , 0 , 2 , i ] + 1 / 6 * b [ 1 , 2 , 0 , i ] a [ 0 , 2 , 1 , i ] = - ( 1 / 6 ) * b [ 0 , 1 , 2 , i ] + 2 / 3 * b [ 0 , 2 , 1 , i ] + 1 / 6 * b [ 1 , 0 , 2 , i ] + 1 / 3 * b [ 1 , 2 , 0 , i ] a [ 0 , 2 , 2 , i ] = 1 / 2 * b [ 0 , 2 , 2 , i ] a [ 1 , 0 , 2 , i ] = - ( 1 / 6 ) * b [ 0 , 1 , 2 , i ] + 1 / 6 * b [ 0 , 2 , 1 , i ] + 2 / 3 * b [ 1 , 0 , 2 , i ] - 1 / 6 * b [ 1 , 2 , 0 , i ] a [ 1 , 1 , 2 , i ] = 1 / 2 * b [ 1 , 1 , 2 , i ] a [ 1 , 2 , 0 , i ] = 1 / 6 * b [ 0 , 1 , 2 , i ] + 1 / 3 * b [ 0 , 2 , 1 , i ] - 1 / 6 * b [ 1 , 0 , 2 , i ] + 2 / 3 * b [ 1 , 2 , 0 , i ] a [ 1 , 2 , 2 , i ] = 1 / 2 * b [ 1 , 2 , 2 , i ] return a
Translations necessary for class - 8 coefficients to go from a basis with only non - redundant WCxf operators to a basis where the Wilson coefficients are symmetrized like the operators .
45,579
def arrays2wcxf ( C ) : d = { } for k , v in C . items ( ) : if np . shape ( v ) == ( ) or np . shape ( v ) == ( 1 , ) : d [ k ] = v else : ind = np . indices ( v . shape ) . reshape ( v . ndim , v . size ) . T for i in ind : name = k + '_' + '' . join ( [ str ( int ( j ) + 1 ) for j in i ] ) d [ name ] = v [ tuple ( i ) ] return d
Convert a dictionary with Wilson coefficient names as keys and numbers or numpy arrays as values to a dictionary with a Wilson coefficient name followed by underscore and numeric indices as keys and numbers as values . This is needed for the output in WCxf format .
45,580
def wcxf2arrays ( d ) : C = { } for k , v in d . items ( ) : name = k . split ( '_' ) [ 0 ] s = C_keys_shape [ name ] if s == 1 : C [ k ] = v else : ind = k . split ( '_' ) [ - 1 ] if name not in C : C [ name ] = np . zeros ( s , dtype = complex ) C [ name ] [ tuple ( [ int ( i ) - 1 for i in ind ] ) ] = v return C
Convert a dictionary with a Wilson coefficient name followed by underscore and numeric indices as keys and numbers as values to a dictionary with Wilson coefficient names as keys and numbers or numpy arrays as values . This is needed for the parsing of input in WCxf format .
45,581
def add_missing ( C ) : C_out = C . copy ( ) for k in ( set ( WC_keys ) - set ( C . keys ( ) ) ) : C_out [ k ] = np . zeros ( C_keys_shape [ k ] ) return C_out
Add arrays with zeros for missing Wilson coefficient keys
45,582
def C_array2dict ( C ) : d = OrderedDict ( ) i = 0 for k in C_keys : s = C_keys_shape [ k ] if s == 1 : j = i + 1 d [ k ] = C [ i ] else : j = i + reduce ( operator . mul , s , 1 ) d [ k ] = C [ i : j ] . reshape ( s ) i = j return d
Convert a 1D array containing C values to a dictionary .
45,583
def C_dict2array ( C ) : return np . hstack ( [ np . asarray ( C [ k ] ) . ravel ( ) for k in C_keys ] )
Convert an OrderedDict containing C values to a 1D array .
45,584
def unscale_dict ( C ) : C_out = { k : _scale_dict [ k ] * v for k , v in C . items ( ) } for k in C_symm_keys [ 8 ] : C_out [ 'qqql' ] = unscale_8 ( C_out [ 'qqql' ] ) return C_out
Undo the scaling applied in scale_dict .
45,585
def wcxf2arrays_symmetrized ( d ) : C = wcxf2arrays ( d ) C = symmetrize_nonred ( C ) C = add_missing ( C ) return C
Convert a dictionary with a Wilson coefficient name followed by underscore and numeric indices as keys and numbers as values to a dictionary with Wilson coefficient names as keys and numbers or numpy arrays as values .
45,586
def commit_signal ( data_id ) : if not getattr ( settings , 'FLOW_MANAGER_DISABLE_AUTO_CALLS' , False ) : immediate = getattr ( settings , 'FLOW_MANAGER_SYNC_AUTO_CALLS' , False ) async_to_sync ( manager . communicate ) ( data_id = data_id , save_settings = False , run_sync = immediate )
Nudge manager at the end of every Data object save event .
45,587
def delete_entity ( sender , instance , ** kwargs ) : Entity . objects . annotate ( num_data = Count ( 'data' ) ) . filter ( data = instance , num_data = 1 ) . delete ( )
Delete Entity when last Data object is deleted .
45,588
def delete_relation ( sender , instance , ** kwargs ) : def process_signal ( relation_id ) : try : relation = Relation . objects . get ( pk = relation_id ) except Relation . DoesNotExist : return if relation . entities . count ( ) == 0 : relation . delete ( ) transaction . on_commit ( lambda : process_signal ( instance . relation_id ) )
Delete the Relation object when the last Entity is removed .
45,589
async def run_executor ( ) : parser = argparse . ArgumentParser ( description = "Run the specified executor." ) parser . add_argument ( 'module' , help = "The module from which to instantiate the concrete executor." ) args = parser . parse_args ( ) module_name = '{}.run' . format ( args . module ) class_name = 'FlowExecutor' module = import_module ( module_name , __package__ ) executor = getattr ( module , class_name ) ( ) with open ( ExecutorFiles . PROCESS_SCRIPT , 'rt' ) as script_file : await executor . run ( DATA [ 'id' ] , script_file . read ( ) )
Start the actual execution ; instantiate the executor and run .
45,590
def ready ( self ) : from . composer import composer composer . discover_extensions ( ) is_migrating = sys . argv [ 1 : 2 ] == [ 'migrate' ] if is_migrating : return from . import signals from . builder import index_builder
Perform application initialization .
45,591
def to_internal_value ( self , data ) : user = getattr ( self . context . get ( 'request' ) , 'user' ) queryset = self . get_queryset ( ) permission = get_full_perm ( 'view' , queryset . model ) try : return get_objects_for_user ( user , permission , queryset . filter ( ** { self . slug_field : data } ) , ) . latest ( ) except ObjectDoesNotExist : self . fail ( 'does_not_exist' , slug_name = self . slug_field , value = smart_text ( data ) , model_name = queryset . model . _meta . model_name , ) except ( TypeError , ValueError ) : self . fail ( 'invalid' )
Convert to internal value .
45,592
def object_hook ( dct ) : try : if 'BOOL' in dct : return dct [ 'BOOL' ] if 'S' in dct : val = dct [ 'S' ] try : return datetime . strptime ( val , '%Y-%m-%dT%H:%M:%S.%f' ) except : return str ( val ) if 'SS' in dct : return list ( dct [ 'SS' ] ) if 'N' in dct : if re . match ( "^-?\d+?\.\d+?$" , dct [ 'N' ] ) is not None : return float ( dct [ 'N' ] ) else : try : return int ( dct [ 'N' ] ) except : return int ( dct [ 'N' ] ) if 'B' in dct : return str ( dct [ 'B' ] ) if 'NS' in dct : return set ( dct [ 'NS' ] ) if 'BS' in dct : return set ( dct [ 'BS' ] ) if 'M' in dct : return dct [ 'M' ] if 'L' in dct : return dct [ 'L' ] if 'NULL' in dct and dct [ 'NULL' ] is True : return None except : return dct for key , val in six . iteritems ( dct ) : if isinstance ( val , six . string_types ) : try : dct [ key ] = datetime . strptime ( val , '%Y-%m-%dT%H:%M:%S.%f' ) except : pass if isinstance ( val , Decimal ) : if val % 1 > 0 : dct [ key ] = float ( val ) elif six . PY3 : dct [ key ] = int ( val ) elif val < sys . maxsize : dct [ key ] = int ( val ) else : dct [ key ] = long ( val ) return dct
DynamoDB object hook to return python values
45,593
def tileAddress ( self , zoom , point ) : "Returns a tile address based on a zoom level and \ a point in the tile" [ x , y ] = point assert x <= self . MAXX and x >= self . MINX assert y <= self . MAXY and y >= self . MINY assert zoom in range ( 0 , len ( self . RESOLUTIONS ) ) tileS = self . tileSize ( zoom ) offsetX = abs ( x - self . MINX ) if self . originCorner == 'bottom-left' : offsetY = abs ( y - self . MINY ) elif self . originCorner == 'top-left' : offsetY = abs ( self . MAXY - y ) col = offsetX / tileS row = offsetY / tileS if x in ( self . MINX , self . MAXX ) and col . is_integer ( ) : col = max ( 0 , col - 1 ) if y in ( self . MINY , self . MAXY ) and row . is_integer ( ) : row = max ( 0 , row - 1 ) return [ int ( math . floor ( col ) ) , int ( math . floor ( row ) ) ]
Returns a tile address based on a zoom level and \ a point in the tile
45,594
def intersectsExtent ( self , extent ) : "Determine if an extent intersects this instance extent" return self . extent [ 0 ] <= extent [ 2 ] and self . extent [ 2 ] >= extent [ 0 ] and self . extent [ 1 ] <= extent [ 3 ] and self . extent [ 3 ] >= extent [ 1 ]
Determine if an extent intersects this instance extent
45,595
def iterGrid ( self , minZoom , maxZoom ) : "Yields the tileBounds, zoom, tileCol and tileRow" assert minZoom in range ( 0 , len ( self . RESOLUTIONS ) ) assert maxZoom in range ( 0 , len ( self . RESOLUTIONS ) ) assert minZoom <= maxZoom for zoom in xrange ( minZoom , maxZoom + 1 ) : [ minRow , minCol , maxRow , maxCol ] = self . getExtentAddress ( zoom ) for row in xrange ( minRow , maxRow + 1 ) : for col in xrange ( minCol , maxCol + 1 ) : tileBounds = self . tileBounds ( zoom , col , row ) yield ( tileBounds , zoom , col , row )
Yields the tileBounds zoom tileCol and tileRow
45,596
def numberOfXTilesAtZoom ( self , zoom ) : "Returns the number of tiles over x at a given zoom level" [ minRow , minCol , maxRow , maxCol ] = self . getExtentAddress ( zoom ) return maxCol - minCol + 1
Returns the number of tiles over x at a given zoom level
45,597
def numberOfYTilesAtZoom ( self , zoom ) : "Retruns the number of tiles over y at a given zoom level" [ minRow , minCol , maxRow , maxCol ] = self . getExtentAddress ( zoom ) return maxRow - minRow + 1
Retruns the number of tiles over y at a given zoom level
45,598
def numberOfTilesAtZoom ( self , zoom ) : "Returns the total number of tile at a given zoom level" [ minRow , minCol , maxRow , maxCol ] = self . getExtentAddress ( zoom ) return ( maxCol - minCol + 1 ) * ( maxRow - minRow + 1 )
Returns the total number of tile at a given zoom level
45,599
def totalNumberOfTiles ( self , minZoom = None , maxZoom = None ) : "Return the total number of tiles for this instance extent" nbTiles = 0 minZoom = minZoom or 0 if maxZoom : maxZoom = maxZoom + 1 else : maxZoom = len ( self . RESOLUTIONS ) for zoom in xrange ( minZoom , maxZoom ) : nbTiles += self . numberOfTilesAtZoom ( zoom ) return nbTiles
Return the total number of tiles for this instance extent