idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
46,600
def alias_field(model, field):
    """Return the prefixed name of ``field``: "<Model>-<last lookup part>"."""
    *relations, last = field.split(LOOKUP_SEP)
    # Walk the relation chain so the prefix is the final related model's name.
    for part in relations:
        model = associate_model(model, part)
    return "{}-{}".format(model.__name__, last)
Return the prefix name of a field
46,601
def associate_model(model, field):
    """Return the model associated with a ForeignKey or ManyToMany relation."""
    class_field = model._meta.get_field(field)
    if hasattr(class_field, "field"):
        # Reverse relation descriptor: follow it back to the related model.
        return class_field.field.related.related_model
    return class_field.related_model
Return the model associated with the ForeignKey or ManyToMany relation
46,602
def get_formfield(model, field):
    """Return the formfield associated with ``field`` on ``model``."""
    class_field = model._meta.get_field(field)
    # Reverse relations expose the real field behind a ".field" attribute.
    source = class_field.field if hasattr(class_field, "field") else class_field
    formfield = source.formfield()
    if isinstance(formfield, ChoiceField):
        formfield.choices = class_field.get_choices()
    return formfield
Return the formfield associated with the field of the model
46,603
def get_q_object(self):
    """Build a Q object filtering the queryset from the request's GET values."""
    q_object = Q()
    for field in self.searchable_fields:
        values = self.request.GET.getlist(alias_field(self.model, field), None)
        # Lookup is the field plus its optional specification suffix.
        lookup = "{0}{1}".format(field, self.specifications.get(field, ''))
        mini_q = Q()
        for val in values:
            if val:
                # Several values for one field are OR-ed together.
                mini_q |= Q(**{lookup: val})
        # Different fields are AND-ed.
        q_object &= mini_q
    return q_object
Build Q object to filter the queryset
46,604
def get_search_form(self):
    """Return a sorted list of forms, one per model with searchable fields.

    Each form carries the fields belonging to one model, gets the model
    name as prefix, and is pre-filled from the current GET parameters.
    """
    magic_dico_form = self.get_dict_for_forms()
    forms = []
    # Snapshot of the GET parameters as (key, [values]) pairs.
    initial = list(self.request.GET.lists())
    for key, value in magic_dico_form.items():
        form = Form()
        model = value["model"]
        if not value["fields"]:
            # Model has no searchable field of its own: skip it.
            continue
        for field in value["fields"]:
            formfield = get_formfield(model, field)
            formfield.widget.attrs.update({'class': self.css_class})
            form.fields.update({field: formfield})
        initial_tmp = {}
        for k, vals in initial:
            # GET keys are prefixed "<Model>-<field>"; keep only this model's.
            tmp_list = k.split(model.__name__ + "-")
            if len(tmp_list) == 2:
                # Single value stays scalar; multiple values kept as a list
                # with blank entries dropped.
                list_val_tmp = vals[0] if len(vals) == 1 else [val for val in vals if val != '']
                initial_tmp[tmp_list[-1]] = list_val_tmp
        form.initial = initial_tmp
        form.prefix = model.__name__
        forms.append(form)
    return sorted(forms, key=lambda form: form.prefix)
Return list of form based on model
46,605
def get_dict_for_forms(self):
    """Build a dict mapping model names to their model and searchable fields.

    Shape: {"<Model>": {"model": Model, "fields": [...]}} for every model
    reachable from ``searchable_fields``; suitable for modelform_factory.
    """
    magic_dico = field_to_dict(self.searchable_fields)
    dico = {}
    def dict_from_fields_r(mini_dict, dico, model):
        # Recursive walk: leaf entries are plain fields, nested dicts are
        # relations to descend into.
        dico[str(model)] = {}
        dico[str(model)]["model"] = model
        dico[str(model)]["fields"] = []
        for key, value in mini_dict.items():
            if isinstance(value, bool):
                continue
            if value == EMPTY_DICT:
                # Leaf: a simple searchable field on this model.
                dico[str(model)]["fields"].append(key)
            elif EMPTY_DICT.items() <= value.items():
                # Field is both searchable itself AND a relation to follow.
                dico[str(model)]["fields"].append(key)
                model_tmp = associate_model(model, key)
                dict_from_fields_r(value, dico, model_tmp)
            else:
                # Pure relation: recurse into the related model only.
                model_tmp = associate_model(model, key)
                dict_from_fields_r(value, dico, model_tmp)
    if magic_dico:
        dict_from_fields_r(magic_dico, dico, self.model)
    return dico
Build a dictionary where searchable_fields are next to their model, to be used in modelform_factory
46,606
def parse(self):
    """Parse a git URL into a Parsed object; raise ParserError if invalid."""
    d = {
        'pathname': None,
        'protocols': self._get_protocols(),
        'protocol': 'ssh',
        'href': self._url,
        'resource': None,
        'user': None,
        'port': None,
        'name': None,
        'owner': None,
    }
    for regex in POSSIBLE_REGEXES:
        match = regex.search(self._url)
        if match:
            d.update(match.groupdict())
            break
    else:
        # No regex matched: the URL is not a recognised git URL form.
        raise ParserError("Invalid URL '{}'".format(self._url))
    return Parsed(**d)
Parses a GIT URL and returns an object . Raises an exception on invalid URL .
46,607
def setup_errors(app, error_template="error.html"):
    """Register a handler rendering ``error_template`` for every HTTP error."""
    def error_handler(error):
        if isinstance(error, HTTPException):
            description = error.get_description(request.environ)
            code = error.code
            name = error.name
        else:
            # Non-HTTP exception: present it as a generic 500.
            description = error
            code = 500
            name = "Internal Server Error"
        rendered = render_template(
            error_template,
            error=error,
            code=code,
            name=Markup(name),
            description=Markup(description),
        )
        return rendered, code

    for exception in default_exceptions:
        app.register_error_handler(exception, error_handler)
Add a handler for each of the available HTTP error responses .
46,608
def domain_search(self, domain=None, company=None, limit=None, offset=None,
                  emails_type=None, raw=False):
    """Return all the email addresses found for a given domain or company."""
    if not domain and not company:
        raise MissingCompanyError('You must supply at least a domain name or a company name')
    if domain:
        params = {'domain': domain, 'api_key': self.api_key}
    elif company:
        params = {'company': company, 'api_key': self.api_key}
    # Optional query refinements are only sent when provided.
    for key, value in (('limit', limit), ('offset', offset), ('type', emails_type)):
        if value:
            params[key] = value
    endpoint = self.base_endpoint.format('domain-search')
    return self._query_hunter(endpoint, params, raw=raw)
Return all the email addresses found for a given domain .
46,609
def email_finder(self, domain=None, company=None, first_name=None,
                 last_name=None, full_name=None, raw=False):
    """Find the email address of a person given their name and company/domain.

    :raises MissingCompanyError: if neither domain nor company is supplied.
    :raises MissingNameError: if no usable name is supplied.
    :return: (email, score) tuple, or the raw response when ``raw`` is True.
    """
    # Copy: mutating self.base_params in place would leak this call's
    # arguments into every later request sharing that dict.
    params = dict(self.base_params)
    if not domain and not company:
        raise MissingCompanyError('You must supply at least a domain name or a company name')
    if domain:
        params['domain'] = domain
    elif company:
        params['company'] = company
    if not (first_name and last_name) and not full_name:
        raise MissingNameError('You must supply a first name AND a last name OR a full name')
    if first_name and last_name:
        params['first_name'] = first_name
        params['last_name'] = last_name
    elif full_name:
        params['full_name'] = full_name
    endpoint = self.base_endpoint.format('email-finder')
    res = self._query_hunter(endpoint, params, raw=raw)
    if raw:
        return res
    return res['email'], res['score']
Find the email address of a person given their name and the company's domain.
46,610
def email_verifier(self, email, raw=False):
    """Verify the deliverability of the given email address."""
    params = {'email': email, 'api_key': self.api_key}
    endpoint = self.base_endpoint.format('email-verifier')
    return self._query_hunter(endpoint, params, raw=raw)
Verify the deliverability of a given email address.
46,611
def account_information(self, raw=False):
    """Return the account info for the api_key, adding a 'left' call count."""
    endpoint = self.base_endpoint.format('account')
    res = self._query_hunter(endpoint, self.base_params, raw=raw)
    if raw:
        return res
    # Convenience field: remaining calls in the current period.
    calls = res['calls']
    calls['left'] = calls['available'] - calls['used']
    return res
Gives the information about the account associated with the api_key .
46,612
def get_leads(self, offset=None, limit=None, lead_list_id=None, first_name=None,
              last_name=None, email=None, company=None, phone_number=None,
              twitter=None):
    """Return all the leads saved in the account, filtered by the given criteria."""
    args = locals()
    args_params = {key: value for key, value in args.items() if value is not None}
    args_params.pop('self')
    # Copy: updating self.base_params in place would leak this call's
    # filters into every later request sharing that dict.
    params = dict(self.base_params)
    params.update(args_params)
    endpoint = self.base_endpoint.format('leads')
    return self._query_hunter(endpoint, params)
Gives back all the leads saved in your account .
46,613
def get_lead(self, lead_id):
    """Fetch one specific lead saved on the account, by id."""
    endpoint = self.base_endpoint.format('leads/{}'.format(lead_id))
    return self._query_hunter(endpoint, self.base_params)
Get a specific lead saved on your account .
46,614
def create_lead(self, first_name, last_name, email=None, position=None,
                company=None, company_industry=None, company_size=None,
                confidence_score=None, website=None, country_code=None,
                postal_code=None, source=None, linkedin_url=None,
                phone_number=None, twitter=None, leads_list_id=None):
    """Create a lead on the account from the supplied attributes."""
    args = locals()
    # Keep only the attributes that were supplied; drop the instance itself.
    payload = {key: value for key, value in args.items() if value is not None}
    payload.pop('self')
    params = self.base_params
    endpoint = self.base_endpoint.format('leads')
    return self._query_hunter(endpoint, params, 'post', payload)
Create a lead on your account .
46,615
def get_leads_lists(self, offset=None, limit=None):
    """Return all the leads lists saved on the account."""
    # Copy: writing offset/limit into self.base_params in place would leak
    # them into every later request sharing that dict.
    params = dict(self.base_params)
    if offset:
        params['offset'] = offset
    if limit:
        params['limit'] = limit
    endpoint = self.base_endpoint.format('leads_lists')
    return self._query_hunter(endpoint, params)
Gives back all the leads lists saved on your account .
46,616
def create_leads_list(self, name, team_id=None):
    """Create a leads list named ``name`` (optionally attached to a team)."""
    payload = {'name': name}
    if team_id:
        payload['team_id'] = team_id
    endpoint = self.base_endpoint.format('leads_lists')
    return self._query_hunter(endpoint, self.base_params, 'post', payload)
Create a leads list .
46,617
def update_leads_list(self, leads_list_id, name, team_id=None):
    """Update the name (and optionally the team) of an existing leads list."""
    payload = {'name': name}
    if team_id:
        payload['team_id'] = team_id
    endpoint = self.base_endpoint.format('leads_lists/{}'.format(leads_list_id))
    return self._query_hunter(endpoint, self.base_params, 'put', payload)
Update a leads list .
46,618
def delete_leads_list(self, leads_list_id):
    """Delete the leads list with the given id from the account."""
    endpoint = self.base_endpoint.format('leads_lists/{}'.format(leads_list_id))
    return self._query_hunter(endpoint, self.base_params, 'delete')
Delete a leads list .
46,619
def to_int(s):
    """Convert a string to an integer, accepting underscore separators
    and Python numeric literals such as hex or scientific notation."""
    plain = s.replace('_', '')
    try:
        return int(plain)
    except ValueError:
        pass
    # Not a plain decimal: let the literal parser handle hex/float/etc.
    return int(ast.literal_eval(s))
converts a string to an integer
46,620
def dicts_from_lines(lines):
    """Yield dicts parsed from JSON lines, or from one multi-line document.

    If a line is not valid JSON on its own, the remainder of the input is
    treated as a single JSON document (object or list of objects).
    """
    it = iter(lines)
    for raw in it:
        stripped = raw.strip()
        if not stripped:
            continue
        try:
            yield json.loads(stripped, object_pairs_hook=OrderedDict)
        except json.decoder.JSONDecodeError:
            # Not line-delimited: consume the rest as one document.
            document = stripped + ''.join(it)
            parsed = json.loads(document, object_pairs_hook=OrderedDict)
            if isinstance(parsed, list):
                yield from parsed
            else:
                yield parsed
returns a generator producing dicts from json lines
46,621
def compose(*funcs):
    """Compose any number of unary functions right-to-left into one function."""
    def pair(outer, inner):
        return lambda *args, **kwargs: outer(inner(*args, **kwargs))
    return functools.reduce(pair, funcs)
Compose any number of unary functions into a single unary function .
46,622
def method_caller(method_name, *args, **kwargs):
    """Return a callable invoking ``method_name`` on its target with the
    captured positional and keyword arguments."""
    def call_method(target):
        return getattr(target, method_name)(*args, **kwargs)
    return call_method
Return a function that will call a named method on the target object with optional positional and keyword arguments .
46,623
def once(func):
    """Decorate ``func`` so it only ever runs once; later calls return the
    first result. ``wrapper.reset()`` clears the memo."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return wrapper.saved_result
        except AttributeError:
            wrapper.saved_result = func(*args, **kwargs)
            return wrapper.saved_result
    wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result')
    return wrapper
Decorate func so it is only ever called the first time.
46,624
def method_cache(method, cache_wrapper=None):
    """Wrap lru_cache so the cache data lives on the instance, not the class.

    :param method: the unbound method to cache
    :param cache_wrapper: caching decorator, defaults to ``lru_cache()``
    """
    cache_wrapper = cache_wrapper or lru_cache()
    def wrapper(self, *args, **kwargs):
        # Bind, wrap, then install the cached bound method on the instance so
        # subsequent attribute lookups bypass this wrapper entirely.
        bound_method = types.MethodType(method, self)
        cached_method = cache_wrapper(bound_method)
        setattr(self, method.__name__, cached_method)
        return cached_method(*args, **kwargs)
    # Special (dunder) methods can't be shadowed per-instance; they are
    # handled by _special_method_cache instead.
    return _special_method_cache(method, cache_wrapper) or wrapper
Wrap lru_cache to support storing the cache data in the object instances .
46,625
def _special_method_cache(method, cache_wrapper):
    """Per-instance caching for special (dunder) methods.

    Python looks dunders up on the type, so the usual per-instance
    shadowing does not work; instead, proxy through a hidden
    '__cached<name>' attribute on the instance. Returns None for
    methods that are not in the supported special set.
    """
    name = method.__name__
    if name not in ('__getattr__', '__getitem__'):
        return None
    wrapper_name = '__cached' + name

    def proxy(self, *args, **kwargs):
        try:
            cache = vars(self)[wrapper_name]
        except KeyError:
            # First call on this instance: build and stash the cache.
            cache = cache_wrapper(types.MethodType(method, self))
            setattr(self, wrapper_name, cache)
        return cache(*args, **kwargs)

    return proxy
Because Python treats special methods differently, it is not possible to use instance attributes to implement the cached methods.
46,626
def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
    """Call ``func`` up to ``retries`` extra times, trapping ``trap``.

    ``cleanup`` runs after each trapped failure. On the final attempt any
    exception propagates. ``retries=float('inf')`` retries forever.
    """
    attempts = count() if retries == float('inf') else range(retries)
    for _ in attempts:
        try:
            return func()
        except trap:
            cleanup()
    # Last attempt: exceptions are allowed to propagate.
    return func()
Given a callable func trap the indicated exceptions for up to retries times invoking cleanup on the exception . On the final attempt allow any exceptions to propagate .
46,627
def retry(*r_args, **r_kwargs):
    """Decorator form of ``retry_call``: accepts its arguments (except func)
    and returns a decorator for the target function."""
    def decorate(func):
        @functools.wraps(func)
        def wrapper(*call_args, **call_kwargs):
            bound = functools.partial(func, *call_args, **call_kwargs)
            return retry_call(bound, *r_args, **r_kwargs)
        return wrapper
    return decorate
Decorator wrapper for retry_call . Accepts arguments to retry_call except func and then returns a decorator for the decorated function .
46,628
def print_yielded(func):
    """Convert a generator function into one that prints every yielded element."""
    print_all = functools.partial(map, print)
    # generator -> map(print, ...) -> consume: drains while printing.
    pipeline = compose(more_itertools.recipes.consume, print_all, func)
    return functools.wraps(func)(pipeline)
Convert a generator into a function that prints all yielded elements
46,629
def pass_none(func):
    """Wrap ``func`` so it is skipped (returning None) when its first
    argument is None."""
    @functools.wraps(func)
    def wrapper(param, *args, **kwargs):
        if param is None:
            return None
        return func(param, *args, **kwargs)
    return wrapper
Wrap func so it is not called if its first param is None
46,630
def assign_params(func, namespace):
    """Partially bind ``func`` with the entries of ``namespace`` whose keys
    match its parameter names."""
    try:
        params = inspect.signature(func).parameters.keys()
    except AttributeError:
        # Interpreters/callables without inspect.signature support.
        params = inspect.getargspec(func).args
    call_ns = {k: namespace[k] for k in params if k in namespace}
    return functools.partial(func, **call_ns)
Assign parameters from namespace where func solicits .
46,631
def save_method_args(method):
    """Wrap a method so each call's (args, kwargs) are stored on the instance
    under ``_saved_<method name>``."""
    args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')

    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        # Record the call before delegating, so the method sees it too.
        setattr(self, '_saved_' + method.__name__,
                args_and_kwargs(args, kwargs))
        return method(self, *args, **kwargs)

    return wrapper
Wrap a method such that when it is called the args and kwargs are saved on the method .
46,632
def row_col_maker(app, fromdocname, all_needs, need_info, need_key, make_ref=False, ref_lookup=False, prefix=''):
    """Create and return a docutils table column (entry node) for a need value.

    :param app: Sphinx application
    :param fromdocname: document containing the table (for relative links)
    :param all_needs: mapping of need id -> need info, used when ref_lookup
    :param need_info: the need whose value is rendered
    :param need_key: key of the value to render
    :param make_ref: render each value as a link to this need
    :param ref_lookup: treat each value as a need id and link to that need
    :param prefix: text prepended to each rendered value
    """
    row_col = nodes.entry()
    para_col = nodes.paragraph()
    if need_key in need_info and need_info[need_key] is not None:
        # Normalise scalar values to a one-element list.
        if not isinstance(need_info[need_key], (list, set)):
            data = [need_info[need_key]]
        else:
            data = need_info[need_key]
        for index, datum in enumerate(data):
            link_id = datum
            link_part = None
            if need_key in ['links', 'back_links']:
                # "<id>.<part>" links address a part inside a need.
                if '.' in datum:
                    link_id = datum.split('.')[0]
                    link_part = datum.split('.')[1]
            datum_text = prefix + datum
            text_col = nodes.Text(datum_text, datum_text)
            if make_ref or ref_lookup:
                try:
                    ref_col = nodes.reference("", "")
                    if not ref_lookup:
                        # Link to this need's own document anchor.
                        ref_col['refuri'] = app.builder.get_relative_uri(fromdocname, need_info['docname'])
                        ref_col['refuri'] += "#" + datum
                    else:
                        # Resolve the referenced need and link to its anchor.
                        temp_need = all_needs[link_id]
                        ref_col['refuri'] = app.builder.get_relative_uri(fromdocname, temp_need['docname'])
                        ref_col['refuri'] += "#" + temp_need["id"]
                        if link_part is not None:
                            ref_col['refuri'] += '.' + link_part
                except KeyError:
                    # Unknown need id: fall back to plain text.
                    para_col += text_col
                else:
                    ref_col.append(text_col)
                    para_col += ref_col
            else:
                para_col += text_col
            if index + 1 < len(data):
                # Separator between multiple values.
                para_col += nodes.emphasis("; ", "; ")
    row_col += para_col
    return row_col
Creates and returns a column .
46,633
def insert_blob(filename, hosts=None, table=None):
    """Upload a file into a blob table and return the blob's URL."""
    conn = connect(hosts)
    container = conn.get_blob_container(table)
    with open(filename, 'rb') as f:
        digest = container.put(f)
    server = conn.client.active_servers[0]
    return '{server}/_blobs/{table}/{digest}'.format(
        server=server, table=table, digest=digest)
Upload a file into a blob table
46,634
def update_dois(csv_source, write_file=True):
    """Fetch DOI publication info for a batch of DOIs listed in a CSV file.

    The CSV must have one DOI per row in the first column. Results are
    written to a JSON file next to the CSV, or printed when
    ``write_file`` is False.
    """
    dois = []
    with open(csv_source, "r") as f:
        for row in csv.reader(f):
            dois.append(row[0])
    results = [_update_doi(doi) for doi in dois]
    if write_file:
        new_filename = os.path.splitext(csv_source)[0]
        write_json_to_file(results, new_filename)
    else:
        print(json.dumps(results, indent=2))
    return
Get DOI publication info for a batch of DOIs . This is LiPD - independent and only requires a CSV file with all DOIs listed in a single column . The output is LiPD - formatted publication data for each entry .
46,635
def timeit(hosts=None, stmt=None, warmup=30, repeat=None, duration=None, concurrency=1, output_fmt=None, fail_if=None, sample_mode='reservoir'):
    """Run each given statement a number of times and log runtime statistics.

    Statements come from ``stmt`` or stdin. Each is warmed up ``warmup``
    times, then timed for ``repeat`` iterations or ``duration`` seconds.
    Raises SystemExit if no statements were provided.
    """
    num_lines = 0
    log = Logger(output_fmt)
    with Runner(hosts, concurrency, sample_mode) as runner:
        version_info = aio.run(runner.client.get_server_version)
        for line in as_statements(lines_from_stdin(stmt)):
            runner.warmup(line, warmup)
            timed_stats = runner.run(line, iterations=repeat, duration=duration)
            r = Result(
                version_info=version_info,
                statement=line,
                timed_stats=timed_stats,
                concurrency=concurrency
            )
            log.result(r)
            if fail_if:
                # Evaluate the user-supplied failure condition against the result.
                eval_fail_if(fail_if, r)
            num_lines += 1
    if num_lines == 0:
        raise SystemExit('No SQL statements provided. Use --stmt or provide statements via stdin')
Run the given statement a number of times and return the runtime stats
46,636
def wait_until(predicate, timeout=30):
    """Poll ``predicate`` until it returns a truthy value or the timeout expires."""
    not_expired = Timeout(timeout)
    while not_expired():
        if predicate():
            break
Wait until predicate returns a truthy value or the timeout is reached .
46,637
def _find_matching_version(versions, version_pattern):
    """Return the first version matching the pattern ('x' acts as a wildcard),
    or None when nothing matches."""
    pattern = fnmatch.translate(version_pattern.replace('x', '*'))
    for version in versions:
        if re.match(pattern, version):
            return version
    return None
Return the first matching version
46,638
def _build_tarball(src_repo) -> str:
    """Build a release tarball from ``src_repo`` and return its path."""
    run = partial(subprocess.run, cwd=src_repo, check=True)
    run(['git', 'clean', '-xdff'])
    src_repo = Path(src_repo)
    # Only initialise the submodule when it exists in this checkout.
    if os.path.exists(src_repo / 'es' / 'upstream'):
        run(['git', 'submodule', 'update', '--init', '--', 'es/upstream'])
    run(['./gradlew', '--no-daemon', 'clean', 'distTar'])
    dist_dir = Path(src_repo) / 'app' / 'build' / 'distributions'
    return next(dist_dir.glob('crate-*.tar.gz'))
Build a tarball from src and return the path to it
46,639
def _crates_cache() -> str:
    """Return the path to the crates cache folder, honouring XDG_CACHE_HOME."""
    default = os.path.join(os.path.expanduser('~'), '.cache', 'cr8', 'crates')
    return os.environ.get('XDG_CACHE_HOME', default)
Return the path to the crates cache folder
46,640
def get_crate(version, crate_root=None):
    """Retrieve a Crate tarball, extract it, and return the extracted path.

    ``version`` may be a project checkout, a release-branch name, or a
    version spec to download.
    """
    crate_root = crate_root or _crates_cache()
    _remove_old_crates(crate_root)
    if _is_project_repo(version):
        # "version" points at a source checkout: build straight from source.
        return _extract_tarball(_build_tarball(version))
    m = BRANCH_VERSION_RE.match(version)
    if m:
        return _build_from_release_branch(m.group(0), crate_root)
    uri = _lookup_uri(version)
    return _download_and_extract(uri, crate_root)
Retrieve a Crate tarball extract it and return the path .
46,641
def _parse_options(options: List[str]) -> Dict[str, str]:
    """Parse repeatable "<key>=<value>" CLI options into a dict."""
    result = {}
    for item in options:
        key, sep, value = item.partition('=')
        if not sep:
            # No '=' present: same failure mode as the dict() construction.
            raise ArgumentError(f'Option must be in format <key>=<value>, got: {options}')
        result[key] = value
    return result
Parse repeatable CLI options
46,642
def run_crate(version, env=None, setting=None, crate_root=None,
              keep_data=False, disable_java_magic=False):
    """Launch a Crate instance and block until it exits or Ctrl-C."""
    node_cm = create_node(
        version,
        env,
        setting,
        crate_root,
        keep_data,
        java_magic=not disable_java_magic,
    )
    with node_cm as n:
        try:
            n.start()
            n.process.wait()
        except KeyboardInterrupt:
            print('Stopping Crate...')
Launch a crate instance .
46,643
def _parse(line):
    """Extract (protocol, bound address) from a log line; (None, None) if absent."""
    match = AddrConsumer.ADDRESS_RE.match(line)
    if match is None:
        return None, None
    protocol = match.group('protocol')
    # Map the raw protocol token to its canonical name when known.
    protocol = AddrConsumer.PROTOCOL_MAP.get(protocol, protocol)
    return protocol, match.group('addr')
Parse protocol and bound address from log message
46,644
def _calc_block_mean_variance(image, mask, blocksize):
    """Adaptively estimate the image background.

    Computes the per-block mean where the block's std-dev exceeds the
    threshold, inpaints the low-variance blocks, and resizes the result
    back to the original image size.
    """
    I = image.copy()
    I_f = I.astype(np.float32) / 255.
    # Integer division (//) and range(): the original py2 `/` and `xrange`
    # break on Python 3 (float shapes, NameError).
    result = np.zeros((image.shape[0] // blocksize, image.shape[1] // blocksize),
                      dtype=np.float32)
    for i in range(0, image.shape[0] - blocksize, blocksize):
        for j in range(0, image.shape[1] - blocksize, blocksize):
            patch = I_f[i:i + blocksize + 1, j:j + blocksize + 1]
            mask_patch = mask[i:i + blocksize + 1, j:j + blocksize + 1]
            tmp1 = np.zeros((blocksize, blocksize))
            tmp2 = np.zeros((blocksize, blocksize))
            mean, std_dev = cv2.meanStdDev(patch, tmp1, tmp2, mask_patch)
            # Keep the block mean only where there is enough variance.
            value = 0
            if std_dev[0][0] > MEAN_VARIANCE_THRESHOLD:
                value = mean[0][0]
            result[i // blocksize, j // blocksize] = value
    small_image = cv2.resize(I, (image.shape[1] // blocksize,
                                 image.shape[0] // blocksize))
    res, inpaintmask = cv2.threshold(result, 0.02, 1, cv2.THRESH_BINARY)
    inpainted = cv2.inpaint(small_image, inpaintmask.astype(np.uint8), 5,
                            cv2.INPAINT_TELEA)
    res = cv2.resize(inpainted, (image.shape[1], image.shape[0]))
    return res
Adaptively determines image background .
46,645
def threshold(image, block_size=DEFAULT_BLOCKSIZE, mask=None):
    """Apply adaptive (background-compensated) thresholding to an image."""
    if mask is None:
        # Default mask selects every pixel.
        mask = np.full(image.shape[:2], 255, dtype=np.uint8)
    if len(image.shape) > 2 and image.shape[2] == 4:
        image = cv2.cvtColor(image, cv2.COLOR_BGRA2GRAY)
    background = _calc_block_mean_variance(image, mask, block_size)
    res = image.astype(np.float32) - background.astype(np.float32) + 255
    _, res = cv2.threshold(res, 215, 255, cv2.THRESH_BINARY)
    return res
Applies adaptive thresholding to the given image .
46,646
def setup(dist, attr, value):
    """Distutils hook: set the distribution version via vcversioner."""
    version = find_version(**value).version
    dist.metadata.version = version
A hook for simplifying vcversioner use from distutils .
46,647
def to_insert(table, d):
    """Generate an insert statement for ``table`` from dict ``d``.

    Returns (statement, args) with one '?' placeholder per column.
    """
    columns = ['"{}"'.format(key) for key in d]
    args = list(d.values())
    stmt = 'insert into {table} ({columns}) values ({params})'.format(
        table=table,
        columns=', '.join(columns),
        params=', '.join(['?'] * len(columns)))
    return (stmt, args)
Generate an insert statement using the given table and dictionary .
46,648
def insert_json(table=None, bulk_size=1000, concurrency=25, hosts=None, output_fmt=None):
    """Read JSON lines from stdin and bulk-insert them into a Crate cluster.

    Without ``hosts`` the insert statements are only printed. Exits with an
    error on SQL failures or when nothing was received on stdin.
    """
    if not hosts:
        return print_only(table)
    # Lazily build (stmt, args) pairs and group them into bulk requests.
    queries = (to_insert(table, d) for d in dicts_from_stdin())
    bulk_queries = as_bulk_queries(queries, bulk_size)
    print('Executing inserts: bulk_size={} concurrency={}'.format(
        bulk_size, concurrency), file=sys.stderr)
    stats = Stats()
    with clients.client(hosts, concurrency=concurrency) as client:
        f = partial(aio.measure, stats, client.execute_many)
        try:
            aio.run_many(f, bulk_queries, concurrency)
        except clients.SqlException as e:
            raise SystemExit(str(e))
    try:
        print(format_stats(stats.get(), output_fmt))
    except KeyError:
        # Stats are empty when no data arrived on stdin.
        if not stats.sampler.values:
            raise SystemExit('No data received via stdin')
        raise
Insert JSON lines fed into stdin into a Crate cluster .
46,649
def _get_dominant_angle(lines, domination_type=MEDIAN):
    """Pick the dominant (median or mean) angle of a set of lines."""
    if domination_type == MEDIAN:
        return _get_median_angle(lines)
    if domination_type == MEAN:
        return _get_mean_angle(lines)
    raise ValueError('Unknown domination type provided: %s' % (domination_type))
Picks dominant angle of a set of lines .
46,650
def _normalize_angle(angle, range, step):
    """Shift ``angle`` by multiples of ``step`` into the interval ``range``.

    NOTE: the ``range`` parameter shadows the builtin; kept for interface
    compatibility with existing callers.
    """
    low, high = range[0], range[1]
    while angle <= low:
        angle += step
    while angle >= high:
        angle -= step
    return angle
Finds an angle that matches the given one modulo step .
46,651
def get_collectors(self, limit=1000, offset=0):
    """Return the list of collectors (empty list when the body is not JSON)."""
    options = {'limit': limit, 'offset': offset}
    request = requests.get(self.url, params=options, auth=self.auth)
    try:
        results = request.json()['collectors']
    except KeyError:
        # Response decoded but has no 'collectors' key: return it as-is.
        results = request.json()
    except json.decoder.JSONDecodeError:
        results = []
    return results
Returns a dict of collectors .
46,652
def find(self, name):
    """Return the details of the collector matching ``name`` (case-insensitive).

    Also remembers the matching collector's id on the instance.
    """
    target = name.lower()
    for collector in self.get_collectors():
        if target == collector['name'].lower():
            self.collector_id = collector['id']
            return collector
    return {'status': 'No results found.'}
Returns a dict of the collector's details if found.
46,653
def delete(self, collector_id=None):
    """Delete a collector (the stored one unless ``collector_id`` is given)."""
    cid = self.collector_id
    if collector_id:
        cid = collector_id
    request = requests.delete('{0}/{1}'.format(self.url, cid), auth=self.auth)
    try:
        response = request.json()
    except ValueError:
        # An empty body means the delete succeeded.
        response = {
            u'message': u'The request completed successfully.',
            u'status': 200,
        }
    return response
Delete a collector from inventory .
46,654
def info(self, collector_id=None):
    """Return the details dict of a collector.

    Falls back to the collector id stored on the instance when
    ``collector_id`` is not supplied — the body already supported this,
    but the parameter lacked a default (now consistent with ``delete``).
    """
    cid = self.collector_id
    if collector_id:
        cid = collector_id
    url = '{0}/{1}'.format(self.url, cid)
    request = requests.get(url, auth=self.auth)
    return request.json()
Return a dict of collector .
46,655
def _bulk_size_generator(num_records, bulk_size, active):
    """Yield chunk sizes until ``num_records`` is exhausted or ``active`` is falsy.

    ``active`` is a mutable flag (e.g. a list cleared by a signal handler);
    it is re-checked before every chunk so generation can be aborted.
    """
    remaining = num_records
    while active and remaining > 0:
        chunk = min(remaining, bulk_size)
        remaining -= chunk
        yield chunk
Generate bulk_size until num_records is reached or active becomes false
46,656
def insert_fake_data(hosts=None, table=None, num_records=1e5, bulk_size=1000, concurrency=25, mapping_file=None):
    """Generate random rows matching the table schema and bulk-insert them.

    Looks up the column types, builds a row generator (optionally guided by
    ``mapping_file``) and runs the inserts with the given concurrency.
    SIGINT aborts generation gracefully (non-Windows only).
    """
    with clients.client(hosts, concurrency=1) as client:
        schema, table_name = parse_table(table)
        columns = retrieve_columns(client, schema, table_name)
    if not columns:
        sys.exit('Could not find columns for table "{}"'.format(table))
    print('Found schema: ')
    print(json.dumps(columns, sort_keys=True, indent=4))
    mapping = None
    if mapping_file:
        mapping = json.load(mapping_file)
    bulk_size = min(num_records, bulk_size)
    num_inserts = int(math.ceil(num_records / bulk_size))
    gen_row = create_row_generator(columns, mapping)
    stmt = to_insert('"{schema}"."{table_name}"'.format(**locals()), columns)[0]
    print('Using insert statement: ')
    print(stmt)
    print('Will make {} requests with a bulk size of {}'.format(num_inserts, bulk_size))
    print('Generating fake data and executing inserts')
    # Bounded queue provides back-pressure between generation and inserts.
    q = asyncio.Queue(maxsize=concurrency)
    with clients.client(hosts, concurrency=concurrency) as client:
        # Mutable flag: cleared by the SIGINT handler to stop generation.
        active = [True]
        def stop():
            asyncio.ensure_future(q.put(None))
            active.clear()
            loop.remove_signal_handler(signal.SIGINT)
        if sys.platform != 'win32':
            loop.add_signal_handler(signal.SIGINT, stop)
        bulk_seq = _bulk_size_generator(num_records, bulk_size, active)
        with ThreadPoolExecutor() as e:
            tasks = asyncio.gather(
                _gen_data_and_insert(q, e, client, stmt, gen_row, bulk_seq),
                consume(q, total=num_inserts))
            loop.run_until_complete(tasks)
Generate random data and insert it into a table .
46,657
def cast_values_csvs(d, idx, x):
    """Append ``x`` (cast to float when possible) to column ``idx`` of ``d``."""
    try:
        value = float(x)
    except ValueError:
        # Not numeric: keep the original string.
        value = x
    try:
        d[idx].append(value)
    except KeyError as e:
        logger_misc.warn("cast_values_csv: KeyError: col: {}, {}".format(x, e))
    return d
Attempt to cast string to float . If error keep as a string .
46,658
def cast_float(x):
    """Cast to float if possible; otherwise strip the string. Non-strings
    that can't be converted are returned unchanged (with a warning)."""
    try:
        return float(x)
    except ValueError:
        pass
    try:
        return x.strip()
    except AttributeError as e:
        logger_misc.warn("parse_str: AttributeError: String not number or word, {}, {}".format(x, e))
    return x
Attempt to cleanup string or convert to number value .
46,659
def cast_int(x):
    """Cast to int if possible; otherwise strip the string. Non-strings
    that can't be converted are returned unchanged (with a warning)."""
    try:
        return int(x)
    except ValueError:
        pass
    try:
        return x.strip()
    except AttributeError as e:
        logger_misc.warn("parse_str: AttributeError: String not number or word, {}, {}".format(x, e))
    return x
Cast unknown type into integer
46,660
def decimal_precision(row):
    """Round every numeric value in ``row`` to 3 decimal places.

    Scientific-notation strings keep their exponent; other values are
    stringified then converted back where possible. Returns a tuple.
    """
    try:
        row = list(row)
        for idx, x in enumerate(row):
            x = str(x)
            m = re.match(re_sci_notation, x)
            if m:
                # Rebuild sci-notation with the mantissa rounded to 3 places.
                mantissa = round(float(m.group(2)), 3)
                x = m.group(1) + str(mantissa)[1:] + m.group(3)
            else:
                try:
                    x = round(float(x), 3)
                except (ValueError, TypeError):
                    # Non-numeric: keep the stringified value.
                    pass
            row[idx] = x
        row = tuple(row)
    except Exception as e:
        print("Error: Unable to fix the precision of values. File size may be larger than normal, {}".format(e))
    return row
Change the precision of values before writing to CSV. Each value is rounded to 3 decimal places.
46,661
def fix_coordinate_decimal(d):
    """Round geo coordinates to 5 decimal places.

    Excel-derived decimal degrees are often over-long repeating decimals.
    """
    try:
        coords = d["geo"]["geometry"]["coordinates"]
        for idx, n in enumerate(coords):
            coords[idx] = round(n, 5)
    except Exception as e:
        logger_misc.error("fix_coordinate_decimal: {}".format(e))
    return d
Coordinate decimal degrees calculated by an excel formula are often too long as a repeating decimal . Round them down to 5 decimals
46,662
def generate_timestamp(fmt=None):
    """Return the current time formatted with ``fmt``, or today's date
    when no format is given. Always returns a string."""
    if fmt:
        return str(dt.datetime.now().strftime(fmt))
    return str(dt.date.today())
Generate a timestamp to mark when this file was last modified .
46,663
def get_appended_name(name, columns):
    """Append "-N" to ``name`` until it no longer collides with ``columns``.

    Bails out after 10 attempts (with a warning) to avoid an endless loop;
    the fallback suffix is "-99".
    """
    attempt = 0
    while name in columns:
        attempt += 1
        if attempt > 10:
            logger_misc.warn("get_appended_name: Too many loops: Tried to get appended name but something looks wrong")
            break
        candidate = "{}-{}".format(name, attempt)
        if candidate not in columns:
            return candidate
    return name + "-99"
Append numbers to a name until it no longer conflicts with the other names in a column. Necessary to avoid overwriting columns and losing data. Loop a preset amount of times to avoid an infinite loop. There shouldn't ever be more than two or three identical variable names in a table.
46,664
def get_authors_as_str(x):
    """Concatenate author/investigator data into "Name; Name; ..." form.

    Accepts a plain string, a list of name strings, or a list of
    {"name": ...} dicts; anything else (or malformed entries) yields "".
    """
    if isinstance(x, str):
        return x
    _authors = ""
    if isinstance(x, list):
        if not x:
            # Guard: the original indexed x[0] and crashed on empty lists.
            return _authors
        if isinstance(x[0], str):
            for name in x[:-1]:
                _authors += str(name) + "; "
            _authors += str(x[-1])
        elif isinstance(x[0], dict):
            try:
                for entry in x[:-1]:
                    _authors += str(entry["name"]) + "; "
                _authors += str(x[-1]["name"])
            except KeyError:
                logger_misc.warn("get_authors_as_str: KeyError: Authors incorrect data structure")
    else:
        logger_misc.debug("get_authors_as_str: TypeError: author/investigators isn't str or list: {}".format(type(x)))
    return _authors
Take author or investigator data and convert it to a concatenated string of names . Author data structure has a few variations so account for all .
46,665
def get_dsn(d):
    """Return the dataset name from a record; exit the program if missing."""
    try:
        return d["dataSetName"]
    except Exception as e:
        logger_misc.warn("get_dsn: Exception: No datasetname found, unable to continue: {}".format(e))
        exit(1)
Get the dataset name from a record
46,666
def get_ensemble_counts(d):
    """Determine whether this is a 1- or 2-column ensemble, then count how
    many rows and columns of values it holds.

    :param dict d: Ensemble column data keyed by variable name
    :return dict: {"rows": n, "cols": n}
    """
    _counts = {"rows": 0, "cols": 0}
    try:
        if len(d) == 1:
            # Single entry: "values" holds a list of column lists.
            data = next(iter(d.values()))
            _counts["cols"] = len(data["values"])
            _counts["rows"] = len(data["values"][0])
        elif len(d) == 2:
            # Two entries: a "number" that is a list marks the multi-column
            # ensemble block; a scalar marks a single column.
            for data in d.values():
                if isinstance(data["number"], list):
                    _counts["cols"] += len(data["values"])
                else:
                    _counts["cols"] += 1
                _counts["rows"] = len(data["values"])
    except Exception as e:
        logger_misc.warn("get_ensemble_counts: {}".format(e))
    return _counts
Determine if this is a 1 or 2 column ensemble . Then determine how many columns and rows it has .
46,667
def get_missing_value_key(d):
    """Get the missing-value entry from a table of data. If the table root
    has none, borrow it from the first column that carries one.

    :param dict d: Table metadata
    :return str: Missing value (defaults to "nan")
    """
    _mv = "nan"
    try:
        _mv = d["missingValue"]
    except KeyError as e:
        logger_misc.info("get_missing_value: No missing value key found: {}".format(e))
    except AttributeError as e:
        logger_misc.warn("get_missing_value: Column is wrong data type: {}".format(e))
    if not _mv:
        # Root entry was empty: fall back to the first column's value.
        try:
            for _col in d["columns"].values():
                _mv = _col["missingValue"]
                break
        except KeyError:
            pass
    return _mv
Get the Missing Value entry from a table of data. If none is found, try the columns. If still none is found, prompt the user.
46,668
def get_variable_name_col(d):
    """Get the variable name from a table or column.

    :param dict d: Column metadata
    :return str: Variable name ("" when missing)
    """
    # "variableName" is the canonical key; some records use "name".
    for key in ("variableName", "name"):
        if key in d:
            return d[key]
    num = d["number"] if "number" in d else "unknown"
    print("Error: column number <{}> is missing a variableName. Please fix.".format(num))
    logger_misc.info("get_variable_name_col: KeyError: missing key")
    return ""
Get the variable name from a table or column
46,669
def get_table_key(key, d, fallback=""):
    """Try to read *key* from a data table, returning *fallback* when absent.

    :param str key: Key to look up
    :param dict d: Table metadata
    :param str fallback: Value to use when the key is missing
    :return any: The value found, or *fallback*
    """
    if key in d:
        return d[key]
    logger_misc.info("get_variable_name_table: KeyError: missing {}, use name: {}".format(key, fallback))
    return fallback
Try to get a table name from a data table
46,670
def load_fn_matches_ext(file_path, file_type):
    """Check that the file extension matches the target extension given.

    :param str file_path: Path to the file being loaded
    :param str file_type: Extension expected by the calling load function
    :return bool: True when the extensions are compatible
    """
    correct_ext = False
    curr_ext = os.path.splitext(file_path)[1]
    try:
        # ".xls" and ".xlsx" are interchangeable for the Excel loader.
        if {".xlsx", ".xls"} <= {curr_ext, file_type}:
            correct_ext = True
        elif curr_ext == file_type:
            correct_ext = True
        else:
            print("Use '{}' to load this file: {}".format(FILE_TYPE_MAP[curr_ext]["load_fn"],
                                                          os.path.basename(file_path)))
    except Exception as e:
        logger_misc.debug("load_fn_matches_ext: {}".format(e))
    return correct_ext
Check that the file extension matches the target extension given .
46,671
def match_operators(inp, relate, cut):
    """Compare two items by mapping a string operator to an operator function.

    :param any inp: Left operand
    :param str relate: One of '>', '<', '>=', '<=', '='
    :param any cut: Right operand
    :return bool: Comparison result (False for an unknown operator)
    """
    logger_misc.info("enter match_operators")
    # Dispatch table: string operator -> stdlib operator function.
    ops = {'>': operator.gt, '<': operator.lt, '>=': operator.ge, '<=': operator.le, '=': operator.eq}
    try:
        truth = ops[relate](inp, cut)
    except KeyError as e:
        # Unknown operator string: report it and treat the comparison as False.
        truth = False
        logger_misc.warn("get_truth: KeyError: Invalid operator input: {}, {}".format(relate, e))
    logger_misc.info("exit match_operators")
    return truth
Compare two items . Match a string operator to an operator function
46,672
def match_arr_lengths(l):
    """Check that all the array lengths match so that a DataFrame can be
    created successfully.

    :param list l: List of lists (column values)
    :return bool: True when every inner list has the same length
    """
    try:
        target = len(l[0])
        return all(len(inner) == target for inner in l)
    except IndexError:
        # Empty outer list: nothing to compare against.
        print("Error: Array data is not formatted correctly.")
        return False
    except TypeError:
        print("Error: Array data missing")
        return False
Check that all the array lengths match so that a DataFrame can be created successfully .
46,673
def mv_files(src, dst):
    """Move all files from one directory to another.

    :param str src: Source directory
    :param str dst: Destination directory
    :return none:
    """
    for name in os.listdir(src):
        shutil.move(os.path.join(src, name), os.path.join(dst, name))
Move all files from one directory to another
46,674
def normalize_name(s):
    """Remove foreign accents and characters to normalize the string.
    Prevents encoding errors.

    :param str s: Name to normalize
    :return str: ASCII-only version of the name
    """
    # Decompose accented characters, then drop anything outside ASCII.
    normalized = unicodedata.normalize('NFKD', s).encode('ascii', 'ignore')
    # Bug fix: the original used str(bytes)[2:-1], which goes through the
    # bytes repr and leaks escape sequences (a literal backslash came back
    # doubled). Decode the bytes properly instead.
    return normalized.decode('ascii')
Remove foreign accents and characters to normalize the string . Prevents encoding errors .
46,675
def path_type(path, target):
    """Determine whether the given path is of the expected type
    (file or directory).

    :param str path: Path to check
    :param str target: "file" or "directory"
    :return bool: True when the path exists and matches the target type
    """
    is_match = ((target == "file" and os.path.isfile(path)) or
                (target == "directory" and os.path.isdir(path)))
    if not is_match:
        print("Error: Path given is not a {}: {}".format(target, path))
    return is_match
Determine if given path is file directory or other . Compare with target to see if it s the type we wanted .
46,676
def print_filename(path):
    """Return the LiPD filename being read or written; fall back to the
    full path when there is no basename (e.g. the path ends in a separator).

    :param str path: File path
    :return str: Basename, or the original path
    """
    base = os.path.basename(path)
    return base if base else path
Print out lipd filename that is being read or written
46,677
def prompt_protocol():
    """Prompt the user whether they would like to save the pickle file as a
    dictionary or an object.

    :return str: "d" for dictionary, "o" for object (defaults to "d")
    """
    stop = 3
    ans = ""
    # Bug fix: the retry counter was never decremented, so invalid input
    # looped forever and the "d" default below was unreachable.
    while stop > 0:
        ans = input("Save as (d)ictionary or (o)bject?\n"
                    "* Note:\n"
                    "Dictionaries are more basic, and are compatible with Python v2.7+.\n"
                    "Objects are more complex, and are only compatible with v3.4+ ")
        if ans in ("d", "o"):
            break
        print("Invalid response: Please choose 'd' or 'o'")
        stop -= 1
    if ans not in ("d", "o"):
        # Three invalid attempts: fall back to the simpler dictionary format.
        ans = "d"
    return ans
Prompt user if they would like to save pickle file as a dictionary or an object .
46,678
def rm_empty_doi(d):
    """If an identifier dictionary has no DOI id, then it has no use —
    delete it.

    :param dict d: Metadata
    :return dict: Metadata with empty identifiers removed
    """
    logger_misc.info("enter remove_empty_doi")
    try:
        for pub in d['pub']:
            if 'identifier' not in pub:
                continue
            # Drop the identifier when the id field is absent or empty.
            if 'id' not in pub['identifier'][0] or pub['identifier'][0]['id'] in EMPTY:
                del pub['identifier']
    except KeyError as e:
        logger_misc.warn("remove_empty_doi: KeyError: publication key not found, {}".format(e))
    logger_misc.info("exit remove_empty_doi")
    return d
If an identifier dictionary has no doi ID then it has no use . Delete it .
46,679
def rm_files(path, extension):
    """Remove all files in the given directory with the given extension.

    :param str path: Directory to clean
    :param str extension: File extension to match
    :return none:
    """
    for filename in list_files(extension, path):
        if filename.endswith(extension):
            os.remove(os.path.join(path, filename))
Remove all files in the given directory with the given extension
46,680
def rm_missing_values_table(d):
    """Loop over each table column and remove the missingValue key & data.

    :param dict d: Table metadata
    :return dict: Table metadata without per-column missingValue entries
    """
    try:
        columns = d["columns"]
        for name, col in columns.items():
            columns[name] = rm_keys_from_dict(col, ["missingValue"])
    except Exception:
        # No columns (or unexpected structure): nothing to strip.
        pass
    return d
Loop for each table column and remove the missingValue key & data
46,681
def rm_keys_from_dict(d, keys):
    """Remove the given keys (when present) from a dictionary.

    :param dict d: Data
    :param list keys: Keys to remove
    :return dict: The same dictionary, mutated in place
    """
    for key in keys:
        # pop with a default never raises, so no membership guard is needed.
        d.pop(key, None)
    return d
Given a dictionary and a key list remove any data in the dictionary with the given keys .
46,682
def _replace_missing_values_table(values, mv):
    """Receive all table column values as a list of lists and replace
    missing values in each column, in place.

    :param list values: Table values (list of column lists)
    :param any mv: The table's declared missing value
    :return list: The same list, with each column cleaned
    """
    for i in range(len(values)):
        values[i] = _replace_missing_values_column(values[i], mv)
    return values
Receive all table column values as a list of lists . Loop for each column of values
46,683
def _replace_missing_values_column(values, mv):
    """Replace missing values in a single column's value list, in place.

    A value counts as missing when it appears in EMPTY, equals the table's
    declared missing value, or is a float NaN.

    :param list values: Column values
    :param any mv: The table's declared missing value
    :return list: Values with missing entries replaced by "nan"
    """
    for idx, val in enumerate(values):
        try:
            # float(val) only runs when the cheaper checks fail (short-circuit).
            if val in EMPTY or val == mv or math.isnan(float(val)):
                values[idx] = "nan"
            else:
                values[idx] = val
        except (TypeError, ValueError):
            # Non-numeric, non-missing value: keep it as-is.
            values[idx] = val
    return values
Replace missing values in the values list where applicable
46,684
def split_path_and_file(s):
    """Given a full path to a file, split and return the directory and
    filename parts.

    :param str s: Full path to a file
    :return tuple: (path, filename)
    """
    _path, _filename = s, ""
    try:
        _path, _filename = os.path.split(s)
    except Exception:
        print("Error: unable to split path")
    return _path, _filename
Given a full path to a file split and return a path and filename
46,685
def extract(d, whichtables, mode, time):
    """LiPD Version 1.3. Main function to initiate LiPD-to-time-series
    (TSOs) conversion.

    :param dict d: LiPD metadata
    :param str whichtables: Which table types to extract (passed to _extract_pc)
    :param str mode: "paleo" or "chron" — selects which data section to extract
    :param any time: Identifier stored on each entry as "time_id"
    :return dict: Time series data
    """
    logger_ts.info("enter extract_main")
    _root = {}
    _ts = {}
    _pc = "paleoData"
    if mode == "chron":
        _pc = "chronData"
    _root["mode"] = _pc
    _root["time_id"] = time
    try:
        # Flatten the dataset root: special sections get dedicated
        # extractors; everything else (except the data sections themselves)
        # is copied through verbatim.
        for k, v in d.items():
            if k == "funding":
                _root = _extract_fund(v, _root)
            elif k == "geo":
                _root = _extract_geo(v, _root)
            elif k == 'pub':
                _root = _extract_pub(v, _root)
            else:
                if k not in ["chronData", "paleoData"]:
                    _root[k] = v
        # Expand the chosen data section into one entry per column.
        _ts = _extract_pc(d, _root, _pc, whichtables)
    except Exception as e:
        logger_ts.error("extract: Exception: {}".format(e))
        print("extract: Exception: {}".format(e))
    logger_ts.info("exit extract_main")
    return _ts
LiPD Version 1 . 3 Main function to initiate LiPD to TSOs conversion .
46,686
def _extract_method ( method ) : _method = { } for k , v in method . items ( ) : _method [ "method_" + k ] = v return _method
Make a timeseries - formatted version of model method data
46,687
def _extract_table_model(table_data, current, tt):
    """Add paleo/chron, model, and table number fields to the entry when
    this is a summary or ensemble table.

    :param dict table_data: Table metadata (must carry "tableName")
    :param dict current: Time series entry being built
    :param str tt: Table type ("meas", "summ", or "ens")
    :return dict: The entry, with number fields added where applicable
    """
    try:
        if tt in ["summ", "ens"]:
            m = re.match(re_sheet, table_data["tableName"])
            if m:
                # Groups 2/4/6 look like the paleo-or-chron, model, and table
                # numbers parsed from the name — assumes re_sheet's group
                # layout; TODO(review): confirm against the re_sheet pattern.
                _pc_num = m.group(1) + "Number"
                current[_pc_num] = m.group(2)
                current["modelNumber"] = m.group(4)
                current["tableNumber"] = m.group(6)
            else:
                logger_ts.error("extract_table_summary: Unable to parse paleo/model/table numbers")
    except Exception as e:
        logger_ts.error("extract_table_summary: {}".format(e))
    return current
Add in modelNumber and summaryNumber fields if this is a summary table
46,688
def _extract_table(table_data, current, pc, ts, tt):
    """Use the given table data to create a time series entry for each
    column in the table.

    :param dict table_data: Table metadata (with a "columns" mapping)
    :param dict current: Base entry shared by all of this table's columns
    :param str pc: "paleoData" or "chronData"
    :param list ts: Accumulator of time series entries (appended to)
    :param str tt: Table type ("meas", "summ", or "ens")
    :return list: The accumulator, with one entry added per column
    """
    current["tableType"] = tt
    current = _extract_table_root(table_data, current, pc)
    current = _extract_table_model(table_data, current, tt)
    _table_tmp = _extract_special(current, table_data)
    try:
        for _col_name, _col_data in table_data["columns"].items():
            # deepcopy so each column's entry starts from the same table
            # base without sharing mutable state.
            _col_tmp = _extract_columns(_col_data, copy.deepcopy(_table_tmp), pc)
            try:
                ts.append(_col_tmp)
            except Exception as e:
                logger_ts.warn("extract_table: Unable to create ts entry, {}".format(e))
    except Exception as e:
        logger_ts.error("extract_table: {}".format(e))
    return ts
Use the given table data to create a time series entry for each column in the table .
46,689
def collapse(l, raw):
    """LiPD Version 1.3. Main function to initiate time-series-to-LiPD
    conversion.

    :param list l: Time series entries (each a flat dict with "mode" and
        "dataSetName" keys)
    :param dict raw: Original datasets keyed by dataset name, used to carry
        the paleoData/chronData sections back over
    :return dict: Collapsed LiPD data — one dataset dict, or a mapping of
        dataset name to dataset when multiple were present
    """
    logger_ts.info("enter collapse")
    _master = {}
    _dsn = ""
    try:
        # All entries in one batch share the same mode (paleo/chron).
        _pc = l[0]["mode"]
        for entry in l:
            dsn = entry['dataSetName']
            _dsn = dsn
            _current = entry
            if dsn not in _master:
                # First entry for this dataset: rebuild the dataset root and
                # copy the raw data sections across.
                logger_ts.info("collapsing: {}".format(dsn))
                print("collapsing: {}".format(dsn))
                _master, _current = _collapse_root(_master, _current, dsn, _pc)
                try:
                    _master[dsn]["paleoData"] = raw[dsn]["paleoData"]
                    if "chronData" in raw[dsn]:
                        _master[dsn]["chronData"] = raw[dsn]["chronData"]
                except KeyError as e:
                    print("collapse: Could not collapse an object the dataset: {}, {}".format(dsn, e))
            # Every entry contributes one column back into its table.
            _master = _collapse_pc(_master, _current, dsn, _pc)
        if len(_master) == 1:
            # Single dataset: unwrap the outer mapping.
            _master = _master[_dsn]
            print("Created LiPD data: 1 dataset")
        else:
            print("Created LiPD data: {} datasets".format(len(_master)))
    except Exception as e:
        print("Error: Unable to collapse time series, {}".format(e))
        logger_ts.error("collapse: Exception: {}".format(e))
    logger_ts.info("exit collapse")
    return _master
LiPD Version 1 . 3 Main function to initiate time series to LiPD conversion
46,690
def _get_current_names ( current , dsn , pc ) : _table_name = "" _variable_name = "" try : _table_name = current [ '{}_tableName' . format ( pc ) ] _variable_name = current [ '{}_variableName' . format ( pc ) ] except Exception as e : print ( "Error: Unable to collapse time series: {}, {}" . format ( dsn , e ) ) logger_ts . error ( "get_current: {}, {}" . format ( dsn , e ) ) return _table_name , _variable_name
Get the table name and variable name from the given time series entry
46,691
def _collapse_pc(master, current, dsn, pc):
    """Collapse the paleo or chron data for the current time series entry,
    slotting its column back into the right table of the master dataset.

    :param dict master: Datasets being rebuilt, keyed by dataset name
    :param dict current: Time series entry (one column)
    :param str dsn: Dataset name
    :param str pc: "paleoData" or "chronData"
    :return dict: The master structure, with this column inserted
    """
    logger_ts.info("enter collapse_paleo")
    _table_name, _variable_name = _get_current_names(current, dsn, pc)
    try:
        # The table name encodes section/model/table numbers — assumes
        # re_sheet_w_number's group layout; TODO(review): confirm.
        _m = re.match(re_sheet_w_number, _table_name)
        _switch = {"meas": "measurementTable", "summ": "summaryTable", "ens": "ensembleTable"}
        _ms = _switch[current["tableType"]]
        if _ms == "measurementTable":
            # Create the table shell once, then add this entry's column.
            if _table_name not in master[dsn][pc][_m.group(1)][_ms]:
                _tmp_table = _collapse_table_root(current, dsn, pc)
                master[dsn][pc][_m.group(1)][_ms][_table_name] = _tmp_table
            _tmp_column = _collapse_column(current, pc)
            master[dsn][pc][_m.group(1)][_ms][_table_name]['columns'][_variable_name] = _tmp_column
        elif _ms in ["ensembleTable", "summaryTable"]:
            # Model tables live one level deeper, under the model entry.
            if _table_name not in master[dsn][pc][_m.group(1)]["model"][_m.group(1) + _m.group(2)][_ms]:
                _tmp_table = _collapse_table_root(current, dsn, pc)
                master[dsn][pc][_m.group(1)]["model"][_m.group(1) + _m.group(2)][_ms][_table_name] = _tmp_table
            _tmp_column = _collapse_column(current, pc)
            master[dsn][pc][_m.group(1)]["model"][_m.group(1) + _m.group(2)][_ms][_table_name]["columns"][_variable_name] = _tmp_column
    except Exception as e:
        print("Error: Unable to collapse column data: {}, {}".format(dsn, e))
        logger_ts.error("collapse_paleo: {}, {}, {}".format(dsn, _variable_name, e))
    return master
Collapse the paleo or chron for the current time series entry
46,692
def _to_http_hosts(hosts: Union[Iterable[str], str]) -> List[str]:
    """Convert a string of whitespace- or comma-separated hosts into a
    list of HTTP URIs.

    :param hosts: An iterable of host strings, or a single delimited string
    :return: List of hosts, each normalized to an HTTP URI
    """
    if isinstance(hosts, str):
        hosts = hosts.replace(',', ' ').split()
    return [_to_http_uri(host) for host in hosts]
Convert a string of whitespace or comma separated hosts into a list of hosts .
46,693
def _plain_or_callable ( obj ) : if callable ( obj ) : return obj ( ) elif isinstance ( obj , types . GeneratorType ) : return next ( obj ) else : return obj
Returns the value of the called object of obj is a callable otherwise the plain object . Returns None if obj is None .
46,694
def _to_dsn ( hosts ) : p = urlparse ( hosts ) try : user_and_pw , netloc = p . netloc . split ( '@' , maxsplit = 1 ) except ValueError : netloc = p . netloc user_and_pw = 'crate' try : host , port = netloc . split ( ':' , maxsplit = 1 ) except ValueError : host = netloc port = 5432 dbname = p . path [ 1 : ] if p . path else 'doc' dsn = f'postgres://{user_and_pw}@{host}:{port}/{dbname}' if p . query : dsn += '?' + '&' . join ( k + '=' + v [ 0 ] for k , v in parse_qs ( p . query ) . items ( ) ) return dsn
Convert a host URI into a dsn for aiopg .
46,695
def _verify_ssl_from_first ( hosts ) : for host in hosts : query = parse_qs ( urlparse ( host ) . query ) if 'verify_ssl' in query : return _to_boolean ( query [ 'verify_ssl' ] [ 0 ] ) return True
Check if SSL validation parameter is passed in URI
46,696
def addTable(D):
    """Add any table type to the given dataset. Uses prompts to determine
    index locations and table type.

    :param dict D: Metadata (dataset)
    :return dict: Metadata, with the new table inserted
    """
    # Menu choice -> table-type string used downstream.
    _swap = {"1": "measurement", "2": "summary", "3": "ensemble", "4": "distribution"}
    print("What type of table would you like to add?\n"
          "1: measurement\n"
          "2: summary\n"
          "3: ensemble (under development)\n"
          "4: distribution (under development)\n"
          "\n Note: if you want to add a whole model, use the addModel() function")
    _ans = input(">")
    if _ans in ["3", "4"]:
        # Ensemble/distribution insertion is not implemented yet.
        print("I don't know how to do that yet.")
    elif _ans in ["1", "2"]:
        # Read the table values from a user-selected CSV, build the table,
        # then ask where in the dataset it should go.
        print("Locate the CSV file with the values for this table: ")
        _path, _files = browse_dialog_file()
        _path = _confirm_file_path(_files)
        _values = read_csv_from_file(_path)
        _table = _build_table(_values)
        _placement = _prompt_placement(D, _swap[_ans])
        D = _put_table(D, _placement, _table)
    else:
        print("That's not a valid option")
    return D
Add any table type to the given dataset . Use prompts to determine index locations and table type .
46,697
def _prompt_placement(D, tt):
    """Ask the user where to place model data when automatic placement
    didn't work.

    :param dict D: Metadata (dataset)
    :param str tt: Table type being placed
    :return str: The chosen placement name (None / "" on invalid input)
    """
    _model_name = ""
    _placement_options = _get_available_placements(D, tt)
    print("Please choose where you'd like to place this model:")
    for _idx, _opt in enumerate(_placement_options):
        print("({}) {}".format(_idx, _opt))
    _choice = input("> ")
    try:
        # Bug fix: the original bound check used "<= len(options)" and
        # allowed negative numbers, so a choice equal to len(options) (or a
        # negative index) slipped through and surfaced as a generic error.
        idx = int(_choice)
        if 0 <= idx < len(_placement_options):
            _model_name = _placement_options[idx]
        else:
            print("Invalid choice input")
            return
    except Exception:
        print("Invalid choice")
    return _model_name
Since automatic placement didn t work find somewhere to place the model data manually with the help of the user .
46,698
def _put_table(D, name, table):
    """Use the dataset and table name to place the new table data into the
    dataset at the location encoded in the name.

    :param dict D: Metadata (dataset)
    :param str name: Table name (encodes section/model/table placement)
    :param dict table: Table data to insert
    :return dict: Metadata with the table inserted (None on a bad name)
    """
    try:
        table["tableName"] = name
        # The name encodes where the table belongs — assumes re_table_name's
        # group layout (section, number, table type, ...); TODO(review): confirm.
        m = re.match(re_table_name, name)
        if m:
            _pc = m.group(1) + "Data"
            _section = m.group(1) + m.group(2)
            if m.group(3) == "measurement":
                if name in D[_pc][_section]["measurementTable"]:
                    print("Oops. This shouldn't happen. That table path is occupied in the dataset")
                else:
                    D[_pc][_section]["measurementTable"][name] = table
            else:
                # Summary/ensemble tables are keyed by the full model name.
                _model = _section + m.group(3) + m.group(4)
                _tt = m.group(5) + "Table"
                if name in D[_pc][_model][_tt]:
                    print("Oops. This shouldn't happen. That table path is occupied in the dataset")
                else:
                    D[_pc][_model][_tt][name] = table
        else:
            print("Oops. This shouldn't happen. That table name doesn't look right. Please report this error")
            return
    except Exception as e:
        print("addTable: Unable to put the table data into the dataset, {}".format(e))
    return D
Use the dataset and name to place the new table data into the dataset .
46,699
def addModel(D, models):
    """Insert model data into a LiPD dataset.

    Tries to place each model automatically from its name; when the name
    doesn't parse, asks the user where the model belongs.

    :param dict D: Metadata (dataset)
    :param dict models: Model data keyed by model name
    :return dict: Metadata with the model data inserted (None on a bad name)
    """
    try:
        for _model_name, _model_data in models.items():
            _m = re.match(re_model_name, _model_name)
            if _m:
                # Name parsed: place the model automatically.
                D = _put_model(D, _model_name, _model_data, _m)
            else:
                print("The table name found in the given model data isn't valid for automatic placement")
                # Fall back to asking the user for a placement location.
                _placement_name = _prompt_placement(D, "model")
                _m = re.match(re_model_name, _placement_name)
                if _m:
                    D = _put_model(D, _placement_name, _model_data, _m)
                else:
                    print("Oops. This shouldn't happen. That table name doesn't look right. Please report this error")
                    return
    except Exception as e:
        print("addModel: Model data NOT added, {}".format(e))
    return D
Insert model data into a LiPD dataset