idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
9,100
def export_to_file(self, filepath, level_prefix=' '):
    """
    Exports this model to a file.

    :param filepath: Path of the file to write the XML serialization to.
    :param level_prefix: Indentation string used per nesting level in
        the pretty-printed XML.
    """
    xmldom = self.export_to_dom()
    xmlstr = xmldom.toprettyxml(level_prefix, '\n')
    # 'with' guarantees the handle is closed even if write() raises
    # (the original used open/write/close with no error handling).
    with open(filepath, 'w') as f:
        f.write(xmlstr)
Exports this model to a file .
77
8
9,101
def resolve(self):
    """
    Resolves references in this model.

    Operates on a copy: component types are resolved, components are
    fattened, and constants are numerically resolved.

    :returns: A resolved copy of this model.
    """
    model = self.copy()
    for ct in model.component_types:
        model.resolve_component_type(ct)
    for c in model.components:
        if c.id not in model.fat_components:
            model.add(model.fatten_component(c))
    # NOTE(review): 'ct' below is the loop variable leaked from the
    # component-type loop above, so only the constants of the *last*
    # component type are resolved here. Preserved as-is — confirm intent.
    for c in ct.constants:
        resolved_constant = c.copy()
        resolved_constant.numeric_value = model.get_numeric_value(
            resolved_constant.value, resolved_constant.dimension)
        model.add(resolved_constant)
    return model
Resolves references in this model .
119
7
9,102
def resolve_component_type(self, component_type):
    """
    Resolves references in the specified component type.

    :param component_type: Component type to resolve.
    :raises ModelError: If the base type being extended cannot be found.
    """
    # Resolve component type from base types if present.
    if component_type.extends:
        try:
            base_ct = self.component_types[component_type.extends]
        except KeyError:
            # Narrowed from a bare 'except': only a missing key means the
            # base type is unknown; anything else should propagate.
            raise ModelError("Component type '{0}' trying to extend unknown component type '{1}'",
                             component_type.name, component_type.extends)
        self.resolve_component_type(base_ct)
        self.merge_component_types(component_type, base_ct)
        component_type.types = set.union(component_type.types, base_ct.types)
        component_type.extends = None
Resolves references in the specified component type .
145
9
9,103
def merge_component_types(self, ct, base_ct):
    """
    Merge various maps in the given component type from a base
    component type.

    :param ct: Component type being extended (receives merged entries).
    :param base_ct: Base component type merged into ``ct``.
    """
    # Parameters get special handling: an existing fixed parameter keeps
    # its fixed value and inherits the base parameter's dimension; missing
    # parameters are copied over wholesale.
    for parameter in base_ct.parameters:
        if parameter.name in ct.parameters:
            p = ct.parameters[parameter.name]
            basep = base_ct.parameters[parameter.name]
            if p.fixed:
                p.value = p.fixed_value
                p.dimension = basep.dimension
        else:
            ct.parameters[parameter.name] = base_ct.parameters[parameter.name]

    merge_maps(ct.properties, base_ct.properties)
    merge_maps(ct.derived_parameters, base_ct.derived_parameters)
    merge_maps(ct.index_parameters, base_ct.index_parameters)
    merge_maps(ct.constants, base_ct.constants)
    merge_maps(ct.exposures, base_ct.exposures)
    merge_maps(ct.requirements, base_ct.requirements)
    merge_maps(ct.component_requirements, base_ct.component_requirements)
    merge_maps(ct.instance_requirements, base_ct.instance_requirements)
    merge_maps(ct.children, base_ct.children)
    merge_maps(ct.texts, base_ct.texts)
    merge_maps(ct.links, base_ct.links)
    merge_maps(ct.paths, base_ct.paths)
    merge_maps(ct.event_ports, base_ct.event_ports)
    merge_maps(ct.component_references, base_ct.component_references)
    merge_maps(ct.attachments, base_ct.attachments)

    # Dynamics maps.
    merge_maps(ct.dynamics.state_variables, base_ct.dynamics.state_variables)
    merge_maps(ct.dynamics.derived_variables, base_ct.dynamics.derived_variables)
    merge_maps(ct.dynamics.conditional_derived_variables,
               base_ct.dynamics.conditional_derived_variables)
    merge_maps(ct.dynamics.time_derivatives, base_ct.dynamics.time_derivatives)
    # NOTE: event handlers are deliberately not merged here.
    merge_maps(ct.dynamics.kinetic_schemes, base_ct.dynamics.kinetic_schemes)

    # Structure lists.
    merge_lists(ct.structure.event_connections, base_ct.structure.event_connections)
    merge_lists(ct.structure.child_instances, base_ct.structure.child_instances)
    merge_lists(ct.structure.multi_instantiates, base_ct.structure.multi_instantiates)

    # Simulation maps.
    merge_maps(ct.simulation.runs, base_ct.simulation.runs)
    merge_maps(ct.simulation.records, base_ct.simulation.records)
    merge_maps(ct.simulation.event_records, base_ct.simulation.event_records)
    merge_maps(ct.simulation.data_displays, base_ct.simulation.data_displays)
    merge_maps(ct.simulation.data_writers, base_ct.simulation.data_writers)
    merge_maps(ct.simulation.event_writers, base_ct.simulation.event_writers)
Merge various maps in the given component type from a base component type .
755
15
9,104
def resolve_simulation(self, fc, ct):
    """
    Resolve simulation specifications.

    Copies runs, records, event records, data displays and data/event
    writers from the component type into the fat component, resolving
    every referenced parameter, path and text.

    :param fc: Fat component receiving the resolved simulation objects.
    :param ct: Component type supplying the simulation specifications.
    :raises ModelError: If any reference cannot be resolved.
    """
    # All handlers below were bare 'except:' clauses; narrowed to
    # 'except Exception' so KeyboardInterrupt/SystemExit propagate.
    for run in ct.simulation.runs:
        try:
            run2 = Run(fc.component_references[run.component].referenced_component,
                       run.variable,
                       fc.parameters[run.increment].numeric_value,
                       fc.parameters[run.total].numeric_value)
        except Exception:
            raise ModelError("Unable to resolve simulation run parameters in component '{0}'",
                             fc.id)
        fc.simulation.add(run2)

    for record in ct.simulation.records:
        try:
            record2 = Record(fc.paths[record.quantity].value,
                             fc.parameters[record.scale].numeric_value if record.scale else 1,
                             fc.texts[record.color].value if record.color else '#000000')
        except Exception:
            raise ModelError("Unable to resolve simulation record parameters in component '{0}'",
                             fc.id)
        fc.simulation.add(record2)

    for event_record in ct.simulation.event_records:
        try:
            event_record2 = EventRecord(fc.paths[event_record.quantity].value,
                                        fc.texts[event_record.eventPort].value)
        except Exception:
            raise ModelError("Unable to resolve simulation event_record parameters in component '{0}'",
                             fc.id)
        fc.simulation.add(event_record2)

    for dd in ct.simulation.data_displays:
        try:
            dd2 = DataDisplay(fc.texts[dd.title].value, '')
            if 'timeScale' in fc.parameters:
                dd2.timeScale = fc.parameters['timeScale'].numeric_value
        except Exception:
            raise ModelError("Unable to resolve simulation display parameters in component '{0}'",
                             fc.id)
        fc.simulation.add(dd2)

    for dw in ct.simulation.data_writers:
        try:
            path = '.'
            if fc.texts[dw.path] and fc.texts[dw.path].value:
                path = fc.texts[dw.path].value
            dw2 = DataWriter(path, fc.texts[dw.file_name].value)
        except Exception:
            raise ModelError("Unable to resolve simulation writer parameters in component '{0}'",
                             fc.id)
        fc.simulation.add(dw2)

    for ew in ct.simulation.event_writers:
        try:
            path = '.'
            if fc.texts[ew.path] and fc.texts[ew.path].value:
                path = fc.texts[ew.path].value
            ew2 = EventWriter(path,
                              fc.texts[ew.file_name].value,
                              fc.texts[ew.format].value)
        except Exception:
            raise ModelError("Unable to resolve simulation writer parameters in component '{0}'",
                             fc.id)
        fc.simulation.add(ew2)
Resolve simulation specifications .
669
5
9,105
def get_numeric_value(self, value_str, dimension=None):
    """
    Get the numeric value for a parameter value specification.

    :param value_str: Value specification, e.g. '5', '1.5e-3' or '10mV'.
    :param dimension: Expected dimension name, '*' for any, or None.
    :returns: Numeric value scaled to base units via the unit's power,
        scale and offset.
    :raises SimBuildError: On an unknown unit symbol, a dimension
        mismatch, or input with no parseable numeric prefix.
    """
    # Find the longest numeric prefix of the string; the remainder is
    # the unit symbol.
    number = None
    sym = ''
    for i in range(len(value_str), 0, -1):
        try:
            number = float(value_str[0:i])
            sym = value_str[i:]
            break
        except ValueError:
            continue
    if number is None:
        # BUG FIX: the original shrank the slice index below 0 on input
        # with no numeric prefix, which could mis-slice the string or
        # loop forever. Fail loudly instead.
        raise SimBuildError("No numeric value found in '{0}'", value_str)

    if sym == '':
        return number
    if sym not in self.units:
        raise SimBuildError("Unknown unit symbol '{0}'. Known: {1}", sym, self.units)
    unit = self.units[sym]
    if dimension and dimension != unit.dimension and dimension != '*':
        raise SimBuildError("Unit symbol '{0}' cannot "
                            "be used for dimension '{1}'", sym, dimension)
    return (number * (10 ** unit.power) * unit.scale) + unit.offset
Get the numeric value for a parameter value specification .
251
10
9,106
def start_msstitch(exec_drivers, sysargs):
    """
    Dispatch a command-line invocation to the matching driver.

    Builds a parser from all executable drivers, parses the command line
    (minus the program name) and calls the selected driver function with
    the parsed options.
    """
    parser = populate_parser(exec_drivers)
    parsed = parser.parse_args(sysargs[1:])
    parsed.func(**vars(parsed))
Given all drivers of the executable, checks which command was passed to the executable, gets the options for that driver, parses them from the command line, and runs the driver.
52
32
9,107
def merged(*dicts, **kwargs):
    """
    Merge dictionaries. Later keys overwrite.

    Keyword arguments overwrite everything else. The result is built
    with the type of the first positional argument; with no positional
    arguments an empty Struct is returned.
    """
    if not dicts:
        return Struct()
    combined = {}
    for mapping in dicts:
        combined.update(mapping)
    combined.update(kwargs)
    result_type = type(dicts[0])
    return result_type(**combined)
Merge dictionaries . Later keys overwrite .
69
9
9,108
def order_derived_parameters(component):
    """
    Finds ordering of derived_parameters.

    Repeatedly emits any parameter whose value expression does not
    mention another still-pending parameter, retrying up to a fixed
    number of barren passes.

    :param component: Component whose derived parameters are ordered.
    :returns: List of derived-parameter names in evaluation order.
    :raises SimBuildError: If no ordering can be found.
    """
    if len(component.derived_parameters) == 0:
        return []

    ordering = []
    pending = [dp.name for dp in component.derived_parameters]
    maxcount = 5
    count = maxcount
    while count > 0 and pending != []:
        count -= 1
        for name in pending:
            value = component.derived_parameters[name].value
            # NOTE(review): dependency detection is a plain substring
            # test, as in the original — 'a' also matches inside 'abc'.
            depends = any(other != name and other in value for other in pending)
            if not depends:
                ordering.append(name)
                pending.remove(name)
                count = maxcount
                break
    if count == 0:
        raise SimBuildError(("Unable to find ordering for derived "
                             "parameter in component '{0}'").format(component))
    return ordering
Finds ordering of derived_parameters .
230
9
9,109
def order_derived_variables(regime):
    """
    Finds ordering of derived_variables.

    Variables with no expression (or no cases) are emitted first; the
    rest are ordered so each variable is computed after every variable
    it references, retrying up to a fixed number of barren passes.

    :param regime: Dynamics regime whose derived variables are ordered.
    :returns: List of derived-variable names in evaluation order.
    :raises SimBuildError: If no ordering exists (e.g. cyclic references).
    """
    ordering = []
    dvs = []
    dvsnoexp = []
    maxcount = 5

    for dv in regime.derived_variables:
        # Idiom fix: 'is None' instead of '== None'.
        if dv.expression_tree is None:
            dvsnoexp.append(dv.name)
        else:
            dvs.append(dv.name)
    for dv in regime.conditional_derived_variables:
        if len(dv.cases) == 0:
            dvsnoexp.append(dv.name)
        else:
            dvs.append(dv.name)

    count = maxcount
    while count > 0 and dvs != []:
        count -= 1
        for dv1 in dvs:
            if dv1 in regime.derived_variables:
                dv = regime.derived_variables[dv1]
            else:
                dv = regime.conditional_derived_variables[dv1]
            found = False
            if isinstance(dv, DerivedVariable):
                exp_tree = dv.expression_tree
                for dv2 in dvs:
                    if dv1 != dv2 and is_var_in_exp_tree(dv2, exp_tree):
                        found = True
            else:
                for case in dv.cases:
                    for dv2 in dvs:
                        if dv1 != dv2 and (is_var_in_exp_tree(dv2, case.condition_expression_tree) or
                                           is_var_in_exp_tree(dv2, case.value_expression_tree)):
                            found = True
            if not found:
                ordering.append(dv1)
                dvs.remove(dv1)
                count = maxcount
                break
    if count == 0:
        raise SimBuildError(("Unable to find ordering for derived "
                             "variables in regime '{0}'").format(regime.name))
    # Expression-free variables carry no dependencies, so they go first.
    return dvsnoexp + ordering
Finds ordering of derived_variables .
423
9
9,110
def build(self):
    """
    Build the simulation components from the model.

    :returns: The assembled Simulation instance.
    :raises SimBuildError: If a simulation target cannot be found.
    """
    self.sim = Simulation()
    for component_id in self.model.targets:
        if component_id not in self.model.components:
            raise SimBuildError("Unable to find target component '{0}'",
                                component_id)
        fat = self.model.fat_components[component_id]
        self.sim.add_runnable(self.build_runnable(fat))
    return self.sim
Build the simulation components from the model .
105
8
9,111
def build_event_connections(self, component, runnable, structure):
    """
    Adds event connections to a runnable component based on the structure
    specifications in the component model.

    :param component: Fat component the structure belongs to.
    :param runnable: Runnable being wired up.
    :param structure: Structure specification holding the event connections.
    :raises SimBuildError: If a source or target event port cannot be
        uniquely identified.
    """
    if self.debug:
        print("\n++++++++ Calling build_event_connections of %s with runnable %s, parent %s" %
              (component.id, runnable.id, runnable.parent))
    # Process event connections
    for ec in structure.event_connections:
        if self.debug:
            print(ec.toxml())
        source = runnable.parent.resolve_path(ec.from_)
        target = runnable.parent.resolve_path(ec.to)
        if ec.receiver:
            receiver_template = self.build_runnable(ec.receiver, target)
            receiver = receiver_template.copy()
            receiver.id = "{0}__{1}__".format(component.id, receiver_template.id)
            if ec.receiver_container:
                target.add_attachment(receiver, ec.receiver_container)
            target.add_child(receiver_template.id, receiver)
            target = receiver
        else:
            source = runnable.resolve_path(ec.from_)
            target = runnable.resolve_path(ec.to)
        source_port = ec.source_port
        target_port = ec.target_port
        if not source_port:
            if len(source.event_out_ports) == 1:
                source_port = source.event_out_ports[0]
            else:
                raise SimBuildError(("No source event port "
                                     "uniquely identifiable"
                                     " in '{0}'").format(source.id))
        if not target_port:
            if len(target.event_in_ports) == 1:
                target_port = target.event_in_ports[0]
            else:
                raise SimBuildError(("No destination event port "
                                     "uniquely identifiable "
                                     "in '{0}'").format(target))
        if self.debug:
            print("register_event_out_callback\n  Source: %s, %s (port: %s) \n  -> %s, %s (port: %s)" %
                  (source, id(source), source_port, target, id(target), target_port))
        # BUG FIX: bind the per-iteration values as default arguments.
        # The original closure captured 'target'/'target_port' by name,
        # so with multiple connections every callback delivered to the
        # values from the LAST loop iteration.
        source.register_event_out_callback(
            source_port,
            lambda target=target, target_port=target_port: target.inc_event_in(target_port))
Adds event connections to a runnable component based on the structure specifications in the component model .
524
19
9,112
def build_structure(self, component, runnable, structure):
    """
    Adds structure to a runnable component based on the structure
    specifications in the component model.
    """
    if self.debug:
        print("\n++++++++ Calling build_structure of %s with runnable %s, parent %s" %
              (component.id, runnable.id, runnable.parent))

    # Single-child instantiations.
    for ch in structure.child_instances:
        child = self.build_runnable(ch.referenced_component, runnable)
        runnable.add_child(child.id, child)
        runnable.add_child_typeref(ch.component, child)

    # Multi-child instantiations: clone a template per instance.
    for mi in structure.multi_instantiates:
        template = self.build_runnable(mi.component, runnable)
        for i in range(mi.number):
            instance = template.copy()
            instance.id = "{0}__{1}__{2}".format(component.id, template.id, i)
            runnable.array.append(instance)

    # ForEach constructs.
    for fe in structure.for_eachs:
        self.build_foreach(component, runnable, fe)

    self.build_event_connections(component, runnable, structure)
Adds structure to a runnable component based on the structure specifications in the component model .
299
18
9,113
def build_foreach(self, component, runnable, foreach, name_mappings=None):
    """
    Iterate over ForEach constructs and process nested elements.

    :param component: Component owning the structure.
    :param runnable: Runnable in which instance paths are resolved.
    :param foreach: ForEach structure element to process.
    :param name_mappings: Mapping from 'as' aliases to runnables,
        accumulated through nested ForEach levels.
    :raises SimBuildError: If an event port cannot be uniquely identified.
    """
    # BUG FIX: the default was a mutable dict ({}), silently shared by
    # every top-level call; use the None sentinel idiom instead.
    if name_mappings is None:
        name_mappings = {}
    if self.debug:
        print("\n++++++++ Calling build_foreach of %s with runnable %s, parent %s, name_mappings: %s" %
              (component.id, runnable.id, runnable.parent, name_mappings))
    target_array = runnable.resolve_path(foreach.instances)
    for target_runnable in target_array:
        if self.debug:
            print("Applying contents of for_each to %s, as %s" %
                  (target_runnable.id, foreach.as_))
        name_mappings[foreach.as_] = target_runnable
        # Process nested foreach statements
        for fe2 in foreach.for_eachs:
            target_array2 = runnable.resolve_path(fe2.instances)
            for target_runnable2 in target_array2:
                name_mappings[fe2.as_] = target_runnable2
                self.build_foreach(component, runnable, fe2, name_mappings)
        # Process event connections
        for ec in foreach.event_connections:
            source = name_mappings[ec.from_]
            target = name_mappings[ec.to]
            source_port = ec.source_port
            target_port = ec.target_port
            if not source_port:
                if len(source.event_out_ports) == 1:
                    source_port = source.event_out_ports[0]
                else:
                    raise SimBuildError(("No source event port "
                                         "uniquely identifiable"
                                         " in '{0}'").format(source.id))
            if not target_port:
                if len(target.event_in_ports) == 1:
                    target_port = target.event_in_ports[0]
                else:
                    raise SimBuildError(("No destination event port "
                                         "uniquely identifiable "
                                         "in '{0}'").format(target))
            if self.debug:
                print("register_event_out_callback\n  Source: %s, %s (port: %s) \n  -> %s, %s (port: %s)" %
                      (source, id(source), source_port, target, id(target), target_port))
            # BUG FIX: bind loop values as defaults; a plain closure would
            # see the final values of target/target_port after the loop.
            source.register_event_out_callback(
                source_port,
                lambda target=target, target_port=target_port: target.inc_event_in(target_port))
Iterate over ForEach constructs and process nested elements .
558
11
9,114
def process_simulation_specs(self, component, runnable, simulation):
    """
    Process simulation-related aspects of a runnable component based on
    the simulation specifications in the component model.
    """
    # Process runs: build a runnable per run spec, register it with the
    # simulation, remember it as the current record target, and set up
    # its timing.
    for run in simulation.runs:
        cid = run.component.id + '_' + component.id
        run_target = self.build_runnable(run.component, runnable, cid)
        self.sim.add_runnable(run_target)
        self.current_record_target = run_target
        run_target.configure_time(run.increment, run.total)
Process simulation - related aspects to a runnable component based on the dynamics specifications in the component model .
102
21
9,115
def build_expression_from_tree(self, runnable, regime, tree_node):
    """
    Recursively builds a Python expression from a parsed expression tree.

    :param runnable: Runnable supplying variable scope.
    :param regime: Current dynamics regime (may be None).
    :param tree_node: Root ExprNode of the (sub)tree being converted.
    :returns: Python expression string.
    :raises SimBuildError: If a required variable cannot be resolved.
    """
    component_type = self.model.component_types[runnable.component.type]
    dynamics = component_type.dynamics

    if tree_node.type == ExprNode.VALUE:
        if not tree_node.value[0].isalpha():
            # Numeric literal: emit verbatim.
            return tree_node.value
        if tree_node.value == 't':
            return 'self.time_completed'
        if tree_node.value in component_type.requirements:
            # Walk up the parent chain until the requirement resolves.
            var_prefix = 'self'
            v = tree_node.value
            r = runnable
            while (v not in r.instance_variables and
                   v not in r.derived_variables):
                var_prefix = '{0}.{1}'.format(var_prefix, 'parent')
                r = r.parent
                if r is None:
                    raise SimBuildError("Unable to resolve required "
                                        "variable '{0}'".format(v))
            return '{0}.{1}'.format(var_prefix, v)
        if (tree_node.value in dynamics.derived_variables or
                (regime is not None and tree_node.value in regime.derived_variables)):
            return 'self.{0}'.format(tree_node.value)
        # Plain state variable: read the shadow copy.
        return 'self.{0}_shadow'.format(tree_node.value)
    elif tree_node.type == ExprNode.FUNC1:
        pattern = '({0}({1}))'
        func = self.convert_func(tree_node.func)
        if 'random.uniform' in func:
            # random.uniform needs an explicit lower bound of 0.
            pattern = '({0}(0,{1}))'
        return pattern.format(
            func, self.build_expression_from_tree(runnable, regime, tree_node.param))
    else:
        # Binary operator node.
        return '({0}) {1} ({2})'.format(
            self.build_expression_from_tree(runnable, regime, tree_node.left),
            self.convert_op(tree_node.op),
            self.build_expression_from_tree(runnable, regime, tree_node.right))
Recursively builds a Python expression from a parsed expression tree .
484
13
9,116
def build_event_handler(self, runnable, regime, event_handler):
    """
    Build event handler code.

    Dispatches on the handler's type; unknown handler types produce no
    code.
    """
    dispatch = ((OnCondition, self.build_on_condition),
                (OnEvent, self.build_on_event),
                (OnStart, self.build_on_start),
                (OnEntry, self.build_on_entry))
    for handler_cls, builder in dispatch:
        if isinstance(event_handler, handler_cls):
            return builder(runnable, regime, event_handler)
    return []
Build event handler code .
152
5
9,117
def build_on_condition(self, runnable, regime, on_condition):
    """Build OnCondition event handler code."""
    condition = self.build_expression_from_tree(runnable, regime,
                                                on_condition.expression_tree)
    code = ['if {0}:'.format(condition)]
    # Action lines are indented under the generated 'if'.
    for action in on_condition.actions:
        for line in self.build_action(runnable, regime, action):
            code.append('    ' + line)
    return code
Build OnCondition event handler code .
118
7
9,118
def build_on_event(self, runnable, regime, on_event):
    """Build OnEvent event handler code."""
    code = []
    if self.debug:
        code += ['print("Maybe handling something for %s ("+str(id(self))+")")' % (runnable.id),
                 'print("EICs ("+str(id(self))+"): "+str(self.event_in_counters))']
    # Drain the per-port event counter, running the actions once per event.
    code += ["count = self.event_in_counters['{0}']".format(on_event.port),
             'while count > 0:',
             '    print("  Handling event")' if self.debug else '',
             '    count -= 1']
    for action in on_event.actions:
        for line in self.build_action(runnable, regime, action):
            code.append('    ' + line)
    code.append("self.event_in_counters['{0}'] = 0".format(on_event.port))
    return code
Build OnEvent event handler code .
247
7
9,119
def build_on_start(self, runnable, regime, on_start):
    """Build OnStart start handler code."""
    code = []
    for action in on_start.actions:
        code.extend(self.build_action(runnable, regime, action))
    return code
Build OnStart start handler code .
73
7
9,120
def build_on_entry(self, runnable, regime, on_entry):
    """Build OnEntry handler code (runs once on a regime change)."""
    code = ['if self.current_regime != self.last_regime:',
            '    self.last_regime = self.current_regime']
    for action in on_entry.actions:
        for line in self.build_action(runnable, regime, action):
            code.append('    ' + line)
    return code
Build OnEntry start handler code .
123
7
9,121
def build_action(self, runnable, regime, action):
    """Build event handler action code; unknown actions become 'pass'."""
    if isinstance(action, StateAssignment):
        return self.build_state_assignment(runnable, regime, action)
    elif isinstance(action, EventOut):
        return self.build_event_out(action)
    elif isinstance(action, Transition):
        return self.build_transition(action)
    else:
        return ['pass']
Build event handler action code .
93
6
9,122
def build_state_assignment(self, runnable, regime, state_assignment):
    """Build state assignment code."""
    expression = self.build_expression_from_tree(runnable, regime,
                                                 state_assignment.expression_tree)
    return ['self.{0} = {1}'.format(state_assignment.variable, expression)]
Build state assignment code .
72
5
9,123
def build_event_out(self, event_out):
    """Build event out code: fire every callback registered on the port."""
    port = event_out.port
    return ['if "{0}" in self.event_out_callbacks:'.format(port),
            "    for c in self.event_out_callbacks['{0}']:".format(port),
            '        c()']
Build event out code .
87
5
9,124
def build_reduce_code(self, result, select, reduce):
    """
    Builds a reduce operation on the selected target range.

    :param result: Name of the variable receiving the reduced value.
    :param select: Selection path, e.g. 'synapses[*]/i' or 'pop[x=1]/v'.
    :param reduce: 'add' sums the selection; any other value multiplies.
    :returns: List of generated Python code lines.
    :raises SimBuildError: If the selection is not a valid reduce target.
    """
    select = select.replace('/', '.')
    select = select.replace(' ', '')
    if reduce == 'add':
        reduce_op = '+'
        acc_start = 0
    else:
        reduce_op = '*'
        acc_start = 1
    # Raw strings for the regex patterns (were plain strings).
    bits = re.split(r'\[.*\]', select)
    seps = re.findall(r'\[.*\]', select)
    code = ['self.{0} = {1}'.format(result, acc_start),
            'self.{0}_shadow = {1}'.format(result, acc_start),
            'try:']
    if len(bits) == 1:
        # No [...] selector: straight copy.
        target = select
        code += ['    self.{0} = self.{1}'.format(result, target),
                 '    self.{0}_shadow = self.{1}'.format(result, target)]
    elif len(bits) == 2:
        sep = seps[0][1:-1]
        array = bits[0]
        ref = bits[1]
        if sep == '*':
            # Reduce over every element of the array.
            code += ['    acc = {0}'.format(acc_start),
                     '    for o in self.{0}:'.format(array),
                     '        acc = acc {0} o{1}'.format(reduce_op, ref),
                     '    self.{0} = acc'.format(result),
                     '    self.{0}_shadow = acc'.format(result)]
        else:
            bits2 = sep.split('=')
            if len(bits2) > 1:
                # Reduce over elements matching the attr=value filter.
                code += ['    acc = {0}'.format(acc_start),
                         '    for o in self.{0}:'.format(array),
                         '        if o.{0} == {1}:'.format(bits2[0], bits2[1]),
                         '            acc = acc {0} o{1}'.format(reduce_op, ref),
                         '    self.{0} = acc'.format(result),
                         '    self.{0}_shadow = acc'.format(result)]
            else:
                # BUG FIX: was 'SimbuildError' (a NameError at runtime).
                raise SimBuildError("Invalid reduce target - '{0}'".format(select))
    else:
        raise SimBuildError("Invalid reduce target - '{0}'".format(select))
    code += ['except:',
             '    pass']
    return code
Builds a reduce operation on the selected target range .
601
11
9,125
def add_recording_behavior(self, component, runnable):
    """
    Adds recording-related behaviour to a runnable component: each record
    spec in the component's simulation is tagged with the runnable's id
    and registered on the current record target.
    """
    for record in component.simulation.records:
        record.id = runnable.id
        self.current_record_target.add_variable_recorder(self.current_data_output,
                                                         record)
Adds recording - related dynamics to a runnable component based on the dynamics specifications in the component model .
63
21
9,126
def check_static_member_vars(class_, fpath=None, only_init=True):
    """
    Find attributes statically assigned on ``self`` in a class's source.

    ``class_`` can either be a live object or a classname string.

    :param class_: Class, class instance, or class-name string.
    :param fpath: Path to the module source; required when a name string
        is given, otherwise derived from the live object.
    :param only_init: If True, only ``__init__`` is inspected.
    :returns: List of unique attribute names assigned on self.
    :raises Exception: If a classname is given without ``fpath``.
    """
    import utool as ut
    if isinstance(class_, six.string_types):
        classname = class_
        if fpath is None:
            raise Exception('must specify fpath')
    else:
        # We were given a live object
        if not isinstance(class_, type):
            # We were given the class instance, not the class itself.
            class_ = class_.__class__
        classname = class_.__name__
        if fpath is None:
            module = ut.get_module_from_class(class_)
            fpath = ut.get_modpath(module)
    sourcecode = ut.readfrom(fpath)
    import redbaron
    # Parses a FULL syntax tree that keeps block comments.
    baron = redbaron.RedBaron(sourcecode)
    # NOTE(review): as in the original, a missing class leaves 'classnode'
    # unbound and raises NameError below.
    for node in baron:
        if node.type == 'class' and node.name == classname:
            classnode = node
            break

    # TODO: also collect inherited attrs (classnode.inherit_from).
    class_methods = []
    for node in classnode:
        if node.type == 'def':
            if not only_init or node.name == '__init__':
                class_methods.append(node)

    class_vars = []
    for method_node in class_methods:
        # First argument is the 'self' name for this method.
        self_var = method_node.arguments[0].dumps()
        for assign in method_node.find_all('assignment'):
            if assign.target.dumps().startswith(self_var + '.'):
                class_vars.append(assign.target.value[1].dumps())
    # CLEANUP: the original carried an unused nested helper
    # (find_parent_method) and a large unreachable 'if False:' scratch
    # block after this return; both have been removed.
    return ut.unique(class_vars)
class_ can either be live object or a classname
817
11
9,127
def get_funcnames_from_modpath(modpath, include_methods=True):
    """
    Get all functions defined in module.

    :param modpath: Path to the module source file.
    :param include_methods: Also include public methods of classes.
    :returns: List of function names (methods exclude _private names).
    """
    import utool as ut
    # NOTE(review): constant toggle selects the jedi strategy; the
    # redbaron branch below is currently unreachable.
    if True:
        import jedi
        source = ut.read_from(modpath)
        definition_list = jedi.names(source)
        funcname_list = [defn.name for defn in definition_list
                         if defn.type == 'function']
        if include_methods:
            classdef_list = [defn for defn in definition_list
                             if defn.type == 'class']
            defined_methods = ut.flatten([defn.defined_names()
                                          for defn in classdef_list])
            funcname_list += [method.name for method in defined_methods
                              if method.type == 'function' and
                              not method.name.startswith('_')]
    else:
        import redbaron
        # Parses a FULL syntax tree that keeps block comments.
        sourcecode = ut.read_from(modpath)
        baron = redbaron.RedBaron(sourcecode)
        funcname_list = [node.name
                         for node in baron.find_all('def', recursive=include_methods)
                         if not node.name.startswith('_')]
    return funcname_list
Get all functions defined in module
297
6
9,128
def help_members(obj, use_other=False):
    """
    Inspects members of a class.

    Groups the callable members of ``obj`` by their (unbound) argument
    counts and prints the grouping; optionally prints the remaining
    non-method attributes as well.

    :param obj: Object or class to inspect.
    :param use_other: If True, also print non-method attributes.
    """
    import utool as ut
    attrnames = dir(obj)
    attr_list = [getattr(obj, attrname) for attrname in attrnames]
    attr_types = ut.lmap(ut.type_str, map(type, attr_list))
    unique_types, groupxs = ut.group_indices(attr_types)
    type_to_items = ut.dzip(unique_types, ut.apply_grouping(attr_list, groupxs))
    type_to_itemname = ut.dzip(unique_types, ut.apply_grouping(attrnames, groupxs))
    memtypes = ['instancemethod']
    func_mems = ut.dict_subset(type_to_items, memtypes, [])
    func_list = ut.flatten(func_mems.values())

    defsig_list = []
    num_unbound_args_list = []
    num_args_list = []
    for func in func_list:
        argspec = ut.get_func_argspec(func)
        args = argspec.args
        unbound_args = get_unbound_args(argspec)
        defsig_list.append(ut.func_defsig(func))
        num_unbound_args_list.append(len(unbound_args))
        num_args_list.append(len(args))
    group = ut.hierarchical_group_items(defsig_list,
                                        [num_unbound_args_list, num_args_list])
    print(repr(obj))
    print(ut.repr3(group, strvals=True))

    if use_other:
        other_mems = ut.delete_keys(type_to_items.copy(), memtypes)
        other_mems_attrnames = ut.dict_subset(type_to_itemname, other_mems.keys())
        named_other_attrs = ut.dict_union_combine(
            other_mems_attrnames, other_mems, lambda x, y: list(zip(x, y)))
        print(ut.repr4(named_other_attrs, nl=2, strvals=True))
Inspects members of a class.
563
7
9,129
def is_defined_by_module(item, module, parent=None):
    """
    Check if item is directly defined by a module.
    This check may be prone to errors.

    :param item: Object (module, function, method, ...) being checked.
    :param module: Module that may define ``item``.
    :param parent: Optional parent namespace also searched for names.
    :returns: True if ``item`` appears to be defined by ``module``.
    """
    flag = False
    if isinstance(item, types.ModuleType):
        if not hasattr(item, '__file__'):
            try:
                # hack for cv2 and xfeatures2d
                import utool as ut
                name = ut.get_modname_from_modpath(module.__file__)
                flag = name in str(item)
            except Exception:
                # Narrowed from a bare 'except'; any failure means
                # "not defined here" rather than a crash.
                flag = False
        else:
            item_modpath = os.path.realpath(dirname(item.__file__))
            mod_fpath = module.__file__.replace('.pyc', '.py')
            if not mod_fpath.endswith('__init__.py'):
                flag = False
            else:
                modpath = os.path.realpath(dirname(mod_fpath))
                modpath = modpath.replace('.pyc', '.py')
                flag = item_modpath.startswith(modpath)
    elif hasattr(item, '_utinfo'):
        # Capture case where there is a utool wrapper
        orig_func = item._utinfo['orig_func']
        flag = is_defined_by_module(orig_func, module, parent)
    else:
        if isinstance(item, staticmethod):
            # static methods are a wrapper around a function
            item = item.__func__
        try:
            func_globals = meta_util_six.get_funcglobals(item)
            func_module_name = func_globals['__name__']
            if func_module_name == 'line_profiler':
                valid_names = dir(module)
                if parent is not None:
                    valid_names += dir(parent)
                if item.func_name in valid_names:
                    # hack to prevent small names
                    if len(item.func_name) > 6:
                        flag = True
            elif func_module_name == module.__name__:
                flag = True
        except AttributeError:
            if hasattr(item, '__module__'):
                flag = item.__module__ == module.__name__
    return flag
Check if item is directly defined by a module . This check may be prone to errors .
477
18
9,130
def is_bateries_included(item):
    """
    Returns True if a value is a Python builtin function (or a callable
    that lives in Python's standard library directory).
    """
    if not (hasattr(item, '__call__') and hasattr(item, '__module__')):
        return False
    if item.__module__ is None:
        return False
    module = sys.modules[item.__module__]
    if module == builtins:
        return True
    if hasattr(module, '__file__'):
        return LIB_PATH == dirname(module.__file__)
    return False
Returns if a value is a python builtin function
103
10
9,131
def dummy_func(arg1, arg2, arg3=None, arg4=[1, 2, 3], arg5={}, **kwargs):
    """
    Test func for kwargs parsing.

    The mutable defaults are deliberate fixture material for the
    signature-introspection tests; they are never mutated here.
    """
    foo = kwargs.get('foo', None)
    bar = kwargs.pop('bar', 4)
    foo2 = kwargs['foo2']
    return '{0}{1}{2}'.format(foo, bar, foo2)
test func for kwargs parseing
101
8
9,132
def get_docstr(func_or_class):
    """Get the docstring from a live object, unindented; '' if absent."""
    import utool as ut
    try:
        # Py2 function objects expose func_doc; fall back to __doc__.
        raw = func_or_class.func_doc
    except AttributeError:
        raw = func_or_class.__doc__
    if raw is None:
        raw = ''
    return ut.unindent(raw)
Get the docstring from a live object
79
8
9,133
def find_funcs_called_with_kwargs ( sourcecode , target_kwargs_name = 'kwargs' ) : import ast sourcecode = 'from __future__ import print_function\n' + sourcecode pt = ast . parse ( sourcecode ) child_funcnamess = [ ] debug = False or VERYVERB_INSPECT if debug : print ( '\nInput:' ) print ( 'target_kwargs_name = %r' % ( target_kwargs_name , ) ) print ( '\nSource:' ) print ( sourcecode ) import astor print ( '\nParse:' ) print ( astor . dump ( pt ) ) class KwargParseVisitor ( ast . NodeVisitor ) : """ TODO: understand ut.update_existing and dict update ie, know when kwargs is passed to these functions and then look assume the object that was updated is a dictionary and check wherever that is passed to kwargs as well. """ def visit_FunctionDef ( self , node ) : if debug : print ( '\nVISIT FunctionDef node = %r' % ( node , ) ) print ( 'node.args.kwarg = %r' % ( node . args . kwarg , ) ) if six . PY2 : kwarg_name = node . args . kwarg else : if node . args . kwarg is None : kwarg_name = None else : kwarg_name = node . args . kwarg . arg #import utool as ut #ut.embed() if kwarg_name != target_kwargs_name : # target kwargs is still in scope ast . NodeVisitor . generic_visit ( self , node ) def visit_Call ( self , node ) : if debug : print ( '\nVISIT Call node = %r' % ( node , ) ) #print(ut.repr4(node.__dict__,)) if isinstance ( node . func , ast . Attribute ) : try : funcname = node . func . value . id + '.' + node . func . attr except AttributeError : funcname = None elif isinstance ( node . func , ast . Name ) : funcname = node . func . id else : raise NotImplementedError ( 'do not know how to parse: node.func = %r' % ( node . func , ) ) if six . PY2 : kwargs = node . kwargs kwargs_name = None if kwargs is None else kwargs . id if funcname is not None and kwargs_name == target_kwargs_name : child_funcnamess . append ( funcname ) if debug : print ( 'funcname = %r' % ( funcname , ) ) print ( 'kwargs_name = %r' % ( kwargs_name , ) ) else : if node . 
keywords : for kwargs in node . keywords : if kwargs . arg is None : if hasattr ( kwargs . value , 'id' ) : kwargs_name = kwargs . value . id if funcname is not None and kwargs_name == target_kwargs_name : child_funcnamess . append ( funcname ) if debug : print ( 'funcname = %r' % ( funcname , ) ) print ( 'kwargs_name = %r' % ( kwargs_name , ) ) ast . NodeVisitor . generic_visit ( self , node ) try : KwargParseVisitor ( ) . visit ( pt ) except Exception : raise pass #import utool as ut #if ut.SUPER_STRICT: # raise return child_funcnamess
r Finds functions that are called with the keyword kwargs variable
814
14
9,134
def get_func_argspec ( func ) : if hasattr ( func , '_utinfo' ) : argspec = func . _utinfo [ 'orig_argspec' ] return argspec if isinstance ( func , property ) : func = func . fget try : argspec = inspect . getargspec ( func ) except Exception : argspec = inspect . getfullargspec ( func ) return argspec
wrapper around inspect . getargspec but takes into account utool decorators
89
15
9,135
def parse_func_kwarg_keys ( func , with_vals = False ) : sourcecode = get_func_sourcecode ( func , strip_docstr = True , strip_comments = True ) kwkeys = parse_kwarg_keys ( sourcecode , with_vals = with_vals ) #ut.get_func_kwargs TODO return kwkeys
hacky inference of kwargs keys
81
8
9,136
def get_func_kwargs ( func , recursive = True ) : import utool as ut argspec = ut . get_func_argspec ( func ) if argspec . defaults is None : header_kw = { } else : header_kw = dict ( zip ( argspec . args [ : : - 1 ] , argspec . defaults [ : : - 1 ] ) ) if argspec . keywords is not None : header_kw . update ( dict ( ut . recursive_parse_kwargs ( func ) ) ) return header_kw
func = ibeis . run_experiment
115
10
9,137
def argparse_funckw ( func , defaults = { } , * * kwargs ) : import utool as ut funckw_ = ut . get_funckw ( func , recursive = True ) funckw_ . update ( defaults ) funckw = ut . argparse_dict ( funckw_ , * * kwargs ) return funckw
allows kwargs to be specified on the commandline from testfuncs
81
15
9,138
def toggle ( self , key ) : val = self [ key ] assert isinstance ( val , bool ) , 'key[%r] = %r is not a bool' % ( key , val ) self . pref_update ( key , not val )
Toggles a boolean key
54
5
9,139
def change_combo_val ( self , new_val ) : choice_obj = self . _intern . value assert isinstance ( self . _intern . value , PrefChoice ) , 'must be a choice' return choice_obj . get_tuple ( )
Checks to see if a selection is a valid index or choice of a combo preference
57
17
9,140
def iteritems ( self ) : for ( key , val ) in six . iteritems ( self . __dict__ ) : if key in self . _printable_exclude : continue yield ( key , val )
Wow this class is messed up . I had to overwrite items when moving to python3 just because I haden t called it yet
45
26
9,141
def to_dict ( self , split_structs_bit = False ) : pref_dict = { } struct_dict = { } for ( key , val ) in six . iteritems ( self ) : if split_structs_bit and isinstance ( val , Pref ) : struct_dict [ key ] = val continue pref_dict [ key ] = val if split_structs_bit : return ( pref_dict , struct_dict ) return pref_dict
Converts prefeters to a dictionary . Children Pref can be optionally separated
99
14
9,142
def save ( self ) : fpath = self . get_fpath ( ) if fpath in [ '' , None ] : if self . _tree . parent is not None : if VERBOSE_PREF : print ( '[pref.save] Can my parent save me?' ) # ...to disk return self . _tree . parent . save ( ) if VERBOSE_PREF : print ( '[pref.save] I cannot be saved. I have no parents.' ) return False with open ( fpath , 'wb' ) as f : print ( '[pref] Saving to ' + fpath ) pref_dict = self . to_dict ( ) pickle . dump ( pref_dict , f , protocol = 2 ) # Use protocol 2 to support python2 and 3 return True
Saves prefs to disk in dict format
169
9
9,143
def load ( self ) : if VERBOSE_PREF : print ( '[pref.load()]' ) #if not os.path.exists(self._intern.fpath): # msg = '[pref] fpath=%r does not exist' % (self._intern.fpath) # return msg fpath = self . get_fpath ( ) try : with open ( fpath , 'rb' ) as f : if VERBOSE_PREF : print ( 'load: %r' % fpath ) pref_dict = pickle . load ( f ) except EOFError as ex1 : util_dbg . printex ( ex1 , 'did not load pref fpath=%r correctly' % fpath , iswarning = True ) #warnings.warn(msg) raise #return msg except ImportError as ex2 : util_dbg . printex ( ex2 , 'did not load pref fpath=%r correctly' % fpath , iswarning = True ) #warnings.warn(msg) raise #return msg if not util_type . is_dict ( pref_dict ) : raise Exception ( 'Preference file is corrupted' ) self . add_dict ( pref_dict ) return True
Read pref dict stored on disk . Overwriting current values .
267
12
9,144
def full_name ( self ) : if self . _tree . parent is None : return self . _intern . name return self . _tree . parent . full_name ( ) + '.' + self . _intern . name
returns name all the way up the tree
48
9
9,145
def pref_update ( self , key , new_val ) : print ( 'Update and save pref from: %s=%r, to: %s=%r' % ( key , six . text_type ( self [ key ] ) , key , six . text_type ( new_val ) ) ) self . __setattr__ ( key , new_val ) return self . save ( )
Changes a preference value and saves it to disk
86
9
9,146
def __get_permissions ( self , res , * * kwargs ) : response = res . _ ( * * kwargs ) return response . get ( 'permissions' , None )
This call returns current login user s permissions .
42
9
9,147
def inject_all_external_modules ( self , classname = None , allow_override = 'override+warn' , strict = True ) : #import utool as ut if classname is None : classname = self . __class__ . __name__ #import utool as ut #ut.embed() NEW = True if NEW : classkey_list = [ key for key in __CLASSTYPE_ATTRIBUTES__ if key [ 0 ] == classname ] else : injected_modules = get_injected_modules ( classname ) # the variable must be named CLASS_INJECT_KEY # and only one class can be specified per module. classkey_list = [ module . CLASS_INJECT_KEY for module in injected_modules ] for classkey in classkey_list : inject_instance ( self , classkey = classkey , allow_override = allow_override , strict = False ) for classkey in classkey_list : postinject_instance ( self , classkey = classkey )
dynamically injects registered module methods into a class instance
224
12
9,148
def decorate_class_method ( func , classkey = None , skipmain = False ) : #import utool as ut global __CLASSTYPE_ATTRIBUTES__ assert classkey is not None , 'must specify classkey' #if not (skipmain and ut.get_caller_modname() == '__main__'): __CLASSTYPE_ATTRIBUTES__ [ classkey ] . append ( func ) return func
Will inject all decorated function as methods of classkey
102
10
9,149
def decorate_postinject ( func , classkey = None , skipmain = False ) : #import utool as ut global __CLASSTYPE_POSTINJECT_FUNCS__ assert classkey is not None , 'must specify classkey' #if not (skipmain and ut.get_caller_modname() == '__main__'): __CLASSTYPE_POSTINJECT_FUNCS__ [ classkey ] . append ( func ) return func
Will perform func with argument self after inject_instance is called on classkey
104
15
9,150
def inject_func_as_method ( self , func , method_name = None , class_ = None , allow_override = False , allow_main = False , verbose = True , override = None , force = False ) : if override is not None : # TODO depcirate allow_override allow_override = override if method_name is None : method_name = get_funcname ( func ) if force : allow_override = True allow_main = True old_method = getattr ( self , method_name , None ) # Bind function to the class instance #new_method = types.MethodType(func, self, self.__class__) new_method = func . __get__ ( self , self . __class__ ) #new_method = profile(func.__get__(self, self.__class__)) if old_method is not None : old_im_func = get_method_func ( old_method ) new_im_func = get_method_func ( new_method ) if not allow_main and old_im_func is not None and ( get_funcglobals ( old_im_func ) [ '__name__' ] != '__main__' and get_funcglobals ( new_im_func ) [ '__name__' ] == '__main__' ) : if True or VERBOSE_CLASS : print ( '[util_class] skipping re-inject of %r from __main__' % method_name ) return if old_method is new_method or old_im_func is new_im_func : #if verbose and util_arg.NOT_QUIET: # print('WARNING: Skipping injecting the same function twice: %r' % new_method) #print('WARNING: Injecting the same function twice: %r' % new_method) return elif allow_override is False : raise AssertionError ( 'Overrides are not allowed. Already have method_name=%r' % ( method_name ) ) elif allow_override == 'warn' : print ( 'WARNING: Overrides are not allowed. Already have method_name=%r. Skipping' % ( method_name ) ) return elif allow_override == 'override+warn' : #import utool as ut #ut.embed() print ( 'WARNING: Overrides are allowed, but dangerous. method_name=%r.' 
% ( method_name ) ) print ( 'old_method = %r, im_func=%s' % ( old_method , str ( old_im_func ) ) ) print ( 'new_method = %r, im_func=%s' % ( new_method , str ( new_im_func ) ) ) print ( get_funcglobals ( old_im_func ) [ '__name__' ] ) print ( get_funcglobals ( new_im_func ) [ '__name__' ] ) # TODO: does this actually decrement the refcount enough? del old_method setattr ( self , method_name , new_method )
Injects a function into an object as a method
689
11
9,151
def inject_func_as_unbound_method ( class_ , func , method_name = None ) : if method_name is None : method_name = get_funcname ( func ) setattr ( class_ , method_name , func )
This is actually quite simple
54
5
9,152
def reloading_meta_metaclass_factory ( BASE_TYPE = type ) : class ReloadingMetaclass2 ( BASE_TYPE ) : def __init__ ( metaself , name , bases , dct ) : super ( ReloadingMetaclass2 , metaself ) . __init__ ( name , bases , dct ) #print('Making rrr for %r' % (name,)) metaself . rrr = reload_class return ReloadingMetaclass2
hack for pyqt
107
4
9,153
def reload_class ( self , verbose = True , reload_module = True ) : import utool as ut verbose = verbose or VERBOSE_CLASS classname = self . __class__ . __name__ try : modname = self . __class__ . __module__ if verbose : print ( '[class] reloading ' + classname + ' from ' + modname ) # --HACK-- if hasattr ( self , '_on_reload' ) : if verbose > 1 : print ( '[class] calling _on_reload for ' + classname ) self . _on_reload ( ) elif verbose > 1 : print ( '[class] ' + classname + ' does not have an _on_reload function' ) # Do for all inheriting classes def find_base_clases ( _class , find_base_clases = None ) : class_list = [ ] for _baseclass in _class . __bases__ : parents = find_base_clases ( _baseclass , find_base_clases ) class_list . extend ( parents ) if _class is not object : class_list . append ( _class ) return class_list head_class = self . __class__ # Determine if parents need reloading class_list = find_base_clases ( head_class , find_base_clases ) # HACK ignore = { HashComparable2 } class_list = [ _class for _class in class_list if _class not in ignore ] for _class in class_list : if verbose : print ( '[class] reloading parent ' + _class . __name__ + ' from ' + _class . __module__ ) if _class . __module__ == '__main__' : # Attempt to find the module that is the main module # This may be very hacky and potentially break main_module_ = sys . modules [ _class . __module__ ] main_modname = ut . get_modname_from_modpath ( main_module_ . __file__ ) module_ = sys . modules [ main_modname ] else : module_ = sys . modules [ _class . __module__ ] if hasattr ( module_ , 'rrr' ) : if reload_module : module_ . rrr ( verbose = verbose ) else : if reload_module : import imp if verbose : print ( '[class] reloading ' + _class . __module__ + ' with imp' ) try : imp . reload ( module_ ) except ( ImportError , AttributeError ) : print ( '[class] fallback reloading ' + _class . __module__ + ' with imp' ) # one last thing to try. 
probably used ut.import_module_from_fpath # when importing this module imp . load_source ( module_ . __name__ , module_ . __file__ ) # Reset class attributes _newclass = getattr ( module_ , _class . __name__ ) reload_class_methods ( self , _newclass , verbose = verbose ) # --HACK-- # TODO: handle injected definitions if hasattr ( self , '_initialize_self' ) : if verbose > 1 : print ( '[class] calling _initialize_self for ' + classname ) self . _initialize_self ( ) elif verbose > 1 : print ( '[class] ' + classname + ' does not have an _initialize_self function' ) except Exception as ex : ut . printex ( ex , 'Error Reloading Class' , keys = [ 'modname' , 'module' , 'class_' , 'class_list' , 'self' , ] ) raise
special class reloading function This function is often injected as rrr of classes
805
15
9,154
def reload_class_methods ( self , class_ , verbose = True ) : if verbose : print ( '[util_class] Reloading self=%r as class_=%r' % ( self , class_ ) ) self . __class__ = class_ for key in dir ( class_ ) : # Get unbound reloaded method func = getattr ( class_ , key ) if isinstance ( func , types . MethodType ) : # inject it into the old instance inject_func_as_method ( self , func , class_ = class_ , allow_override = True , verbose = verbose )
rebinds all class methods
135
6
9,155
def remove_private_obfuscation ( self ) : classname = self . __class__ . __name__ attrlist = [ attr for attr in dir ( self ) if attr . startswith ( '_' + classname + '__' ) ] for attr in attrlist : method = getattr ( self , attr ) truename = attr . replace ( '_' + classname + '__' , '__' ) setattr ( self , truename , method )
removes the python obfuscation of class privates so they can be executed as they appear in class source . Useful when playing with IPython .
110
29
9,156
def create_peptidequant_lookup ( fns , pqdb , poolnames , pepseq_colnr , ms1_qcolpattern = None , isobqcolpattern = None , psmnrpattern = None , fdrcolpattern = None , pepcolpattern = None ) : patterns = [ ms1_qcolpattern , fdrcolpattern , pepcolpattern ] storefuns = [ pqdb . store_precursor_quants , pqdb . store_fdr , pqdb . store_pep ] create_pep_protein_quant_lookup ( fns , pqdb , poolnames , pepseq_colnr , patterns , storefuns , isobqcolpattern , psmnrpattern )
Calls lower level function to create a peptide quant lookup
169
12
9,157
def create_proteinquant_lookup ( fns , pqdb , poolnames , protacc_colnr , ms1_qcolpattern = None , isobqcolpattern = None , psmnrpattern = None , probcolpattern = None , fdrcolpattern = None , pepcolpattern = None ) : patterns = [ ms1_qcolpattern , probcolpattern , fdrcolpattern , pepcolpattern ] storefuns = [ pqdb . store_precursor_quants , pqdb . store_probability , pqdb . store_fdr , pqdb . store_pep ] create_pep_protein_quant_lookup ( fns , pqdb , poolnames , protacc_colnr , patterns , storefuns , isobqcolpattern , psmnrpattern )
Calls lower level function to create a protein quant lookup
187
11
9,158
def create_pep_protein_quant_lookup ( fns , pqdb , poolnames , featcolnr , patterns , storefuns , isobqcolpattern = None , psmnrpattern = None ) : tablefn_map = create_tablefn_map ( fns , pqdb , poolnames ) feat_map = pqdb . get_feature_map ( ) for pattern , storefun in zip ( patterns , storefuns ) : if pattern is None : continue colmap = get_colmap ( fns , pattern , single_col = True ) if colmap : store_single_col_data ( fns , tablefn_map , feat_map , storefun , featcolnr , colmap ) if isobqcolpattern is not None : isocolmap = get_colmap ( fns , isobqcolpattern , antipattern = psmnrpattern ) else : return if psmnrpattern is not None : psmcolmap = get_colmap ( fns , psmnrpattern ) else : psmcolmap = False create_isobaric_quant_lookup ( fns , tablefn_map , feat_map , pqdb , featcolnr , isocolmap , psmcolmap )
Does the work when creating peptide and protein quant lookups . This loops through storing options and parses columns passing on to the storing functions
275
28
9,159
def store_single_col_data ( fns , prottable_id_map , pacc_map , pqdbmethod , protacc_colnr , colmap ) : to_store = [ ] for fn , header , pquant in tsvreader . generate_tsv_pep_protein_quants ( fns ) : pacc_id = pacc_map [ pquant [ header [ protacc_colnr ] ] ] pqdata = ( pacc_id , prottable_id_map [ fn ] , pquant [ colmap [ fn ] ] ) to_store . append ( pqdata ) if len ( to_store ) > 10000 : pqdbmethod ( to_store ) to_store = [ ] pqdbmethod ( to_store )
General method to store single column data from protein tables in lookup
173
12
9,160
def map_psmnrcol_to_quantcol ( quantcols , psmcols , tablefn_map ) : if not psmcols : for fn in quantcols : for qcol in quantcols [ fn ] : yield ( tablefn_map [ fn ] , qcol ) else : for fn in quantcols : for qcol , psmcol in zip ( quantcols [ fn ] , psmcols [ fn ] ) : yield ( tablefn_map [ fn ] , qcol , psmcol )
This function yields tuples of table filename isobaric quant column and if necessary number - of - PSM column
118
23
9,161
def avl_release_kids ( node ) : left , right = node . left , node . right if left is not None : # assert left.parent is node left . parent = None if right is not None : # assert right.parent is node right . parent = None node . balance = 0 node . left = None node . right = None return node , left , right
splits a node from its kids maintaining parent pointers
79
10
9,162
def avl_release_parent ( node ) : parent = node . parent if parent is not None : if parent . right is node : parent . right = None elif parent . left is node : parent . left = None else : raise AssertionError ( 'impossible state' ) node . parent = None parent . balance = max ( height ( parent . right ) , height ( parent . left ) ) + 1 return node , parent
removes the parent of a child
92
7
9,163
def avl_join ( t1 , t2 , node ) : if DEBUG_JOIN : print ( '-- JOIN node=%r' % ( node , ) ) if t1 is None and t2 is None : if DEBUG_JOIN : print ( 'Join Case 1' ) top = node elif t1 is None : # FIXME keep track of count if possible if DEBUG_JOIN : print ( 'Join Case 2' ) top = avl_insert_dir ( t2 , node , 0 ) elif t2 is None : if DEBUG_JOIN : print ( 'Join Case 3' ) top = avl_insert_dir ( t1 , node , 1 ) else : h1 = height ( t1 ) h2 = height ( t2 ) if h1 > h2 + 1 : if DEBUG_JOIN : print ( 'Join Case 4' ) top = avl_join_dir_recursive ( t1 , t2 , node , 1 ) if DEBUG_JOIN : ascii_tree ( t1 , 'top' ) elif h2 > h1 + 1 : if DEBUG_JOIN : print ( 'Join Case 5' ) ascii_tree ( t1 ) ascii_tree ( t2 ) top = avl_join_dir_recursive ( t1 , t2 , node , 0 ) if DEBUG_JOIN : ascii_tree ( top ) else : if DEBUG_JOIN : print ( 'Join Case 6' ) # Insert at the top of the tree top = avl_new_top ( t1 , t2 , node , 0 ) return top
Joins two trees t1 and t1 with an intermediate key - value pair
354
16
9,164
def avl_split_last ( root ) : if root is None : raise IndexError ( 'Empty tree has no maximum element' ) root , left , right = avl_release_kids ( root ) if right is None : new_root , last_node = left , root else : new_right , last_node = avl_split_last ( right ) new_root = avl_join ( left , new_right , root ) return ( new_root , last_node )
Removes the maximum element from the tree
106
8
9,165
def avl_split_first ( root ) : if root is None : raise IndexError ( 'Empty tree has no maximum element' ) root , left , right = avl_release_kids ( root ) if left is None : new_root , first_node = right , root else : new_left , first_node = avl_split_first ( left ) new_root = avl_join ( new_left , right , root ) return ( new_root , first_node )
Removes the minimum element from the tree
106
8
9,166
def avl_join2 ( t1 , t2 ) : if t1 is None and t2 is None : new_root = None elif t2 is None : new_root = t1 elif t1 is None : new_root = t2 else : new_left , last_node = avl_split_last ( t1 ) debug = 0 if debug : EulerTourTree ( root = new_left ) . _assert_nodes ( 'new_left' ) EulerTourTree ( root = last_node ) . _assert_nodes ( 'last_node' ) EulerTourTree ( root = t2 ) . _assert_nodes ( 't2' ) print ( 'new_left' ) EulerTourTree ( root = new_left ) . print_tree ( ) print ( 'last_node' ) EulerTourTree ( root = last_node ) . print_tree ( ) print ( 't2' ) EulerTourTree ( root = t2 ) . print_tree ( ) new_root = avl_join ( new_left , t2 , last_node ) if debug : print ( 'new_root' ) EulerTourTree ( root = new_root ) . print_tree ( ) EulerTourTree ( root = last_node ) . _assert_nodes ( 'new_root' ) return new_root
join two trees without any intermediate key
301
7
9,167
def to_networkx ( self , labels = None , edge_labels = False ) : import networkx as nx graph = nx . DiGraph ( ) for node in self . _traverse_nodes ( ) : u = node . key graph . add_node ( u ) # Minor redundancy # Set node properties graph . nodes [ u ] [ 'value' ] = node . value if labels is not None : label = ',' . join ( [ str ( getattr ( node , k ) ) for k in labels ] ) graph . nodes [ u ] [ 'label' ] = label if node . left is not None : v = node . left . key graph . add_node ( v ) graph . add_edge ( u , v ) if edge_labels : graph . edge [ u ] [ v ] [ 'label' ] = 'L' if node . right is not None : v = node . right . key graph . add_node ( v ) graph . add_edge ( u , v ) if edge_labels : graph . edge [ u ] [ v ] [ 'label' ] = 'R' return graph
Get a networkx representation of the binary search tree .
243
11
9,168
def repr_tree ( self ) : import utool as ut import networkx as nx repr_tree = nx . DiGraph ( ) for u , v in ut . itertwo ( self . values ( ) ) : if not repr_tree . has_edge ( v , u ) : repr_tree . add_edge ( u , v ) return repr_tree
reconstruct represented tree as a DiGraph to preserve the current rootedness
79
14
9,169
def unixjoin ( * args ) : isabs_list = list ( map ( isabs , args ) ) if any ( isabs_list ) : poslist = [ count for count , flag in enumerate ( isabs_list ) if flag ] pos = poslist [ - 1 ] return '/' . join ( args [ pos : ] ) else : return '/' . join ( args )
Like os . path . join but uses forward slashes on win32
84
14
9,170
def create_merge_psm_map ( peptides , ns ) : psmmap = { } for peptide in peptides : seq = reader . get_peptide_seq ( peptide , ns ) psm_ids = reader . get_psm_ids_from_peptide ( peptide , ns ) for psm_id in psm_ids : try : psmmap [ seq ] [ psm_id . text ] = 1 except KeyError : psmmap [ seq ] = { psm_id . text : 2 } for seq , psm_id_dict in psmmap . items ( ) : psmmap [ seq ] = [ x for x in psm_id_dict ] return psmmap
Loops through peptides stores sequences mapped to PSM ids .
163
14
9,171
def create_pool_b ( dsn = None , * , min_size = 10 , max_size = 10 , max_queries = 50000 , max_inactive_connection_lifetime = 300.0 , setup = None , init = None , loop = None , connection_class = BuildPgConnection , * * connect_kwargs , ) : return BuildPgPool ( dsn , connection_class = connection_class , min_size = min_size , max_size = max_size , max_queries = max_queries , loop = loop , setup = setup , init = init , max_inactive_connection_lifetime = max_inactive_connection_lifetime , * * connect_kwargs , )
Create a connection pool .
159
5
9,172
def add_runnable ( self , runnable ) : if runnable . id in self . runnables : raise SimError ( 'Duplicate runnable component {0}' . format ( runnable . id ) ) self . runnables [ runnable . id ] = runnable
Adds a runnable component to the list of runnable components in this simulation .
69
18
9,173
def run ( self ) : self . init_run ( ) if self . debug : self . dump ( "AfterInit: " ) #print("++++++++++++++++ Time: %f"%self.current_time) while self . step ( ) : #self.dump("Time: %f"%self.current_time) #print("++++++++++++++++ Time: %f"%self.current_time) pass
Runs the simulation .
85
5
9,174
def controller_creatr ( filename ) : if not check ( ) : click . echo ( Fore . RED + 'ERROR: Ensure you are in a bast app to run the create:controller command' ) return path = os . path . abspath ( '.' ) + '/controller' if not os . path . exists ( path ) : os . makedirs ( path ) # if os.path.isfile(path + ) file_name = str ( filename + '.py' ) if os . path . isfile ( path + "/" + file_name ) : click . echo ( Fore . WHITE + Back . RED + "ERROR: Controller file exists" ) return controller_file = open ( os . path . abspath ( '.' ) + '/controller/' + file_name , 'w+' ) compose = "from bast import Controller\n\nclass " + filename + "(Controller):\n pass" controller_file . write ( compose ) controller_file . close ( ) click . echo ( Fore . GREEN + "Controller " + filename + " created successfully" )
Name of the controller file to be created
229
8
9,175
def view_creatr ( filename ) : if not check ( ) : click . echo ( Fore . RED + 'ERROR: Ensure you are in a bast app to run the create:view command' ) return path = os . path . abspath ( '.' ) + '/public/templates' if not os . path . exists ( path ) : os . makedirs ( path ) filename_ = str ( filename + ".html" ) . lower ( ) view_file = open ( path + "/" + filename_ , 'w+' ) view_file . write ( "" ) view_file . close ( ) click . echo ( Fore . GREEN + "View file " + filename_ + "created in public/template folder" )
Name of the View File to be created
155
8
9,176
def migration_creatr ( migration_file , create , table ) : if not check ( ) : click . echo ( Fore . RED + 'ERROR: Ensure you are in a bast app to run the create:migration command' ) return migration = CreateMigration ( ) if table is None : table = snake_case ( migration_file ) file = migration . create_file ( snake_case ( migration_file ) , table = table , create = create ) click . echo ( Fore . GREEN + 'Migration file created at %s' % file )
Name of the migration file
117
5
9,177
def quit ( self ) : logging . info ( "quiting sock server" ) if self . __quit is not None : self . __quit . set ( ) self . join ( ) return
Quit socket server
40
4
9,178
def get_quantcols ( pattern , oldheader , coltype ) : if pattern is None : return False if coltype == 'precur' : return reader . get_cols_in_file ( pattern , oldheader , single_col = True )
Searches for quantification columns using pattern and header list . Calls reader function to do regexp . Returns a single column for precursor quant .
55
29
9,179
def get_peptide_quant ( quantdata , quanttype ) : parsefnx = { 'precur' : max } quantfloats = [ ] for q in quantdata : try : quantfloats . append ( float ( q ) ) except ( TypeError , ValueError ) : pass if not quantfloats : return 'NA' return str ( parsefnx [ quanttype ] ( quantfloats ) )
Parses lists of quantdata and returns maxvalue from them . Strips NA
89
17
9,180
def read_csv ( fpath ) : import csv import utool as ut #csvfile = open(fpath, 'rb') with open ( fpath , 'rb' ) as csvfile : row_iter = csv . reader ( csvfile , delimiter = str ( ',' ) , quotechar = str ( '|' ) ) row_list = [ ut . lmap ( ut . ensure_unicode , row ) for row in row_iter ] return row_list
reads csv in unicode
106
6
9,181
def get_caller_name ( N = 0 , strict = True ) : if isinstance ( N , ( list , tuple ) ) : name_list = [ ] for N_ in N : try : name_list . append ( get_caller_name ( N_ ) ) except AssertionError : name_list . append ( 'X' ) return '[' + '][' . join ( name_list ) + ']' # <get_parent_frame> parent_frame = get_stack_frame ( N = N + 2 , strict = strict ) # </get_parent_frame> caller_name = parent_frame . f_code . co_name if caller_name == '<module>' : co_filename = parent_frame . f_code . co_filename caller_name = splitext ( split ( co_filename ) [ 1 ] ) [ 0 ] if caller_name == '__init__' : co_filename = parent_frame . f_code . co_filename caller_name = basename ( dirname ( co_filename ) ) + '.' + caller_name return caller_name
Standalone version of get_caller_name
244
11
9,182
def _handle_ping ( self , packet , protocol ) : if 'payload' in packet : is_valid_node = True node_ids = list ( packet [ 'payload' ] . values ( ) ) for node_id in node_ids : if self . _repository . get_node ( node_id ) is None : is_valid_node = False break if is_valid_node : self . _pong ( packet , protocol ) else : self . _pong ( packet , protocol )
Responds to pings from registry_client only if the node_ids present in the ping payload are registered
111
22
9,183
def set_features ( self ) : allpsms_str = readers . generate_psms_multiple_fractions_strings ( self . mergefiles , self . ns ) allpeps = preparation . merge_peptides ( self . mergefiles , self . ns ) self . features = { 'psm' : allpsms_str , 'peptide' : allpeps }
Merge all psms and peptides
84
8
9,184
def git_sequence_editor_squash(fpath):
    """Squash consecutive 'wip' commits in a git rebase-i sequence file.

    Intended for use as a GIT_SEQUENCE_EDITOR: rewrites *fpath* so that a
    'pick <hash> wip' line directly following another 'wip' commit becomes
    a 'squash' line, but only when the two commits are less than 45 minutes
    apart. The rewritten sequence is written back to *fpath*.
    """
    # print(sys.argv)
    import utool as ut
    text = ut.read_from(fpath)
    # print('fpath = %r' % (fpath,))
    print(text)
    # Doesnt work because of fixed witdth requirement
    # search = (ut.util_regex.positive_lookbehind('[a-z]* [a-z0-9]* wip\n') + 'pick ' +
    #           ut.reponamed_field('hash', '[a-z0-9]*') + ' wip')
    # repl = ('squash ' + ut.bref_field('hash') + ' wip')
    # import re
    # new_text = re.sub(search, repl, text, flags=re.MULTILINE)
    # print(new_text)
    prev_msg = None
    prev_dt = None
    new_lines = []

    def get_commit_date(hashid):
        # Ask git for the committer date of *hashid* and parse it.
        out, err, ret = ut.cmd('git show -s --format=%ci ' + hashid,
                               verbose=False, quiet=True, pad_stdout=False)
        # from datetime import datetime
        from dateutil import parser
        # print('out = %r' % (out,))
        stamp = out.strip('\n')
        # print('stamp = %r' % (stamp,))
        dt = parser.parse(stamp)
        # dt = datetime.strptime(stamp, '%Y-%m-%d %H:%M:%S %Z')
        # print('dt = %r' % (dt,))
        return dt

    for line in text.split('\n'):
        commit_line = line.split(' ')
        if len(commit_line) < 3:
            # Not an '<action> <hash> <message>' line; pass it through and
            # reset state so it breaks any squash run.
            prev_msg = None
            prev_dt = None
            new_lines += [line]
            continue
        action = commit_line[0]
        hashid = commit_line[1]
        msg = ' '.join(commit_line[2:])
        try:
            dt = get_commit_date(hashid)
        except ValueError:
            # Hash not resolvable to a date; treat the line as opaque.
            prev_msg = None
            prev_dt = None
            new_lines += [line]
            continue
        orig_msg = msg
        can_squash = action == 'pick' and msg == 'wip' and prev_msg == 'wip'
        if prev_dt is not None and prev_msg == 'wip':
            tdelta = dt - prev_dt
            # Only squash closely consecutive commits
            threshold_minutes = 45
            td_min = (tdelta.total_seconds() / 60.)
            # print(tdelta)
            can_squash &= td_min < threshold_minutes
            msg = msg + ' -- tdelta=%r' % (ut.get_timedelta_str(tdelta),)
        if can_squash:
            new_line = ' '.join(['squash', hashid, msg])
            new_lines += [new_line]
        else:
            new_lines += [line]
        prev_msg = orig_msg
        prev_dt = dt
    new_text = '\n'.join(new_lines)

    # NOTE(review): this second definition of get_commit_date is dead code —
    # it rebinds the helper only after the loop has finished and is never
    # called; presumably leftover from debugging.
    def get_commit_date(hashid):
        out = ut.cmd('git show -s --format=%ci ' + hashid, verbose=False)
        print('out = %r' % (out,))
    # print('Dry run')
    # ut.dump_autogen_code(fpath, new_text)
    print(new_text)
    ut.write_to(fpath, new_text, n=None)
Squashes wip messages.
792
5
9,185
def std_build_command(repo='.'):
    """DEPRECATED: run the platform's standard build script inside *repo*.

    Changes the working directory to *repo* (unless it is empty), then
    executes mingw_build.bat on win32 or ./unix_build.sh elsewhere,
    forwarding the --no-rmbuild flag when present on the command line.
    """
    import utool as ut
    print('+**** stdbuild *******')
    print('repo = %r' % (repo,))
    # vtool --rebuild-sver didnt work with './mingw_build.bat'
    is_win32 = sys.platform.startswith('win32')
    scriptname = 'mingw_build.bat' if is_win32 else './unix_build.sh'
    if repo == '':
        repo = '.'  # default to cwd
    else:
        os.chdir(repo)
    ut.assert_exists(scriptname)
    normbuild_flag = '--no-rmbuild'
    if ut.get_argflag(normbuild_flag):
        scriptname = scriptname + ' ' + normbuild_flag
    # Execute build
    ut.cmd(scriptname)
    print('L**** stdbuild *******')
DEPRECATE My standard build script names .
213
10
9,186
def wait_for_import(self, connection_id, wait_interval):
    """Block until the connection leaves the IMPORT_CONFIGURATION state.

    Polls the API every *wait_interval* seconds, printing one progress dot
    per poll. Raises CommandError when the connection cannot be fetched.
    """
    self.stdout.write(self.style.NOTICE('Waiting for import'), ending='')
    while True:
        # The API can lag right after the import starts, so always take a
        # breath before (re)reading the state.
        self.stdout.write(self.style.NOTICE('.'), ending='')
        time.sleep(wait_interval)
        try:
            connection = utils.get_connection(connection_id)
        except requests.HTTPError as e:
            raise CommandError("Failed to fetch connection information.") from e
        if connection['state'] != utils.ConnectionStates.IMPORT_CONFIGURATION:
            break
    self.stdout.write(self.style.NOTICE(' Done!'))
Wait until connection state is no longer IMPORT_CONFIGURATION .
191
15
9,187
def setup ( self ) : if self . dry_run is not True : self . client = self . _get_client ( ) self . _disable_access_key ( )
Method runs the plugin
38
4
9,188
def validate(self):
    """Return True when the tracked access key exists and is 'Inactive'.

    Any API failure is logged and reported as False rather than raised.
    """
    try:
        last_used = self.client.get_access_key_last_used(
            AccessKeyId=self.access_key_id)
        owner = last_used['UserName']
        listing = self.client.list_access_keys(UserName=owner)
        for metadata in listing['AccessKeyMetadata']:
            if metadata['AccessKeyId'] == self.access_key_id and \
                    metadata['Status'] == 'Inactive':
                return True
        return False
    except Exception as e:
        logger.info("Failed to validate key disable for "
                    "key {id} due to: {e}.".format(e=e,
                                                   id=self.access_key_id))
        return False
Returns whether this plugin does what it claims to have done
167
11
9,189
def _disable_access_key ( self , force_disable_self = False ) : client = self . client if self . validate is True : return else : try : client . update_access_key ( UserName = self . _search_user_for_key ( ) , AccessKeyId = self . access_key_id , Status = 'Inactive' ) logger . info ( "Access key {id} has " "been disabled." . format ( id = self . access_key_id ) ) except Exception as e : logger . info ( "Access key {id} could not " "be disabled due to: {e}." . format ( e = e , id = self . access_key_id ) )
This function first checks to see if the key is already disabled \
155
13
9,190
def generate_master_proteins(psms, protcol):
    """Yield the unique master proteins present in a PSM table.

    PSMs listing multiple master proteins (';'-separated) are excluded, as
    are 'NA' and empty accessions. Each yielded item is a one-key dict
    keyed by prottabledata.HEADER_PROTEIN.

    Args:
        psms: iterable of PSM dicts.
        protcol: column holding the master protein accession; falls back
            to mzidtsvdata.HEADER_MASTER_PROT when falsy.
    """
    column = protcol or mzidtsvdata.HEADER_MASTER_PROT
    # A dict (not a set) keeps first-seen yield order deterministic.
    masters = {}
    for psm in psms:
        accession = psm[column]
        if ';' not in accession:
            masters[accession] = 1
    for bad in ('NA', ''):
        masters.pop(bad, None)
    for accession in masters:
        yield {prottabledata.HEADER_PROTEIN: accession}
Fed with a psms generator this returns the master proteins present in the PSM table . PSMs with multiple master proteins are excluded .
152
27
9,191
def prepare_percolator_output(self, fn):
    """Parse a percolator output file and return (namespace, static xml)."""
    namespace = xml.get_namespace(fn)
    static_xml = readers.get_percolator_static_xml(fn, namespace)
    return namespace, static_xml
Returns namespace and static xml from percolator output file
47
11
9,192
def git_available(func):
    """Decorator that runs *func* only when APISettings.GIT_DIR is a git repo.

    Changes the working directory to GIT_DIR first. When `git rev-parse`
    fails there, prints a failure message via Shell and exits with status 1
    instead of calling the wrapped function.
    """
    import functools

    # functools.wraps keeps the wrapped function's __name__/__doc__ intact,
    # and **kwargs lets decorated callables accept keyword arguments too —
    # both backward compatible with the original *args-only wrapper.
    @functools.wraps(func)
    def inner(*args, **kwargs):
        os.chdir(APISettings.GIT_DIR)
        if call(['git', 'rev-parse']) == 0:
            return func(*args, **kwargs)
        Shell.fail('There is no git repository!')
        return exit(1)
    return inner
Check if a git repository exists in the given folder .
72
11
9,193
def _cuda_get_gpu_spec_string ( gpu_ids = None ) : if gpu_ids is None : return '' if isinstance ( gpu_ids , list ) : return ',' . join ( str ( gpu_id ) for gpu_id in gpu_ids ) if isinstance ( gpu_ids , int ) : return str ( gpu_ids ) return gpu_ids
Build a GPU id string to be used for CUDA_VISIBLE_DEVICES .
92
18
9,194
def write_error(self, status_code, **kwargs):
    """Render an unhandled server exception as a readable error page.

    The traceback is included only when the 'serve_traceback' setting is
    enabled and exception info was supplied by Tornado.
    """
    reason = self._reason
    if self.settings.get("serve_traceback") and "exc_info" in kwargs:
        error = list(traceback.format_exception(*kwargs["exc_info"]))
    else:
        error = None
    content = self.render_exception(_traceback=error, message=reason,
                                    code=status_code)
    self.write(content)
Handle Exceptions from the server . Formats the HTML into readable form
133
14
9,195
def view(self, template_name, kwargs=None):
    """Render *template_name* with *kwargs* and write it to the response."""
    kwargs = {} if kwargs is None else kwargs
    self.add_('session', self.session)
    self.write(self.render_template(template_name, **kwargs))
Used to render template to view
67
6
9,196
def initialize(self, method, middleware, request_type):
    """Tornado initialize hook: stash the route's controller method,
    middleware chain, and request type on the handler instance for later
    use while the route executes."""
    self.method, self.middleware, self.request_type = (
        method, middleware, request_type)
Overridden initialize method from Tornado . Assigns the controller method and middleware attached to the route being executed to global variables to be used
36
28
9,197
def only(self, arguments):
    """Return {name: value} for each requested argument name.

    NOTE(review): a bare string passed here is exploded into single
    characters by list() — confirm callers always pass sequences of names.
    """
    if not isinstance(arguments, list):
        arguments = list(arguments)
    return {name: self.get_argument(name) for name in arguments}
returns the key value pair of the arguments passed as a dict object
48
14
9,198
def all(self):
    """Return every argument passed with the request as {name: value}."""
    return {name: self.get_argument(name)
            for name in self.request.arguments}
Returns all the arguments passed with the request
43
8
9,199
def except_(self, arguments):
    """Return the request arguments except the names listed in *arguments*."""
    if not isinstance(arguments, list):
        arguments = list(arguments)
    return {name: self.get_argument(name)
            for name in self.request.arguments
            if name not in arguments}
returns the arguments passed to the route except that set by user
68
13