idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
23,200
def decr ( self , conn , key , decrement = 1 ) : assert self . _validate_key ( key ) resp = yield from self . _incr_decr ( conn , b'decr' , key , decrement ) return resp
Command is used to change data for some item in - place decrementing it . The data for the item is treated as decimal representation of a 64 - bit unsigned integer .
55
35
23,201
def touch ( self , conn , key , exptime ) : assert self . _validate_key ( key ) _cmd = b' ' . join ( [ b'touch' , key , str ( exptime ) . encode ( 'utf-8' ) ] ) cmd = _cmd + b'\r\n' resp = yield from self . _execute_simple_command ( conn , cmd ) if resp not in ( const . TOUCHED , const . NOT_FOUND ) : raise ClientException ( 'Memcached touch failed' , resp ) return resp == const . TOUCHED
The command is used to update the expiration time of an existing item without fetching it .
131
18
23,202
def version ( self , conn ) : command = b'version\r\n' response = yield from self . _execute_simple_command ( conn , command ) if not response . startswith ( const . VERSION ) : raise ClientException ( 'Memcached version failed' , response ) version , number = response . split ( ) return number
Current version of the server .
75
6
23,203
def flush_all ( self , conn ) : command = b'flush_all\r\n' response = yield from self . _execute_simple_command ( conn , command ) if const . OK != response : raise ClientException ( 'Memcached flush_all failed' , response )
Its effect is to invalidate all existing items immediately
62
10
23,204
def create_account ( self , short_name , author_name = None , author_url = None , replace_token = True ) : response = self . _telegraph . method ( 'createAccount' , values = { 'short_name' : short_name , 'author_name' : author_name , 'author_url' : author_url } ) if replace_token : self . _telegraph . access_token = response . get ( 'access_token' ) return response
Create a new Telegraph account
106
5
23,205
def edit_account_info ( self , short_name = None , author_name = None , author_url = None ) : return self . _telegraph . method ( 'editAccountInfo' , values = { 'short_name' : short_name , 'author_name' : author_name , 'author_url' : author_url } )
Update information about a Telegraph account . Pass only the parameters that you want to edit
77
16
23,206
def revoke_access_token ( self ) : response = self . _telegraph . method ( 'revokeAccessToken' ) self . _telegraph . access_token = response . get ( 'access_token' ) return response
Revoke access_token and generate a new one for example if the user would like to reset all connected sessions or you have reasons to believe the token was compromised . On success returns dict with new access_token and auth_url fields
49
47
23,207
def get_page ( self , path , return_content = True , return_html = True ) : response = self . _telegraph . method ( 'getPage' , path = path , values = { 'return_content' : return_content } ) if return_content and return_html : response [ 'content' ] = nodes_to_html ( response [ 'content' ] ) return response
Get a Telegraph page
86
4
23,208
def create_page ( self , title , content = None , html_content = None , author_name = None , author_url = None , return_content = False ) : if content is None : content = html_to_nodes ( html_content ) content_json = json . dumps ( content ) return self . _telegraph . method ( 'createPage' , values = { 'title' : title , 'author_name' : author_name , 'author_url' : author_url , 'content' : content_json , 'return_content' : return_content } )
Create a new Telegraph page
128
5
23,209
def get_account_info ( self , fields = None ) : return self . _telegraph . method ( 'getAccountInfo' , { 'fields' : json . dumps ( fields ) if fields else None } )
Get information about a Telegraph account
46
6
23,210
def get_views ( self , path , year = None , month = None , day = None , hour = None ) : return self . _telegraph . method ( 'getViews' , path = path , values = { 'year' : year , 'month' : month , 'day' : day , 'hour' : hour } )
Get the number of views for a Telegraph article
73
9
23,211
def upload_file ( f ) : with FilesOpener ( f ) as files : response = requests . post ( 'https://telegra.ph/upload' , files = files ) . json ( ) if isinstance ( response , list ) : error = response [ 0 ] . get ( 'error' ) else : error = response . get ( 'error' ) if error : raise TelegraphException ( error ) return [ i [ 'src' ] for i in response ]
Upload file to Telegra . ph s servers . Returns a list of links . Allowed only . jpg . jpeg . png . gif and . mp4 files .
100
37
23,212
def get_by_natural_key ( self , * args ) : kwargs = self . natural_key_kwargs ( * args ) # Since kwargs already has __ lookups in it, we could just do this: # return self.get(**kwargs) # But, we should call each related model's get_by_natural_key in case # it's been overridden for name , rel_to in self . model . get_natural_key_info ( ) : if not rel_to : continue # Extract natural key for related object nested_key = extract_nested_key ( kwargs , rel_to , name ) if nested_key : # Update kwargs with related object try : kwargs [ name ] = rel_to . objects . get_by_natural_key ( * nested_key ) except rel_to . DoesNotExist : # If related object doesn't exist, assume this one doesn't raise self . model . DoesNotExist ( ) else : kwargs [ name ] = None return self . get ( * * kwargs )
Return the object corresponding to the provided natural key .
237
10
23,213
def create_by_natural_key ( self , * args ) : kwargs = self . natural_key_kwargs ( * args ) for name , rel_to in self . model . get_natural_key_info ( ) : if not rel_to : continue nested_key = extract_nested_key ( kwargs , rel_to , name ) # Automatically create any related objects as needed if nested_key : kwargs [ name ] , is_new = ( rel_to . objects . get_or_create_by_natural_key ( * nested_key ) ) else : kwargs [ name ] = None return self . create ( * * kwargs )
Create a new object from the provided natural key values . If the natural key contains related objects recursively get or create them by their natural keys .
151
30
23,214
def get_or_create_by_natural_key ( self , * args ) : try : return self . get_by_natural_key ( * args ) , False except self . model . DoesNotExist : return self . create_by_natural_key ( * args ) , True
get_or_create + get_by_natural_key
63
13
23,215
def resolve_keys ( self , keys , auto_create = False ) : resolved = { } success = True for key in keys : if auto_create : resolved [ key ] = self . find ( * key ) else : try : resolved [ key ] = self . get_by_natural_key ( * key ) except self . model . DoesNotExist : success = False resolved [ key ] = None return resolved , success
Resolve the list of given keys into objects if possible . Returns a mapping and a success indicator .
90
20
23,216
def get_natural_key_info ( cls ) : fields = cls . get_natural_key_def ( ) info = [ ] for name in fields : field = cls . _meta . get_field ( name ) rel_to = None if hasattr ( field , 'rel' ) : rel_to = field . rel . to if field . rel else None elif hasattr ( field , 'remote_field' ) : if field . remote_field : rel_to = field . remote_field . model else : rel_to = None info . append ( ( name , rel_to ) ) return info
Derive natural key from first unique_together definition noting which fields are related objects vs . regular fields .
134
21
23,217
def get_natural_key_fields ( cls ) : natural_key = [ ] for name , rel_to in cls . get_natural_key_info ( ) : if not rel_to : natural_key . append ( name ) else : nested_key = rel_to . get_natural_key_fields ( ) natural_key . extend ( [ name + '__' + nname for nname in nested_key ] ) return natural_key
Determine actual natural key field list incorporating the natural keys of related objects as needed .
100
18
23,218
def natural_key ( self ) : # Recursively extract properties from related objects if needed vals = [ reduce ( getattr , name . split ( '__' ) , self ) for name in self . get_natural_key_fields ( ) ] return vals
Return the natural key for this object .
57
8
23,219
def derive_coordinates ( self ) : if self . _coordinates_resolved : # Coordinates were already resolved. Skip return if self . seg_map is not None : # Translate coordinates self . start , self . filename , include_ref = self . seg_map . derive_source_offset ( self . start ) self . end , end_filename , _ = self . seg_map . derive_source_offset ( self . end , is_end = True ) else : end_filename = self . filename line_start = 0 lineno = 1 file_pos = 0 # Skip deriving end coordinate if selection spans multiple files if self . filename != end_filename : get_end = False elif self . end is None : get_end = False else : get_end = True if ( self . filename is not None ) and ( self . start is not None ) : with open ( self . filename , 'r' , newline = '' , encoding = 'utf_8' ) as fp : while True : line_text = fp . readline ( ) file_pos += len ( line_text ) if line_text == "" : break if ( self . start_line is None ) and ( self . start < file_pos ) : self . start_line = lineno self . start_col = self . start - line_start self . start_line_text = line_text . rstrip ( "\n" ) . rstrip ( "\r" ) if not get_end : break if get_end and ( self . end_line is None ) and ( self . end < file_pos ) : self . end_line = lineno self . end_col = self . end - line_start break lineno += 1 line_start = file_pos # If no end coordinate was derived, just do a single char selection if not get_end : self . end_line = self . start_line self . end_col = self . start_col self . end = self . start self . _coordinates_resolved = True
Depending on the compilation source some members of the SourceRef object may be incomplete . Calling this function performs the necessary derivations to complete the object .
445
29
23,220
def format_message ( self , severity , text , src_ref ) : lines = [ ] if severity >= Severity . ERROR : color = Fore . RED elif severity >= Severity . WARNING : color = Fore . YELLOW else : color = Fore . GREEN if src_ref is None : # No message context available lines . append ( color + Style . BRIGHT + severity . name . lower ( ) + ": " + Style . RESET_ALL + text ) return lines src_ref . derive_coordinates ( ) if ( src_ref . start_line is not None ) and ( src_ref . start_col is not None ) : # Start line and column is known lines . append ( Fore . WHITE + Style . BRIGHT + "%s:%d:%d: " % ( src_ref . filename , src_ref . start_line , src_ref . start_col ) + color + severity . name . lower ( ) + ": " + Style . RESET_ALL + text ) elif src_ref . start_line is not None : # Only line number is known lines . append ( Fore . WHITE + Style . BRIGHT + "%s:%d: " % ( src_ref . filename , src_ref . start_line ) + color + severity . name . lower ( ) + ": " + Style . RESET_ALL + text ) else : # Only filename is known lines . append ( Fore . WHITE + Style . BRIGHT + "%s: " % src_ref . filename + color + severity . name . lower ( ) + ": " + Style . RESET_ALL + text ) # If src_ref highlights a span within a single line of text, print it if ( src_ref . start_line is not None ) and ( src_ref . end_line is not None ) : if src_ref . start_line != src_ref . end_line : # multi-line reference # Select remainder of the line width = len ( src_ref . start_line_text ) - src_ref . start_col lines . append ( src_ref . start_line_text [ : src_ref . start_col ] + color + Style . BRIGHT + src_ref . start_line_text [ src_ref . start_col : ] + Style . RESET_ALL ) lines . append ( " " * src_ref . start_col + color + Style . BRIGHT + "^" * width + Style . RESET_ALL ) else : # Single line width = src_ref . end_col - src_ref . start_col + 1 lines . append ( src_ref . start_line_text [ : src_ref . start_col ] + color + Style . BRIGHT + src_ref . start_line_text [ src_ref . start_col : src_ref . 
end_col + 1 ] + Style . RESET_ALL + src_ref . start_line_text [ src_ref . end_col + 1 : ] ) lines . append ( " " * src_ref . start_col + color + Style . BRIGHT + "^" * width + Style . RESET_ALL ) return lines
Formats the message prior to emitting it .
687
9
23,221
def emit_message ( self , lines ) : for line in lines : print ( line , file = sys . stderr )
Emit message . Default printer emits messages to stderr
27
12
23,222
def get_value ( self ) : if ( self . _value is None ) and ( self . expr is not None ) : self . _value = self . expr . get_value ( ) return self . _value
Evaluate self . expr to get the parameter s value
46
12
23,223
def is_castable ( src , dst ) : if ( ( src in [ int , bool ] ) or rdltypes . is_user_enum ( src ) ) and ( dst in [ int , bool ] ) : # Pure numeric or enum can be cast to a numeric return True elif ( src == rdltypes . ArrayPlaceholder ) and ( dst == rdltypes . ArrayPlaceholder ) : # Check that array element types also match if src . element_type is None : # indeterminate array type. Is castable return True elif src . element_type == dst . element_type : return True else : return False elif rdltypes . is_user_struct ( dst ) : # Structs can be assigned their derived counterparts - aka their subclasses return issubclass ( src , dst ) elif dst == rdltypes . PropertyReference : return issubclass ( src , rdltypes . PropertyReference ) elif src == dst : return True else : return False
Check if src type can be cast to dst type
210
10
23,224
def predict_type ( self ) : current_comp = self . ref_root for name , array_suffixes , name_src_ref in self . ref_elements : # find instance current_comp = current_comp . get_child_by_name ( name ) if current_comp is None : # Not found! self . msg . fatal ( "Could not resolve hierarchical reference to '%s'" % name , name_src_ref ) # Do type-check in array suffixes for array_suffix in array_suffixes : array_suffix . predict_type ( ) # Check array suffixes if ( isinstance ( current_comp , comp . AddressableComponent ) ) and current_comp . is_array : # is an array if len ( array_suffixes ) != len ( current_comp . array_dimensions ) : self . msg . fatal ( "Incompatible number of index dimensions after '%s'. Expected %d, found %d." % ( name , len ( current_comp . array_dimensions ) , len ( array_suffixes ) ) , name_src_ref ) elif array_suffixes : # Has array suffixes. Check if compatible with referenced component self . msg . fatal ( "Unable to index non-array component '%s'" % name , name_src_ref ) return type ( current_comp )
Traverse the ref_elements path and determine the component type being referenced . Also do some checks on the array indexes
298
24
23,225
def get_value ( self , eval_width = None ) : resolved_ref_elements = [ ] for name , array_suffixes , name_src_ref in self . ref_elements : idx_list = [ suffix . get_value ( ) for suffix in array_suffixes ] resolved_ref_elements . append ( ( name , idx_list , name_src_ref ) ) # Create container cref = rdltypes . ComponentRef ( self . ref_root , resolved_ref_elements ) return cref
Build a resolved ComponentRef container that describes the relative path
120
11
23,226
def predict_type ( self ) : inst_type = self . inst_ref . predict_type ( ) if self . prop_ref_type . allowed_inst_type != inst_type : self . msg . fatal ( "'%s' is not a valid property of instance" % self . prop_ref_type . get_name ( ) , self . src_ref ) return self . prop_ref_type
Predict the type of the inst_ref and make sure the property being referenced is allowed
89
18
23,227
def get_group_node_size ( node ) : # After structural placement, children are sorted if ( not node . inst . children or ( not isinstance ( node . inst . children [ - 1 ] , comp . AddressableComponent ) ) ) : # No addressable child exists. return 0 # Current node's size is based on last child last_child_node = Node . _factory ( node . inst . children [ - 1 ] , node . env , node ) return ( last_child_node . inst . addr_offset + last_child_node . total_size )
Shared getter for AddrmapNode and RegfileNode s size property
124
16
23,228
def add_derived_property ( cls , getter_function , name = None ) : if name is None : name = getter_function . __name__ mp = property ( fget = getter_function ) setattr ( cls , name , mp )
Register a user - defined derived property
57
7
23,229
def children ( self , unroll = False , skip_not_present = True ) : for child_inst in self . inst . children : if skip_not_present : # Check if property ispresent == False if not child_inst . properties . get ( 'ispresent' , True ) : # ispresent was explicitly set to False. Skip it continue if unroll and isinstance ( child_inst , comp . AddressableComponent ) and child_inst . is_array : # Unroll the array range_list = [ range ( n ) for n in child_inst . array_dimensions ] for idxs in itertools . product ( * range_list ) : N = Node . _factory ( child_inst , self . env , self ) N . current_idx = idxs # pylint: disable=attribute-defined-outside-init yield N else : yield Node . _factory ( child_inst , self . env , self )
Returns an iterator that provides nodes for all immediate children of this component .
205
14
23,230
def descendants ( self , unroll = False , skip_not_present = True , in_post_order = False ) : for child in self . children ( unroll , skip_not_present ) : if in_post_order : yield from child . descendants ( unroll , skip_not_present , in_post_order ) yield child if not in_post_order : yield from child . descendants ( unroll , skip_not_present , in_post_order )
Returns an iterator that provides nodes for all descendants of this component .
103
13
23,231
def signals ( self , skip_not_present = True ) : for child in self . children ( skip_not_present = skip_not_present ) : if isinstance ( child , SignalNode ) : yield child
Returns an iterator that provides nodes for all immediate signals of this component .
46
14
23,232
def fields ( self , skip_not_present = True ) : for child in self . children ( skip_not_present = skip_not_present ) : if isinstance ( child , FieldNode ) : yield child
Returns an iterator that provides nodes for all immediate fields of this component .
46
14
23,233
def registers ( self , unroll = False , skip_not_present = True ) : for child in self . children ( unroll , skip_not_present ) : if isinstance ( child , RegNode ) : yield child
Returns an iterator that provides nodes for all immediate registers of this component .
48
14
23,234
def find_by_path ( self , path ) : pathparts = path . split ( '.' ) current_node = self for pathpart in pathparts : m = re . fullmatch ( r'^(\w+)((?:\[(?:\d+|0[xX][\da-fA-F]+)\])*)$' , pathpart ) if not m : raise ValueError ( "Invalid path" ) inst_name , array_suffix = m . group ( 1 , 2 ) idx_list = [ int ( s , 0 ) for s in re . findall ( r'\[(\d+|0[xX][\da-fA-F]+)\]' , array_suffix ) ] current_node = current_node . get_child_by_name ( inst_name ) if current_node is None : return None if idx_list : if ( isinstance ( current_node , AddressableNode ) ) and current_node . inst . is_array : # is an array if len ( idx_list ) != len ( current_node . inst . array_dimensions ) : raise IndexError ( "Wrong number of array dimensions" ) current_node . current_idx = [ ] # pylint: disable=attribute-defined-outside-init for i , idx in enumerate ( idx_list ) : if idx >= current_node . inst . array_dimensions [ i ] : raise IndexError ( "Array index out of range" ) current_node . current_idx . append ( idx ) else : raise IndexError ( "Index attempted on non-array component" ) return current_node
Finds the descendant node that is located at the relative path Returns None if not found Raises exception if path is malformed or array index is out of range
364
32
23,235
def get_property ( self , prop_name , * * kwargs ) : ovr_default = False default = None if 'default' in kwargs : ovr_default = True default = kwargs . pop ( 'default' ) # Check for stray kwargs if kwargs : raise TypeError ( "got an unexpected keyword argument '%s'" % list ( kwargs . keys ( ) ) [ 0 ] ) # If its already in the component, then safe to bypass checks if prop_name in self . inst . properties : prop_value = self . inst . properties [ prop_name ] if isinstance ( prop_value , rdltypes . ComponentRef ) : # If this is a hierarchical component reference, convert it to a Node reference prop_value = prop_value . build_node_ref ( self , self . env ) if isinstance ( prop_value , rdltypes . PropertyReference ) : prop_value . _resolve_node ( self ) return prop_value if ovr_default : # Default value is being overridden by user. Return their value return default # Otherwise, return its default value based on the property's rules rule = self . env . property_rules . lookup_property ( prop_name ) # Is it even a valid property or allowed for this component type? if rule is None : raise LookupError ( "Unknown property '%s'" % prop_name ) if type ( self . inst ) not in rule . bindable_to : raise LookupError ( "Unknown property '%s'" % prop_name ) # Return the default value as specified by the rulebook return rule . get_default ( self )
Gets the SystemRDL component property
357
8
23,236
def list_properties ( self , list_all = False ) : if list_all : props = [ ] for k , v in self . env . property_rules . rdl_properties . items ( ) : if type ( self . inst ) in v . bindable_to : props . append ( k ) for k , v in self . env . property_rules . user_properties . items ( ) : if type ( self . inst ) in v . bindable_to : props . append ( k ) return props else : return list ( self . inst . properties . keys ( ) )
Lists properties associated with this node . By default only lists properties that were explicitly set . If list_all is set to True then lists all valid properties of this component type
125
35
23,237
def get_path ( self , hier_separator = "." , array_suffix = "[{index:d}]" , empty_array_suffix = "[]" ) : if self . parent and not isinstance ( self . parent , RootNode ) : return ( self . parent . get_path ( hier_separator , array_suffix , empty_array_suffix ) + hier_separator + self . get_path_segment ( array_suffix , empty_array_suffix ) ) else : return self . get_path_segment ( array_suffix , empty_array_suffix )
Generate an absolute path string to this node
135
9
23,238
def get_html_desc ( self , markdown_inst = None ) : desc_str = self . get_property ( "desc" ) if desc_str is None : return None return rdlformatcode . rdlfc_to_html ( desc_str , self , md = markdown_inst )
Translates the node s desc property into HTML .
68
11
23,239
def address_offset ( self ) : if self . inst . is_array : if self . current_idx is None : raise ValueError ( "Index of array element must be known to derive address" ) # Calculate the "flattened" index of a general multidimensional array # For example, a component array declared as: # foo[S0][S1][S2] # and referenced as: # foo[I0][I1][I2] # Is flattened like this: # idx = I0*S1*S2 + I1*S2 + I2 idx = 0 for i in range ( len ( self . current_idx ) ) : sz = 1 for j in range ( i + 1 , len ( self . inst . array_dimensions ) ) : sz *= self . inst . array_dimensions [ j ] idx += sz * self . current_idx [ i ] offset = self . inst . addr_offset + idx * self . inst . array_stride else : offset = self . inst . addr_offset return offset
Byte address offset of this node relative to it s parent
237
11
23,240
def absolute_address ( self ) : if self . parent and not isinstance ( self . parent , RootNode ) : return self . parent . absolute_address + self . address_offset else : return self . address_offset
Get the absolute byte address of this node .
47
9
23,241
def top ( self ) : for child in self . children ( skip_not_present = False ) : if not isinstance ( child , AddrmapNode ) : continue return child raise RuntimeError
Returns the top - level addrmap node
41
8
23,242
def is_sw_writable ( self ) : sw = self . get_property ( 'sw' ) return sw in ( rdltypes . AccessType . rw , rdltypes . AccessType . rw1 , rdltypes . AccessType . w , rdltypes . AccessType . w1 )
Field is writable by software
67
6
23,243
def is_sw_readable ( self ) : sw = self . get_property ( 'sw' ) return sw in ( rdltypes . AccessType . rw , rdltypes . AccessType . rw1 , rdltypes . AccessType . r )
Field is readable by software
56
5
23,244
def implements_storage ( self ) : # 9.4.1, Table 12 sw = self . get_property ( 'sw' ) hw = self . get_property ( 'hw' ) if sw in ( rdltypes . AccessType . rw , rdltypes . AccessType . rw1 ) : # Software can read and write, implying a storage element return True if hw == rdltypes . AccessType . rw : # Hardware can read and write, implying a storage element return True if ( sw in ( rdltypes . AccessType . w , rdltypes . AccessType . w1 ) ) and ( hw == rdltypes . AccessType . r ) : # Write-only register visible to hardware is stored return True onread = self . get_property ( 'onread' ) if onread is not None : # 9.6.1-c: Onread side-effects imply storage regardless of whether # or not the field is writable by sw return True if self . get_property ( 'hwset' ) or self . get_property ( 'hwclr' ) : # Not in spec, but these imply that a storage element exists return True return False
True if combination of field access properties imply that the field implements a storage element .
255
16
23,245
def visitComponent_def ( self , ctx : SystemRDLParser . Component_defContext ) : # Get definition. Returns Component if ctx . component_anon_def ( ) is not None : comp_def = self . visit ( ctx . component_anon_def ( ) ) elif ctx . component_named_def ( ) is not None : comp_def = self . visit ( ctx . component_named_def ( ) ) else : raise RuntimeError comp_def . parent_scope = self . component if ctx . component_insts ( ) is not None : if isinstance ( self , RootVisitor ) and isinstance ( comp_def , comp . Addrmap ) : self . msg . warning ( "Non-standard instantiation of an addrmap in root namespace will be ignored" , SourceRef . from_antlr ( ctx . component_insts ( ) . component_inst ( 0 ) . ID ( ) ) ) else : # Component is instantiated one or more times if ctx . component_inst_type ( ) is not None : inst_type = self . visit ( ctx . component_inst_type ( ) ) else : inst_type = None # Pass some temporary info to visitComponent_insts self . _tmp = ( comp_def , inst_type , None ) self . visit ( ctx . component_insts ( ) ) return None
Create and possibly instantiate a component
301
7
23,246
def define_component ( self , body , type_token , def_name , param_defs ) : for subclass in ComponentVisitor . __subclasses__ ( ) : if subclass . comp_type == self . _CompType_Map [ type_token . type ] : visitor = subclass ( self . compiler , def_name , param_defs ) return visitor . visit ( body ) raise RuntimeError
Given component definition recurse to another ComponentVisitor to define a new component
86
15
23,247
def get_instance_assignment ( self , ctx ) : if ctx is None : return None visitor = ExprVisitor ( self . compiler ) expr = visitor . visit ( ctx . expr ( ) ) expr = expressions . AssignmentCast ( self . compiler . env , SourceRef . from_antlr ( ctx . op ) , expr , int ) expr . predict_type ( ) return expr
Gets the integer expression in any of the four instance assignment operators ( =
86
15
23,248
def visitParam_def ( self , ctx : SystemRDLParser . Param_defContext ) : self . compiler . namespace . enter_scope ( ) param_defs = [ ] for elem in ctx . getTypedRuleContexts ( SystemRDLParser . Param_def_elemContext ) : param_def = self . visit ( elem ) param_defs . append ( param_def ) self . compiler . namespace . exit_scope ( ) return param_defs
Parameter Definition block
106
3
23,249
def visitParam_def_elem ( self , ctx : SystemRDLParser . Param_def_elemContext ) : # Construct parameter type data_type_token = self . visit ( ctx . data_type ( ) ) param_data_type = self . datatype_from_token ( data_type_token ) if ctx . array_type_suffix ( ) is None : # Non-array type param_type = param_data_type else : # Array-like type param_type = rdltypes . ArrayPlaceholder ( param_data_type ) # Get parameter name param_name = get_ID_text ( ctx . ID ( ) ) # Get expression for parameter default, if any if ctx . expr ( ) is not None : visitor = ExprVisitor ( self . compiler ) default_expr = visitor . visit ( ctx . expr ( ) ) default_expr = expressions . AssignmentCast ( self . compiler . env , SourceRef . from_antlr ( ctx . ID ( ) ) , default_expr , param_type ) default_expr . predict_type ( ) else : default_expr = None # Create Parameter object param = Parameter ( param_type , param_name , default_expr ) # Register it in the parameter def namespace scope self . compiler . namespace . register_element ( param_name , param , None , SourceRef . from_antlr ( ctx . ID ( ) ) ) return param
Individual parameter definition elements
316
4
23,250
def datatype_from_token ( self , token ) : if token . type == SystemRDLParser . ID : # Is an identifier for either an enum or struct type typ = self . compiler . namespace . lookup_type ( get_ID_text ( token ) ) if typ is None : self . msg . fatal ( "Type '%s' is not defined" % get_ID_text ( token ) , SourceRef . from_antlr ( token ) ) if rdltypes . is_user_enum ( typ ) or rdltypes . is_user_struct ( typ ) : return typ else : self . msg . fatal ( "Type '%s' is not a struct or enum" % get_ID_text ( token ) , SourceRef . from_antlr ( token ) ) else : return self . _DataType_Map [ token . type ]
Given a SystemRDLParser token lookup the type This only includes types under the data_type grammar rule
186
21
23,251
def get_rdltype ( value ) : if isinstance ( value , ( int , bool , str ) ) : # Pass canonical types as-is return type ( value ) elif is_user_enum ( type ( value ) ) : return type ( value ) elif is_user_struct ( type ( value ) ) : return type ( value ) elif isinstance ( value , enum . Enum ) : return type ( value ) elif isinstance ( value , list ) : # Create ArrayPlaceholder representation # Determine element type and make sure it is uniform array_el_type = None for el in value : el_type = get_rdltype ( el ) if el_type is None : return None if ( array_el_type is not None ) and ( el_type != array_el_type ) : return None array_el_type = el_type return ArrayPlaceholder ( array_el_type ) else : return None
Given a value return the type identifier object used within the RDL compiler If not a supported type return None
203
21
23,252
def get_html_desc ( self , markdown_inst = None ) : desc_str = self . _rdl_desc_ if desc_str is None : return None return rdlformatcode . rdlfc_to_html ( desc_str , md = markdown_inst )
Translates the enum s desc property into HTML .
64
11
23,253
def get_scope_path ( cls , scope_separator = "::" ) : if cls . get_parent_scope ( ) is None : return "" elif isinstance ( cls . get_parent_scope ( ) , comp . Root ) : return "" else : parent_path = cls . get_parent_scope ( ) . get_scope_path ( scope_separator ) if parent_path : return ( parent_path + scope_separator + cls . get_parent_scope ( ) . type_name ) else : return cls . get_parent_scope ( ) . type_name
Generate a string that represents this enum s declaration namespace scope .
135
13
23,254
def define_new ( cls , name , members , is_abstract = False ) : m = OrderedDict ( cls . _members ) # Make sure derivation does not have any overlapping keys with its parent if set ( m . keys ( ) ) & set ( members . keys ( ) ) : raise ValueError ( "'members' contains keys that overlap with parent" ) m . update ( members ) dct = { '_members' : m , '_is_abstract' : is_abstract , } newcls = type ( name , ( cls , ) , dct ) return newcls
Define a new struct type derived from the current type .
133
12
23,255
def define_udp ( self , name , valid_type , valid_components = None , default = None ) : if valid_components is None : valid_components = [ comp . Field , comp . Reg , comp . Regfile , comp . Addrmap , comp . Mem , comp . Signal , #TODO constraint, ] if name in self . env . property_rules . rdl_properties : raise ValueError ( "name '%s' conflicts with existing built-in RDL property" ) udp = UserProperty ( self . env , name , valid_components , [ valid_type ] , default ) self . env . property_rules . user_properties [ udp . name ] = udp
Pre - define a user - defined property .
156
9
23,256
def compile_file ( self , path , incl_search_paths = None ) : if incl_search_paths is None : incl_search_paths = [ ] fpp = preprocessor . FilePreprocessor ( self . env , path , incl_search_paths ) preprocessed_text , seg_map = fpp . preprocess ( ) input_stream = preprocessor . PreprocessedInputStream ( preprocessed_text , seg_map ) lexer = SystemRDLLexer ( input_stream ) lexer . removeErrorListeners ( ) lexer . addErrorListener ( messages . RDLAntlrErrorListener ( self . msg ) ) token_stream = CommonTokenStream ( lexer ) parser = SystemRDLParser ( token_stream ) parser . removeErrorListeners ( ) parser . addErrorListener ( messages . RDLAntlrErrorListener ( self . msg ) ) # Run Antlr parser on input parsed_tree = parser . root ( ) if self . msg . had_error : self . msg . fatal ( "Parse aborted due to previous errors" ) # Traverse parse tree with RootVisitor self . visitor . visit ( parsed_tree ) # Reset default property assignments from namespace. # They should not be shared between files since that would be confusing. self . namespace . default_property_ns_stack = [ { } ] if self . msg . had_error : self . msg . fatal ( "Compile aborted due to previous errors" )
Parse & compile a single file and append it to RDLCompiler s root namespace .
319
19
23,257
def elaborate ( self , top_def_name = None , inst_name = None , parameters = None ) : if parameters is None : parameters = { } # Get top-level component definition to elaborate if top_def_name is not None : # Lookup top_def_name if top_def_name not in self . root . comp_defs : self . msg . fatal ( "Elaboration target '%s' not found" % top_def_name ) top_def = self . root . comp_defs [ top_def_name ] if not isinstance ( top_def , comp . Addrmap ) : self . msg . fatal ( "Elaboration target '%s' is not an 'addrmap' component" % top_def_name ) else : # Not specified. Find the last addrmap defined for comp_def in reversed ( list ( self . root . comp_defs . values ( ) ) ) : if isinstance ( comp_def , comp . Addrmap ) : top_def = comp_def top_def_name = comp_def . type_name break else : self . msg . fatal ( "Could not find any 'addrmap' components to elaborate" ) # Create an instance of the root component root_inst = deepcopy ( self . root ) root_inst . is_instance = True root_inst . original_def = self . root root_inst . inst_name = "$root" # Create a top-level instance top_inst = deepcopy ( top_def ) top_inst . is_instance = True top_inst . original_def = top_def top_inst . addr_offset = 0 top_inst . external = True # addrmap is always implied as external if inst_name is not None : top_inst . inst_name = inst_name else : top_inst . inst_name = top_def_name # Override parameters as needed for param_name , value in parameters . items ( ) : # Find the parameter to override parameter = None for p in top_inst . parameters : if p . name == param_name : parameter = p break else : raise ValueError ( "Parameter '%s' is not available for override" % param_name ) value_expr = expr . ExternalLiteral ( self . env , value ) value_type = value_expr . predict_type ( ) if value_type is None : raise TypeError ( "Override value for parameter '%s' is an unrecognized type" % param_name ) if value_type != parameter . 
param_type : raise TypeError ( "Incorrect type for parameter '%s'" % param_name ) parameter . expr = value_expr # instantiate top_inst into the root component instance root_inst . children . append ( top_inst ) root_node = RootNode ( root_inst , self . env , None ) # Resolve all expressions walker . RDLWalker ( skip_not_present = False ) . walk ( root_node , ElabExpressionsListener ( self . msg ) ) # Resolve address and field placement walker . RDLWalker ( skip_not_present = False ) . walk ( root_node , PrePlacementValidateListener ( self . msg ) , StructuralPlacementListener ( self . msg ) , LateElabListener ( self . msg ) ) # Validate design # Only need to validate nodes that are present walker . RDLWalker ( skip_not_present = True ) . walk ( root_node , ValidateListener ( self . env ) ) if self . msg . had_error : self . msg . fatal ( "Elaborate aborted due to previous errors" ) return root_node
Elaborates the design for the given top - level addrmap component .
793
15
23,258
def get_default ( self , node ) : if self . opposite_property in node . inst . properties : return not node . inst . properties [ self . opposite_property ] else : return self . default
If not explicitly set check if the opposite was set first before returning default
43
14
23,259
def get_default ( self , node ) : if node . inst . properties . get ( "onread" , None ) == rdltypes . OnReadType . rset : return True else : return self . default
If not explicitly set check if onread sets the equivalent
46
11
23,260
def assign_value ( self , comp_def , value , src_ref ) : super ( ) . assign_value ( comp_def , value , src_ref ) if "rclr" in comp_def . properties : del comp_def . properties [ "rclr" ] if "rset" in comp_def . properties : del comp_def . properties [ "rset" ]
Overrides other related properties
86
6
23,261
def get_default ( self , node ) : if node . inst . properties . get ( "rset" , False ) : return rdltypes . OnReadType . rset elif node . inst . properties . get ( "rclr" , False ) : return rdltypes . OnReadType . rclr else : return self . default
If not explicitly set check if rset or rclr imply the value
75
15
23,262
def get_default ( self , node ) : if node . inst . properties . get ( "onwrite" , None ) == rdltypes . OnWriteType . woclr : return True else : return self . default
If not explicitly set check if onwrite sets the equivalent
47
11
23,263
def get_default ( self , node ) : if node . inst . properties . get ( "woset" , False ) : return rdltypes . OnWriteType . woset elif node . inst . properties . get ( "woclr" , False ) : return rdltypes . OnWriteType . woclr else : return self . default
If not explicitly set check if woset or woclr imply the value
77
16
23,264
def assign_value ( self , comp_def , value , src_ref ) : super ( ) . assign_value ( comp_def , value , src_ref ) comp_def . properties [ 'incrthreshold' ] = value
Set both alias and actual value
51
6
23,265
def get_default ( self , node ) : if node . inst . properties . get ( "intr" , False ) : # Interrupt is set! # Default is implicitly stickybit, unless the mutually-exclusive # sticky property was set instead return not node . inst . properties . get ( "sticky" , False ) else : return False
Unless specified otherwise intr fields are implicitly stickybit
72
9
23,266
def resolve_addresses ( self , node ) : # Get alignment based on 'alignment' property # This remains constant for all children prop_alignment = self . alignment_stack [ - 1 ] if prop_alignment is None : # was not specified. Does not contribute to alignment prop_alignment = 1 prev_node = None for child_node in node . children ( skip_not_present = False ) : if not isinstance ( child_node , AddressableNode ) : continue if child_node . inst . addr_offset is not None : # Address is already known. Do not need to infer prev_node = child_node continue if node . env . chk_implicit_addr : node . env . msg . message ( node . env . chk_implicit_addr , "Address offset of component '%s' is not explicitly set" % child_node . inst . inst_name , child_node . inst . inst_src_ref ) # Get alignment specified by '%=' allocator, if any alloc_alignment = child_node . inst . addr_align if alloc_alignment is None : # was not specified. Does not contribute to alignment alloc_alignment = 1 # Calculate alignment based on current addressing mode if self . addressing_mode_stack [ - 1 ] == rdltypes . AddressingType . compact : if isinstance ( child_node , RegNode ) : # Regs are aligned based on their accesswidth mode_alignment = child_node . get_property ( 'accesswidth' ) // 8 else : # Spec does not specify for other components # Assuming absolutely compact packing mode_alignment = 1 elif self . addressing_mode_stack [ - 1 ] == rdltypes . AddressingType . regalign : # Components are aligned to a multiple of their size # Spec vaguely suggests that alignment is also a power of 2 mode_alignment = child_node . size mode_alignment = roundup_pow2 ( mode_alignment ) elif self . addressing_mode_stack [ - 1 ] == rdltypes . AddressingType . fullalign : # Same as regalign except for arrays # Arrays are aligned to their total size # Both are rounded to power of 2 mode_alignment = child_node . 
total_size mode_alignment = roundup_pow2 ( mode_alignment ) else : raise RuntimeError # Calculate resulting address offset alignment = max ( prop_alignment , alloc_alignment , mode_alignment ) if prev_node is None : next_offset = 0 else : next_offset = prev_node . inst . addr_offset + prev_node . total_size # round next_offset up to alignment child_node . inst . addr_offset = roundup_to ( next_offset , alignment ) prev_node = child_node # Sort children by address offset # Non-addressable child components are sorted to be first (signals) def get_child_sort_key ( inst ) : if not isinstance ( inst , comp . AddressableComponent ) : return - 1 else : return inst . addr_offset node . inst . children . sort ( key = get_child_sort_key )
Resolve addresses of children of Addrmap and Regfile components
681
13
23,267
def get_ID_text ( token ) : if isinstance ( token , CommonToken ) : text = token . text else : text = token . getText ( ) text = text . lstrip ( '\\' ) return text
Get the text from the ID token . Strips off leading slash escape if present
48
16
23,268
def derive_source_offset ( self , offset , is_end = False ) : for segment in self . segments : if offset <= segment . end : if isinstance ( segment , MacroSegment ) : if is_end : return ( segment . src_end , segment . src , segment . incl_ref ) else : return ( segment . src_start , segment . src , segment . incl_ref ) else : return ( segment . src_start + ( offset - segment . start ) , segment . src , segment . incl_ref ) # Reached end. Assume end of last segment return ( self . segments [ - 1 ] . src_end , self . segments [ - 1 ] . src , self . segments [ - 1 ] . incl_ref )
Given a post - preprocessed coordinate derives the corresponding coordinate in the original source file .
160
18
23,269
def preprocess ( self ) : tokens = self . tokenize ( ) pl_segments , has_perl_tags = self . get_perl_segments ( tokens ) # Generate flattened output str_parts = [ ] smap = segment_map . SegmentMap ( ) offset = 0 if has_perl_tags : # Needs to be processed through perl interpreter emit_list = self . run_perl_miniscript ( pl_segments ) for entry in emit_list : if entry [ 'type' ] == "ref" : pl_seg = pl_segments [ entry [ 'ref' ] ] emit_text = pl_seg . get_text ( ) map_seg = segment_map . UnalteredSegment ( offset , offset + len ( emit_text ) - 1 , pl_seg . start , pl_seg . end , pl_seg . file_pp . path , pl_seg . file_pp . incl_ref ) offset += len ( emit_text ) smap . segments . append ( map_seg ) str_parts . append ( emit_text ) elif entry [ 'type' ] == "text" : pl_seg = pl_segments [ entry [ 'ref' ] ] emit_text = entry [ 'text' ] map_seg = segment_map . MacroSegment ( offset , offset + len ( emit_text ) - 1 , pl_seg . start , pl_seg . end , pl_seg . file_pp . path , pl_seg . file_pp . incl_ref ) offset += len ( emit_text ) smap . segments . append ( map_seg ) str_parts . append ( emit_text ) else : # OK to bypass perl interpreter for pl_seg in pl_segments : emit_text = pl_seg . get_text ( ) map_seg = segment_map . UnalteredSegment ( offset , offset + len ( emit_text ) - 1 , pl_seg . start , pl_seg . end , pl_seg . file_pp . path , pl_seg . file_pp . incl_ref ) offset += len ( emit_text ) smap . segments . append ( map_seg ) str_parts . append ( emit_text ) #segment_map.print_segment_debug("".join(str_parts), smap) return ( "" . join ( str_parts ) , smap )
Run preprocessor on a top - level file .
547
10
23,270
def tokenize ( self ) : tokens = [ ] token_spec = [ ( 'mlc' , r'/\*.*?\*/' ) , ( 'slc' , r'//[^\r\n]*?\r?\n' ) , ( 'perl' , r'<%.*?%>' ) , ( 'incl' , r'`include' ) , ] tok_regex = '|' . join ( '(?P<%s>%s)' % pair for pair in token_spec ) for m in re . finditer ( tok_regex , self . text , re . DOTALL ) : if m . lastgroup in ( "incl" , "perl" ) : tokens . append ( ( m . lastgroup , m . start ( 0 ) , m . end ( 0 ) - 1 ) ) return tokens
Tokenize the input text
192
5
23,271
def parse_include ( self , start ) : # Seek back to start of line i = start while i : if self . text [ i ] == '\n' : i += 1 break i -= 1 line_start = i # check that there is no unexpected text before the include if not ( self . text [ line_start : start ] == "" or self . text [ line_start : start ] . isspace ( ) ) : self . env . msg . fatal ( "Unexpected text before include" , messages . SourceRef ( line_start , start - 1 , filename = self . path ) ) # Capture include contents inc_regex = re . compile ( r'`include\s+("([^\r\n]+)"|<([^\r\n]+)>)' ) m_inc = inc_regex . match ( self . text , start ) if m_inc is None : self . env . msg . fatal ( "Invalid usage of include directive" , messages . SourceRef ( start , start + 7 , filename = self . path ) ) incl_path_raw = m_inc . group ( 2 ) or m_inc . group ( 3 ) end = m_inc . end ( 0 ) - 1 path_start = m_inc . start ( 1 ) #[^\r\n]*?\r?\n # Check that only comments follow tail_regex = re . compile ( r'(?:[ \t]*/\*[^\r\n]*?\*/)*[ \t]*(?://[^\r\n]*?|/\*[^\r\n]*?)?\r?\n' ) if not tail_regex . match ( self . text , end + 1 ) : tail_capture_regex = re . compile ( r'[^\r\n]*?\r?\n' ) m = tail_capture_regex . match ( self . text , end + 1 ) self . env . msg . fatal ( "Unexpected text after include" , messages . SourceRef ( end + 1 , m . end ( 0 ) - 1 , filename = self . path ) ) # Resolve include path. if os . path . isabs ( incl_path_raw ) : incl_path = incl_path_raw else : # Search include paths first. for search_path in self . search_paths : incl_path = os . path . join ( search_path , incl_path_raw ) if os . path . isfile ( incl_path ) : # found match! break else : # Otherwise, assume it is relative to the current file incl_path = os . path . join ( os . path . dirname ( self . path ) , incl_path_raw ) if not os . path . isfile ( incl_path ) : self . env . msg . fatal ( "Could not find '%s' in include search paths" % incl_path_raw , messages . SourceRef ( path_start , end , filename = self . 
path ) ) # Check if path has already been referenced before incl_ref = self . incl_ref while incl_ref : if os . path . samefile ( incl_path , incl_ref . path ) : self . env . msg . fatal ( "Include of '%s' results in a circular reference" % incl_path_raw , messages . SourceRef ( path_start , end , filename = self . path ) ) incl_ref = incl_ref . parent return ( end , incl_path )
Extract include from text based on start position of token
770
11
23,272
def run_perl_miniscript ( self , segments ) : # Check if perl is installed if shutil . which ( "perl" ) is None : self . env . msg . fatal ( "Input contains Perl preprocessor tags, but an installation of Perl could not be found" ) # Generate minimal perl script that captures activities described in the source file lines = [ ] for i , pp_seg in enumerate ( segments ) : if isinstance ( pp_seg , PPPUnalteredSegment ) : # Text outside preprocessor tags that should remain unaltered # Insert command to emit reference to this text segment lines . append ( "rdlppp_utils::emit_ref(%d);" % i ) elif isinstance ( pp_seg , PPPPerlSegment ) : # Perl code snippet. Insert directly lines . append ( pp_seg . get_text ( ) ) elif isinstance ( pp_seg , PPPMacroSegment ) : # Preprocessor macro print tag # Insert command to store resulting text var = pp_seg . get_text ( ) # Check for any illegal characters if re . match ( r'[\s;]' , var ) : self . env . msg . fatal ( "Invalid text found in Perl macro expansion" , messages . SourceRef ( pp_seg . start , pp_seg . end , filename = self . path ) ) lines . append ( "rdlppp_utils::emit_text(%d, %s);" % ( i , var ) ) miniscript = '\n' . join ( lines ) # Run miniscript result = subprocess_run ( [ "perl" , os . path . join ( os . path . dirname ( __file__ ) , "ppp_runner.pl" ) ] , input = miniscript . encode ( "utf-8" ) , stdout = subprocess . PIPE , stderr = subprocess . PIPE , timeout = 5 ) if result . returncode : self . env . msg . fatal ( "Encountered a Perl syntax error while executing embedded Perl preprocessor commands:\n" + result . stderr . decode ( "utf-8" ) , # TODO: Fix useless context somehow messages . SourceRef ( filename = self . path ) ) # miniscript returns the emit list in JSON format. Convert it emit_list = json . loads ( result . stdout . decode ( 'utf-8' ) ) return emit_list
Generates and runs a perl miniscript that derives the text that will be emitted from the preprocessor
546
21
23,273
def get_default_properties ( self , comp_type ) : # Flatten out all the default assignments that apply to the current scope # This does not include any default assignments made within the current # scope, so exclude those. props = { } for scope in self . default_property_ns_stack [ : - 1 ] : props . update ( scope ) # filter out properties that are not relevant prop_names = list ( props . keys ( ) ) for prop_name in prop_names : rule = self . env . property_rules . lookup_property ( prop_name ) if rule is None : self . msg . fatal ( "Unrecognized property '%s'" % prop_name , props [ prop_name ] [ 0 ] ) if comp_type not in rule . bindable_to : del props [ prop_name ] return props
Returns a flattened dictionary of all default property assignments visible in the current scope that apply to the current component type .
179
22
23,274
def get_scope_path ( self , scope_separator = "::" ) : if self . parent_scope is None : return "" elif isinstance ( self . parent_scope , Root ) : return "" else : parent_path = self . parent_scope . get_scope_path ( scope_separator ) if parent_path : return ( parent_path + scope_separator + self . parent_scope . type_name ) else : return self . parent_scope . type_name
Generate a string that represents this component s declaration namespace scope .
107
13
23,275
def n_elements ( self ) : if self . is_array : return functools . reduce ( operator . mul , self . array_dimensions ) else : return 1
Total number of array elements . If array is multidimensional array is flattened . Returns 1 if not an array .
38
23
23,276
def walk ( self , node , * listeners : RDLListener ) : for listener in listeners : self . do_enter ( node , listener ) for child in node . children ( unroll = self . unroll , skip_not_present = self . skip_not_present ) : self . walk ( child , * listeners ) for listener in listeners : self . do_exit ( node , listener )
Initiates the walker to traverse the current node and its children . Calls the corresponding callback for each of the listeners provided in the order that they are listed .
85
33
23,277
def get_function ( fn_name ) : module_name , callable_name = fn_name . split ( ':' ) current = globals ( ) if not callable_name : callable_name = module_name else : import importlib try : module = importlib . import_module ( module_name ) except ImportError : log . error ( "failed to import %s" , module_name ) raise current = module for level in callable_name . split ( '.' ) : current = getattr ( current , level ) code = current . __code__ if code . co_argcount != 2 : raise ValueError ( 'function should take 2 arguments: lines, file_name' ) return current
Retrieve the function defined by the function_name .
153
11
23,278
def parse_command_line ( argv ) : import textwrap example = textwrap . dedent ( """ Examples: # Simple string substitution (-e). Will show a diff. No changes applied. {0} -e "re.sub('failIf', 'assertFalse', line)" *.py # File level modifications (-f). Overwrites the files in place (-w). {0} -w -f fixer:fixit *.py # Will change all test*.py in subdirectories of tests. {0} -e "re.sub('failIf', 'assertFalse', line)" -s tests test*.py """ ) . format ( os . path . basename ( argv [ 0 ] ) ) formatter_class = argparse . RawDescriptionHelpFormatter parser = argparse . ArgumentParser ( description = "Python mass editor" , epilog = example , formatter_class = formatter_class ) parser . add_argument ( "-V" , "--version" , action = "version" , version = "%(prog)s {}" . format ( __version__ ) ) parser . add_argument ( "-w" , "--write" , dest = "dry_run" , action = "store_false" , default = True , help = "modify target file(s) in place. " "Shows diff otherwise." ) parser . add_argument ( "-v" , "--verbose" , dest = "verbose_count" , action = "count" , default = 0 , help = "increases log verbosity (can be specified " "multiple times)" ) parser . add_argument ( "-e" , "--expression" , dest = "expressions" , nargs = 1 , help = "Python expressions applied to target files. " "Use the line variable to reference the current line." ) parser . add_argument ( "-f" , "--function" , dest = "functions" , nargs = 1 , help = "Python function to apply to target file. " "Takes file content as input and yield lines. " "Specify function as [module]:?<function name>." ) parser . add_argument ( "-x" , "--executable" , dest = "executables" , nargs = 1 , help = "Python executable to apply to target file." ) parser . add_argument ( "-s" , "--start" , dest = "start_dirs" , help = "Directory(ies) from which to look for targets." ) parser . add_argument ( "-m" , "--max-depth-level" , type = int , dest = "max_depth" , help = "Maximum depth when walking subdirectories." ) parser . 
add_argument ( "-o" , "--output" , metavar = "FILE" , type = argparse . FileType ( "w" ) , default = sys . stdout , help = "redirect output to a file" ) parser . add_argument ( "-g" , "--generate" , metavar = "FILE" , type = str , help = "generate input file suitable for -f option" ) parser . add_argument ( "--encoding" , dest = "encoding" , help = "Encoding of input and output files" ) parser . add_argument ( "--newline" , dest = "newline" , help = "Newline character for output files" ) parser . add_argument ( "patterns" , metavar = "pattern" , nargs = "*" , # argparse.REMAINDER, help = "shell-like file name patterns to process." ) arguments = parser . parse_args ( argv [ 1 : ] ) if not ( arguments . expressions or arguments . functions or arguments . generate or arguments . executables ) : parser . error ( '--expression, --function, --generate or --executable missing' ) # Sets log level to WARN going more verbose for each new -V. log . setLevel ( max ( 3 - arguments . verbose_count , 0 ) * 10 ) return arguments
Parse command line argument . See - h option .
884
11
23,279
def get_paths ( patterns , start_dirs = None , max_depth = 1 ) : # Shortcut: if there is only one pattern, make sure we process just that. if len ( patterns ) == 1 and not start_dirs : pattern = patterns [ 0 ] directory = os . path . dirname ( pattern ) if directory : patterns = [ os . path . basename ( pattern ) ] start_dirs = directory max_depth = 1 if not start_dirs or start_dirs == '.' : start_dirs = os . getcwd ( ) for start_dir in start_dirs . split ( ',' ) : for root , dirs , files in os . walk ( start_dir ) : # pylint: disable=W0612 if max_depth is not None : relpath = os . path . relpath ( root , start = start_dir ) depth = len ( relpath . split ( os . sep ) ) if depth > max_depth : continue names = [ ] for pattern in patterns : names += fnmatch . filter ( files , pattern ) for name in names : path = os . path . join ( root , name ) yield path
Retrieve files that match any of the patterns .
255
10
23,280
def edit_files ( patterns , expressions = None , functions = None , executables = None , start_dirs = None , max_depth = 1 , dry_run = True , output = sys . stdout , encoding = None , newline = None ) : if not is_list ( patterns ) : raise TypeError ( "patterns should be a list" ) if expressions and not is_list ( expressions ) : raise TypeError ( "expressions should be a list of exec expressions" ) if functions and not is_list ( functions ) : raise TypeError ( "functions should be a list of functions" ) if executables and not is_list ( executables ) : raise TypeError ( "executables should be a list of program names" ) editor = MassEdit ( dry_run = dry_run , encoding = encoding , newline = newline ) if expressions : editor . set_code_exprs ( expressions ) if functions : editor . set_functions ( functions ) if executables : editor . set_executables ( executables ) processed_paths = [ ] for path in get_paths ( patterns , start_dirs = start_dirs , max_depth = max_depth ) : try : diffs = list ( editor . edit_file ( path ) ) if dry_run : # At this point, encoding is the input encoding. diff = "" . join ( diffs ) if not diff : continue # The encoding of the target output may not match the input # encoding. If it's defined, we round trip the diff text # to bytes and back to silence any conversion errors. encoding = output . encoding if encoding : bytes_diff = diff . encode ( encoding = encoding , errors = 'ignore' ) diff = bytes_diff . decode ( encoding = output . encoding ) output . write ( diff ) except UnicodeDecodeError as err : log . error ( "failed to process %s: %s" , path , err ) continue processed_paths . append ( os . path . abspath ( path ) ) return processed_paths
Process patterns with MassEdit .
440
6
23,281
def command_line ( argv ) : arguments = parse_command_line ( argv ) if arguments . generate : generate_fixer_file ( arguments . generate ) paths = edit_files ( arguments . patterns , expressions = arguments . expressions , functions = arguments . functions , executables = arguments . executables , start_dirs = arguments . start_dirs , max_depth = arguments . max_depth , dry_run = arguments . dry_run , output = arguments . output , encoding = arguments . encoding , newline = arguments . newline ) # If the output is not sys.stdout, we need to close it because # argparse.FileType does not do it for us. is_sys = arguments . output in [ sys . stdout , sys . stderr ] if not is_sys and isinstance ( arguments . output , io . IOBase ) : arguments . output . close ( ) return paths
Instantiate an editor and process arguments .
197
8
23,282
def import_module ( module ) : # pylint: disable=R0201 if isinstance ( module , list ) : all_modules = module else : all_modules = [ module ] for mod in all_modules : globals ( ) [ mod ] = __import__ ( mod . strip ( ) )
Import module that are needed for the code expr to compile .
66
12
23,283
def __edit_line ( line , code , code_obj ) : # pylint: disable=R0201 try : # pylint: disable=eval-used result = eval ( code_obj , globals ( ) , locals ( ) ) except TypeError as ex : log . error ( "failed to execute %s: %s" , code , ex ) raise if result is None : log . error ( "cannot process line '%s' with %s" , line , code ) raise RuntimeError ( 'failed to process line' ) elif isinstance ( result , list ) or isinstance ( result , tuple ) : line = unicode ( ' ' . join ( [ unicode ( res_element ) for res_element in result ] ) ) else : line = unicode ( result ) return line
Edit a line with one code object built in the ctor .
174
13
23,284
def edit_line ( self , line ) : for code , code_obj in self . code_objs . items ( ) : line = self . __edit_line ( line , code , code_obj ) return line
Edit a single line using the code expression .
47
9
23,285
def edit_content ( self , original_lines , file_name ) : lines = [ self . edit_line ( line ) for line in original_lines ] for function in self . _functions : try : lines = list ( function ( lines , file_name ) ) except UnicodeDecodeError as err : log . error ( 'failed to process %s: %s' , file_name , err ) return lines except Exception as err : log . error ( "failed to process %s with code %s: %s" , file_name , function , err ) raise # Let the exception be handled at a higher level. return lines
Processes a file contents .
135
6
23,286
def append_code_expr ( self , code ) : # expects a string. if isinstance ( code , str ) and not isinstance ( code , unicode ) : code = unicode ( code ) if not isinstance ( code , unicode ) : raise TypeError ( "string expected" ) log . debug ( "compiling code %s..." , code ) try : code_obj = compile ( code , '<string>' , 'eval' ) self . code_objs [ code ] = code_obj except SyntaxError as syntax_err : log . error ( "cannot compile %s: %s" , code , syntax_err ) raise log . debug ( "compiled code %s" , code )
Compile argument and adds it to the list of code objects .
155
13
23,287
def append_function ( self , function ) : if not hasattr ( function , '__call__' ) : function = get_function ( function ) if not hasattr ( function , '__call__' ) : raise ValueError ( "function is expected to be callable" ) self . _functions . append ( function ) log . debug ( "registered %s" , function . __name__ )
Append the function to the list of functions to be called .
86
13
23,288
def append_executable ( self , executable ) : if isinstance ( executable , str ) and not isinstance ( executable , unicode ) : executable = unicode ( executable ) if not isinstance ( executable , unicode ) : raise TypeError ( "expected executable name as str, not {}" . format ( executable . __class__ . __name__ ) ) self . _executables . append ( executable )
Append san executable os command to the list to be called .
87
13
23,289
def set_functions ( self , functions ) : for func in functions : try : self . append_function ( func ) except ( ValueError , AttributeError ) as ex : log . error ( "'%s' is not a callable function: %s" , func , ex ) raise
Check functions passed as argument and set them to be used .
62
12
23,290
def write_mnefiff ( data , filename ) : from mne import create_info , set_log_level from mne . io import RawArray set_log_level ( WARNING ) TRIAL = 0 info = create_info ( list ( data . axis [ 'chan' ] [ TRIAL ] ) , data . s_freq , [ 'eeg' , ] * data . number_of ( 'chan' ) [ TRIAL ] ) UNITS = 1e-6 # mne wants data in uV fiff = RawArray ( data . data [ 0 ] * UNITS , info ) if data . attr [ 'chan' ] : fiff . set_channel_positions ( data . attr [ 'chan' ] . return_xyz ( ) , data . attr [ 'chan' ] . return_label ( ) ) fiff . save ( filename , overwrite = True )
Export data to MNE using FIFF format .
196
10
23,291
def detect_UCSD ( dat_orig , s_freq , time , opts ) : dat_det = transform_signal ( dat_orig , s_freq , 'wavelet_real' , opts . det_wavelet ) det_value = define_threshold ( dat_det , s_freq , 'median+std' , opts . det_thresh ) events = detect_events ( dat_det , 'maxima' , det_value ) dat_sel = transform_signal ( dat_orig , s_freq , 'wavelet_real' , opts . sel_wavelet ) sel_value = define_threshold ( dat_sel , s_freq , 'median+std' , opts . sel_thresh ) events = select_events ( dat_sel , events , 'above_thresh' , sel_value ) events = _merge_close ( dat_det , events , time , opts . tolerance ) events = within_duration ( events , time , opts . duration ) events = _merge_close ( dat_det , events , time , opts . min_interval ) events = remove_straddlers ( events , time , s_freq ) events = power_ratio ( events , dat_orig , s_freq , opts . frequency , opts . ratio_thresh ) power_peaks = peak_in_power ( events , dat_orig , s_freq , opts . power_peaks ) powers = power_in_band ( events , dat_orig , s_freq , opts . frequency ) sp_in_chan = make_spindles ( events , power_peaks , powers , dat_det , dat_orig , time , s_freq ) values = { 'det_value_lo' : det_value , 'sel_value' : sel_value } density = len ( sp_in_chan ) * s_freq * 30 / len ( dat_orig ) return sp_in_chan , values , density
Spindle detection based on the UCSD method
458
9
23,292
def detect_Concordia(dat_orig, s_freq, time, opts):
    """Spindle detection, experimental Concordia method.

    Similar to Moelle 2011 and Nir 2011.

    Parameters
    ----------
    dat_orig : ndarray (dtype='float')
        vector with the raw data
    s_freq : float
        sampling frequency
    time : ndarray (dtype='float')
        time vector, one value per sample
    opts : instance with detection parameters
        provides det_butter, moving_rms, smooth, det_thresh,
        det_thresh_hi, sel_thresh, tolerance, duration, min_interval,
        frequency, power_peaks

    Returns
    -------
    tuple
        (detected spindles, dict with threshold values used,
        density in spindles per 30 s of recording)
    """
    # envelope: bandpass filter, moving RMS, then smoothing
    dat_det = transform_signal(dat_orig, s_freq, 'butter', opts.det_butter)
    dat_det = transform_signal(dat_det, s_freq, 'moving_rms',
                               opts.moving_rms)
    dat_det = transform_signal(dat_det, s_freq, 'smooth', opts.smooth)

    det_value_lo = define_threshold(dat_det, s_freq, 'mean+std',
                                    opts.det_thresh)
    det_value_hi = define_threshold(dat_det, s_freq, 'mean+std',
                                    opts.det_thresh_hi)
    sel_value = define_threshold(dat_det, s_freq, 'mean+std',
                                 opts.sel_thresh)

    # events must stay between the low and the high threshold
    events = detect_events(dat_det, 'between_thresh',
                           value=(det_value_lo, det_value_hi))

    if events is None:
        lg.info('No spindle found')
        sp_in_chan = []
    else:
        events = _merge_close(dat_det, events, time, opts.tolerance)
        events = select_events(dat_det, events, 'above_thresh', sel_value)
        events = within_duration(events, time, opts.duration)
        events = _merge_close(dat_det, events, time, opts.min_interval)
        events = remove_straddlers(events, time, s_freq)

        power_peaks = peak_in_power(events, dat_orig, s_freq,
                                    opts.power_peaks)
        powers = power_in_band(events, dat_orig, s_freq, opts.frequency)
        sp_in_chan = make_spindles(events, power_peaks, powers, dat_det,
                                   dat_orig, time, s_freq)

    values = {'det_value_lo': det_value_lo, 'sel_value': sel_value}
    # density expressed as spindles per 30 s epoch
    density = len(sp_in_chan) * s_freq * 30 / len(dat_orig)

    return sp_in_chan, values, density
Spindle detection , experimental Concordia method . Similar to Moelle 2011 and Nir 2011 .
533
17
23,293
def define_threshold(dat, s_freq, method, value, nbins=120):
    """Return the value of the threshold based on relative values.

    Parameters
    ----------
    dat : ndarray (dtype='float')
        vector with the (transformed) data the threshold applies to
    s_freq : float
        sampling frequency (kept for signature symmetry with the other
        detection helpers; not used here)
    method : str
        one of 'mean', 'median', 'std', 'mean+std', 'median+std',
        'histmax'
    value : float
        multiplier applied to the statistic computed by `method`
    nbins : int
        number of histogram bins (only used by 'histmax')

    Returns
    -------
    float
        threshold in the same units as `dat`

    Raises
    ------
    ValueError
        if `method` is not one of the recognized strings
    """
    if method == 'mean':
        return value * mean(dat)
    elif method == 'median':
        return value * median(dat)
    elif method == 'std':
        return value * std(dat)
    elif method == 'mean+std':
        return mean(dat) + value * std(dat)
    elif method == 'median+std':
        return median(dat) + value * std(dat)
    elif method == 'histmax':
        # center of the most populated histogram bin, scaled by `value`
        counts, edges = histogram(dat, bins=nbins)
        idx_maxbin = argmax(counts)
        maxamp = mean((edges[idx_maxbin], edges[idx_maxbin + 1]))
        return value * maxamp

    # previously an unknown method fell through silently and returned the
    # raw multiplier, hiding typos in the method name — fail loudly instead
    raise ValueError('unknown method for threshold: ' + str(method))
Return the value of the threshold based on relative values .
196
11
23,294
def detect_events(dat, method, value=None):
    """Detect events using 'above_thresh', 'below_thresh',
    'between_thresh', 'maxima' or 'custom' method.

    Parameters
    ----------
    dat : ndarray (dtype='float')
        vector with the data (for 'custom', presumably already a boolean
        detection vector — confirm against _detect_start_end)
    method : str
        'above_thresh', 'below_thresh', 'between_thresh', 'maxima' or
        'custom'
    value : float or tuple of float, optional
        threshold; a (low, high) tuple for 'between_thresh'; for 'maxima'
        an optional minimum peak amplitude

    Returns
    -------
    ndarray (dtype='int') or None
        N x 3 matrix with, per event, start sample, peak/trough sample
        and end sample; None when nothing was detected
    """
    if 'thresh' in method or 'custom' == method:
        if method == 'above_thresh':
            above_det = dat >= value
            detected = _detect_start_end(above_det)

        if method == 'below_thresh':
            below_det = dat < value
            detected = _detect_start_end(below_det)

        if method == 'between_thresh':
            # sample must be above the low AND below the high threshold
            above_det = dat >= value[0]
            below_det = dat < value[1]
            between_det = logical_and(above_det, below_det)
            detected = _detect_start_end(between_det)

        if method == 'custom':
            detected = _detect_start_end(dat)

        if detected is None:
            return None

        if method in ['above_thresh', 'custom']:
            # add the location of the peak in the middle
            detected = insert(detected, 1, 0, axis=1)
            for i in detected:
                # mutates the row in place: middle column = argmax in span
                i[1] = i[0] + argmax(dat[i[0]:i[2]])

        if method in ['below_thresh', 'between_thresh']:
            # add the location of the trough in the middle
            detected = insert(detected, 1, 0, axis=1)
            for i in detected:
                i[1] = i[0] + argmin(dat[i[0]:i[2]])

    if method == 'maxima':
        # local maxima; each event is a single sample (start = peak = end)
        peaks = argrelmax(dat)[0]
        detected = vstack((peaks, peaks, peaks)).T

        if value is not None:
            detected = detected[dat[peaks] > value, :]

    return detected
Detect events using the above_thresh , below_thresh or maxima method .
376
16
23,295
def select_events(dat, detected, method, value):
    """Select start sample and end sample of the events.

    Parameters
    ----------
    dat : ndarray (dtype='float')
        vector with the data used for the selection threshold
    detected : ndarray (dtype='int')
        N x 3 matrix with start, peak, end samples
    method : str
        'above_thresh' or 'below_thresh'; any other value leaves the
        events untouched
    value : float
        selection threshold

    Returns
    -------
    ndarray (dtype='int')
        events with start/end extended to the selection period
    """
    if method == 'above_thresh':
        selected = _select_period(detected, dat >= value)
    elif method == 'below_thresh':
        selected = _select_period(detected, dat <= value)
    else:
        selected = detected

    return selected
Select start sample and end sample of the events .
77
10
23,296
def merge_close(events, min_interval, merge_to_longer=False):
    """Merge events that are separated by less than a minimum interval.

    Parameters
    ----------
    events : list of dict
        events with 'start' and 'end' times (in the units of
        min_interval); assumed sorted by start time
    min_interval : float
        minimum gap between distinct events
    merge_to_longer : bool
        when True and the later event is longer, keep the later event's
        dict (extending its start); otherwise extend the earlier event's
        end

    Returns
    -------
    list of dict
        merged events

    Notes
    -----
    The returned dicts are the input dicts, updated in place.
    """
    pad = min_interval / 2
    out = []

    for evt in events:
        if out:
            prev = out[-1]
            # gap smaller than min_interval (half padding on each side)
            if evt['start'] - pad <= prev['end'] + pad:
                longer = (evt['end'] - evt['start'] >
                          prev['end'] - prev['start'])
                if merge_to_longer and longer:
                    evt.update({'start': min(prev['start'], evt['start'])})
                    out[-1] = evt
                else:
                    prev.update({'end': max(prev['end'], evt['end'])})
                continue

        out.append(evt)

    return out
Merge events that are separated by less than a minimum interval .
204
14
23,297
def within_duration(events, time, limits):
    """Check whether events are within the duration limits.

    Parameters
    ----------
    events : ndarray (dtype='int')
        N x M matrix with start sample in the first column and end sample
        in the last column
    time : ndarray (dtype='float')
        time vector, one value per sample
    limits : tuple of float
        (minimum duration, maximum duration) in s; either may be None to
        skip that check

    Returns
    -------
    ndarray (dtype='int')
        events whose duration satisfies both limits
    """
    lo, hi = limits
    keep = ones(events.shape[0], dtype=bool)

    if lo is not None or hi is not None:
        # duration on the time axis; the end sample is inclusive
        dur = time[events[:, -1] - 1] - time[events[:, 0]]
        if lo is not None:
            keep = keep & (dur >= lo)
        if hi is not None:
            keep = keep & (dur <= hi)

    return events[keep, :]
Check whether event is within time limits .
133
8
23,298
def remove_straddlers(events, time, s_freq, toler=0.1):
    """Reject an event if it straddles a stitch, by comparing its
    sample-count duration to its timespan on the time axis.

    Parameters
    ----------
    events : ndarray (dtype='int')
        N x M matrix with start sample in the first column and end sample
        in the last column
    time : ndarray (dtype='float')
        time vector, one value per sample (may contain discontinuities
        where segments were stitched together)
    s_freq : float
        sampling frequency
    toler : float
        maximum tolerated difference (in s) between timespan and
        duration

    Returns
    -------
    ndarray (dtype='int')
        events lying entirely within one continuous segment
    """
    # duration implied by the number of samples (end sample inclusive)
    expected = (events[:, -1] - 1 - events[:, 0]) / s_freq
    # actual span on the (possibly discontinuous) time axis
    span = time[events[:, -1] - 1] - time[events[:, 0]]
    is_continuous = span - expected < toler

    return events[is_continuous, :]
Reject an event if it straddles a stitch by comparing its duration to its timespan .
83
20
23,299
def power_ratio(events, dat, s_freq, limits, ratio_thresh):
    """Estimate the ratio in power between the spindle band and the lower
    frequencies, and keep only events above a threshold.

    Parameters
    ----------
    events : ndarray (dtype='int')
        N x 3 matrix with start, peak, end samples
    dat : ndarray (dtype='float')
        vector with the raw data
    s_freq : float
        sampling frequency
    limits : tuple of float
        (low, high) frequency limits of the spindle band
    ratio_thresh : float
        minimum ratio of mean amplitude in the band over mean amplitude
        up to the band's upper limit

    Returns
    -------
    ndarray (dtype='int')
        events whose ratio exceeds ratio_thresh
    """
    ratio = empty(events.shape[0])

    for idx, ev in enumerate(events):
        beg = ev[0]
        end = ev[2]

        if beg < 0 or end >= len(dat):
            # event sticks out of the recording: force rejection
            ratio[idx] = 0
            continue

        freqs, spectrum = periodogram(dat[beg:end], s_freq,
                                      scaling='spectrum')
        amp = sqrt(spectrum)  # work on amplitude, not power
        in_band = (freqs >= limits[0]) & (freqs <= limits[1])
        # NOTE: the denominator includes the band itself, so the ratio is
        # bounded by len(below_top) / len(in_band)
        below_top = freqs <= limits[1]
        ratio[idx] = mean(amp[in_band]) / mean(amp[below_top])

    return events[ratio > ratio_thresh, :]
Estimate the ratio in power between spindle band and lower frequencies .
210
14