idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
21,600
def Append(self, **kw):
    """Append values to existing construction variables in an Environment.

    Each keyword is a construction-variable name; its value is appended to
    the variable already in self._dict, with type-dependent merge rules
    (list vs. dict vs. scalar).  CPPDEFINES gets special handling so a
    plain string value is promoted to a one-element list before merging.
    """
    kw = copy_non_reserved_keywords(kw)
    for key, val in kw.items():
        # It would be easier on the eyes to write this using
        # "continue" statements whenever we finish processing an item,
        # but Python 1.5.2 apparently doesn't let you use "continue"
        # within try:-except: blocks, so we have to nest our code.
        try:
            # Promote a string-valued CPPDEFINES to a list so '+' below
            # appends rather than concatenating characters.
            if key == 'CPPDEFINES' and SCons.Util.is_String(self._dict[key]):
                self._dict[key] = [self._dict[key]]
            orig = self._dict[key]
        except KeyError:
            # No existing variable in the environment, so just set
            # it to the new value.
            if key == 'CPPDEFINES' and SCons.Util.is_String(val):
                self._dict[key] = [val]
            else:
                self._dict[key] = val
        else:
            try:
                # Check if the original looks like a dictionary.
                # If it is, we can't just try adding the value because
                # dictionaries don't have __add__() methods, and
                # things like UserList will incorrectly coerce the
                # original dict to a list (which we don't want).
                update_dict = orig.update
            except AttributeError:
                try:
                    # Most straightforward:  just try to add them
                    # together.  This will work in most cases, when the
                    # original and new values are of compatible types.
                    self._dict[key] = orig + val
                except (KeyError, TypeError):
                    try:
                        # Check if the original is a list.
                        add_to_orig = orig.append
                    except AttributeError:
                        # The original isn't a list, but the new
                        # value is (by process of elimination),
                        # so insert the original in the new value
                        # (if there's one to insert) and replace
                        # the variable with it.
                        if orig:
                            val.insert(0, orig)
                        self._dict[key] = val
                    else:
                        # The original is a list, so append the new
                        # value to it (if there's a value to append).
                        if val:
                            add_to_orig(val)
            else:
                # The original looks like a dictionary, so update it
                # based on what we think the value looks like.
                if SCons.Util.is_List(val):
                    if key == 'CPPDEFINES':
                        # Convert the existing dict to a list of
                        # (name, value) / (name,) tuples before
                        # concatenating the new list onto it.
                        tmp = []
                        for (k, v) in orig.items():
                            if v is not None:
                                tmp.append((k, v))
                            else:
                                tmp.append((k,))
                        orig = tmp
                        orig += val
                        self._dict[key] = orig
                    else:
                        # Treat the list elements as dict keys with
                        # no associated value.
                        for v in val:
                            orig[v] = None
                else:
                    try:
                        update_dict(val)
                    except (AttributeError, TypeError, ValueError):
                        if SCons.Util.is_Dict(val):
                            for k, v in val.items():
                                orig[k] = v
                        else:
                            orig[val] = None
    # Appending may change scanner-relevant variables; drop the cache.
    self.scanner_map_delete(kw)
Append values to existing construction variables in an Environment .
675
11
21,601
def AppendENVPath(self, name, newpath, envname='ENV', sep=os.pathsep, delete_existing=1):
    """Append path elements to the variable `name` inside the `envname` dict.

    Duplicate elements are added only once (paths are normalized by
    SCons.Util.AppendPath); the existing value may be a list or a string.
    """
    current = ''
    if envname in self._dict and name in self._dict[envname]:
        current = self._dict[envname][name]

    merged = SCons.Util.AppendPath(current, newpath, sep, delete_existing,
                                   canonicalize=self._canonicalize)

    # Create the sub-dictionary on demand, then store the merged path.
    self._dict.setdefault(envname, {})[name] = merged
Append path elements to the path name in the ENV dictionary for this environment . Will only add any particular path once and will normpath and normcase all paths to help assure this . This can also handle the case where the env variable is a list instead of a string .
142
56
21,602
def Detect(self, progs):
    """Return the first program in `progs` found on the path, else None."""
    candidates = progs if SCons.Util.is_List(progs) else [progs]
    # Return the *name* of the first candidate that WhereIs can locate.
    return next((name for name in candidates if self.WhereIs(name)), None)
Return the first available program in progs .
54
9
21,603
def Dump(self, key=None):
    """Return the contents of the build environment, pretty-printed.

    Args:
        key: optional construction-variable name; if given, only that
            variable's value is dumped.

    Returns:
        str: the pprint-formatted representation.
    """
    import pprint
    pp = pprint.PrettyPrinter(indent=2)
    # Fix: the original bound a local named `dict`, shadowing the builtin
    # for the remainder of the block; use a non-conflicting name.
    if key:
        contents = self.Dictionary(key)
    else:
        contents = self.Dictionary()
    return pp.pformat(contents)
Using the standard Python pretty printer return the contents of the scons build environment as a string .
55
19
21,604
def FindIxes(self, paths, prefix, suffix):
    """Return the first path whose basename matches the given prefix/suffix.

    `prefix` and `suffix` are construction-variable names; they are
    substituted before matching.  Returns None when nothing matches.
    """
    pfx = self.subst('$' + prefix)
    sfx = self.subst('$' + suffix)
    for candidate in paths:
        _dirpart, base = os.path.split(str(candidate))
        # NOTE: the suffix check deliberately uses slicing, not endswith():
        # with an empty suffix, base[-0:] == base, which only matches an
        # empty basename — preserving the original behavior.
        if base.startswith(pfx) and base[-len(sfx):] == sfx:
            return candidate
Search a list of paths for something that matches the prefix and suffix .
84
14
21,605
def ParseDepends(self, filename, must_exist=None, only_one=0):
    """Parse a mkdep-style file for explicit dependencies.

    This is completely abusable and should be unnecessary in the normal
    case of proper SCons configuration, but it may ease the transition
    from a Make hierarchy, or support tools that emit .d files.

    Raises SCons.Errors.UserError if only_one is set and more than one
    dependency target is found.
    """
    filename = self.subst(filename)
    try:
        fp = open(filename, 'r')
    except IOError:
        # Missing file is ignored unless the caller demanded it exist.
        if must_exist:
            raise
        return
    lines = SCons.Util.LogicalLines(fp).readlines()
    # Drop comment lines.
    lines = [l for l in lines if l[0] != '#']
    tdlist = []
    for line in lines:
        try:
            target, depends = line.split(':', 1)
        except (AttributeError, ValueError):
            # Throws AttributeError if line isn't a string.  Can throw
            # ValueError if line doesn't split into two or more elements.
            pass
        else:
            tdlist.append((target.split(), depends.split()))
    if only_one:
        targets = []
        for td in tdlist:
            targets.extend(td[0])
        if len(targets) > 1:
            raise SCons.Errors.UserError(
                "More than one dependency target found in `%s': %s" % (filename, targets))
    for target, depends in tdlist:
        self.Depends(target, depends)
Parse a mkdep - style file for explicit dependencies . This is completely abusable and should be unnecessary in the normal case of proper SCons configuration but it may help make the transition from a Make hierarchy easier for some people to swallow . It can also be genuinely useful when using a tool that can write a . d file but for which writing a scanner would be too complicated .
260
76
21,606
def Prepend(self, **kw):
    """Prepend values to existing construction variables in an Environment.

    Mirror image of Append(): each keyword's value is merged in *front*
    of the existing value, with type-dependent rules for lists, dicts
    and scalars.
    """
    kw = copy_non_reserved_keywords(kw)
    for key, val in kw.items():
        # It would be easier on the eyes to write this using
        # "continue" statements whenever we finish processing an item,
        # but Python 1.5.2 apparently doesn't let you use "continue"
        # within try:-except: blocks, so we have to nest our code.
        try:
            orig = self._dict[key]
        except KeyError:
            # No existing variable in the environment, so just set
            # it to the new value.
            self._dict[key] = val
        else:
            try:
                # Check if the original looks like a dictionary.
                # If it is, we can't just try adding the value because
                # dictionaries don't have __add__() methods, and
                # things like UserList will incorrectly coerce the
                # original dict to a list (which we don't want).
                update_dict = orig.update
            except AttributeError:
                try:
                    # Most straightforward:  just try to add them
                    # together.  This will work in most cases, when the
                    # original and new values are of compatible types.
                    # (Note the operand order: val first, then orig.)
                    self._dict[key] = val + orig
                except (KeyError, TypeError):
                    try:
                        # Check if the added value is a list.
                        add_to_val = val.append
                    except AttributeError:
                        # The added value isn't a list, but the
                        # original is (by process of elimination),
                        # so insert the new value in the original
                        # (if there's one to insert).
                        if val:
                            orig.insert(0, val)
                    else:
                        # The added value is a list, so append
                        # the original to it (if there's a value
                        # to append).
                        if orig:
                            add_to_val(orig)
                        self._dict[key] = val
            else:
                # The original looks like a dictionary, so update it
                # based on what we think the value looks like.
                if SCons.Util.is_List(val):
                    for v in val:
                        orig[v] = None
                else:
                    try:
                        update_dict(val)
                    except (AttributeError, TypeError, ValueError):
                        if SCons.Util.is_Dict(val):
                            for k, v in val.items():
                                orig[k] = v
                        else:
                            orig[val] = None
    # Prepending may change scanner-relevant variables; drop the cache.
    self.scanner_map_delete(kw)
Prepend values to existing construction variables in an Environment .
514
11
21,607
def PrependENVPath(self, name, newpath, envname='ENV', sep=os.pathsep, delete_existing=1):
    """Prepend path elements to the variable `name` inside the `envname` dict.

    Duplicate elements are added only once (paths are normalized by
    SCons.Util.PrependPath); the existing value may be a list or a string.
    """
    current = ''
    if envname in self._dict and name in self._dict[envname]:
        current = self._dict[envname][name]

    merged = SCons.Util.PrependPath(current, newpath, sep, delete_existing,
                                    canonicalize=self._canonicalize)

    # Create the sub-dictionary on demand, then store the merged path.
    self._dict.setdefault(envname, {})[name] = merged
Prepend path elements to the path name in the ENV dictionary for this environment . Will only add any particular path once and will normpath and normcase all paths to help assure this . This can also handle the case where the env variable is a list instead of a string .
142
56
21,608
def PrependUnique(self, delete_existing=0, **kw):
    """Prepend values to construction variables, skipping duplicates.

    If delete_existing is true, existing occurrences are removed first so
    the prepended values move to the front.
    """
    kw = copy_non_reserved_keywords(kw)
    for key, val in kw.items():
        if SCons.Util.is_List(val):
            val = _delete_duplicates(val, not delete_existing)
        if key not in self._dict or self._dict[key] in ('', None):
            # Nothing meaningful there yet: just set the value.
            self._dict[key] = val
        elif SCons.Util.is_Dict(self._dict[key]) and SCons.Util.is_Dict(val):
            self._dict[key].update(val)
        elif SCons.Util.is_List(val):
            dk = self._dict[key]
            if not SCons.Util.is_List(dk):
                dk = [dk]
            if delete_existing:
                dk = [x for x in dk if x not in val]
            else:
                val = [x for x in val if x not in dk]
            self._dict[key] = val + dk
        else:
            dk = self._dict[key]
            if SCons.Util.is_List(dk):
                # By elimination, val is not a list.  Since dk is a
                # list, wrap val in a list first.
                # NOTE(review): when val is a string, `x not in val`
                # below is a substring test, not equality — confirm
                # this is the intended upstream behavior.
                if delete_existing:
                    dk = [x for x in dk if x not in val]
                    self._dict[key] = [val] + dk
                else:
                    if not val in dk:
                        self._dict[key] = [val] + dk
            else:
                if delete_existing:
                    dk = [x for x in dk if x not in val]
                self._dict[key] = val + dk
    # Prepending may change scanner-relevant variables; drop the cache.
    self.scanner_map_delete(kw)
Prepend values to existing construction variables in an Environment if they're not already there. If delete_existing is 1, removes existing values first, so values move to the front.
411
33
21,609
def ReplaceIxes(self, path, old_prefix, old_suffix, new_prefix, new_suffix):
    """Replace old_prefix/old_suffix on path's basename with new ones.

    All four ix arguments are construction-variable names and are
    substituted before use.
    """
    opfx = self.subst('$' + old_prefix)
    osfx = self.subst('$' + old_suffix)
    npfx = self.subst('$' + new_prefix)
    nsfx = self.subst('$' + new_suffix)

    dirname, base = os.path.split(str(path))
    # Strip the old prefix/suffix only when actually present
    # (slice comparison preserves the original's empty-ix behavior).
    if base[:len(opfx)] == opfx:
        base = base[len(opfx):]
    if base[-len(osfx):] == osfx:
        base = base[:-len(osfx)]
    return os.path.join(dirname, npfx + base + nsfx)
Replace old_prefix with new_prefix and old_suffix with new_suffix .
192
20
21,610
def WhereIs(self, prog, path=None, pathext=None, reject=[]):
    """Find `prog` in the path; return its full path or None."""
    # Fall back to the environment's PATH / PATHEXT when not given;
    # substitute construction variables in string arguments.
    if path is None:
        try:
            path = self['ENV']['PATH']
        except KeyError:
            pass
    elif SCons.Util.is_String(path):
        path = self.subst(path)

    if pathext is None:
        try:
            pathext = self['ENV']['PATHEXT']
        except KeyError:
            pass
    elif SCons.Util.is_String(pathext):
        pathext = self.subst(pathext)

    # Support "program --with-args": only look up the first word.
    prog = SCons.Util.CLVar(self.subst(prog))
    found = SCons.Util.WhereIs(prog[0], path, pathext, reject)
    return found if found else None
Find prog in the path .
191
6
21,611
def Command(self, target, source, action, **kw):
    """Build `target` from `source` with `action` via a one-off Builder.

    `action` may be anything the Builder constructor accepts as an action.
    """
    builder_kw = {
        'action': action,
        'target_factory': self.fs.Entry,
        'source_factory': self.fs.Entry,
    }
    # A source_scanner keyword belongs on the Builder, not the call.
    if 'source_scanner' in kw:
        builder_kw['source_scanner'] = kw.pop('source_scanner')
    bld = SCons.Builder.Builder(**builder_kw)
    return bld(self, target, source, **kw)
Builds the supplied target files from the supplied source files using the supplied action . Action may be any type that the Builder constructor will accept for an action .
125
31
21,612
def Depends(self, target, dependency):
    """Explicitly specify that target(s) depend on dependency.

    Returns the list of target nodes.
    """
    target_nodes = self.arg2nodes(target, self.fs.Entry)
    dep_nodes = self.arg2nodes(dependency, self.fs.Entry)
    for node in target_nodes:
        node.add_dependency(dep_nodes)
    return target_nodes
Explicitly specify that target(s) depend on dependency.
66
10
21,613
def NoClean(self, *targets):
    """Tag each target so that it will not be removed by -c (clean)."""
    nodes = [node
             for tgt in targets
             for node in self.arg2nodes(tgt, self.fs.Entry)]
    for node in nodes:
        node.set_noclean()
    return nodes
Tags a target so that it will not be cleaned by -c.
59
13
21,614
def NoCache(self, *targets):
    """Tag each target so that it will not be stored in the cache."""
    nodes = [node
             for tgt in targets
             for node in self.arg2nodes(tgt, self.fs.Entry)]
    for node in nodes:
        node.set_nocache()
    return nodes
Tags a target so that it will not be cached
59
10
21,615
def Execute(self, action, *args, **kw):
    """Directly execute an action through an Environment.

    Returns the action's result, or the BuildError status after writing
    the error to stderr.
    """
    act = self.Action(action, *args, **kw)
    result = act([], [], self)
    if not isinstance(result, SCons.Errors.BuildError):
        return result
    # Build failed: report on stderr and return the numeric status.
    errstr = result.errstr
    if result.filename:
        errstr = result.filename + ': ' + errstr
    sys.stderr.write("scons: *** %s\n" % errstr)
    return result.status
Directly execute an action through an Environment
115
8
21,616
def Ignore(self, target, dependency):
    """Ignore a dependency when deciding whether target(s) are out of date."""
    target_nodes = self.arg2nodes(target, self.fs.Entry)
    dep_nodes = self.arg2nodes(dependency, self.fs.Entry)
    for node in target_nodes:
        node.add_ignore(dep_nodes)
    return target_nodes
Ignore a dependency .
64
5
21,617
def SideEffect(self, side_effect, target):
    """Tell scons that side_effects are built as side effects of building targets."""
    side_effects = self.arg2nodes(side_effect, self.fs.Entry)
    targets = self.arg2nodes(target, self.fs.Entry)
    for side_effect in side_effects:
        if side_effect.multiple_side_effect_has_builder():
            # A side effect cannot also be an ordinary buildable target.
            raise SCons.Errors.UserError(
                "Multiple ways to build the same target were specified for: %s" % str(side_effect))
        side_effect.add_source(targets)
        side_effect.side_effect = 1
        # Side effects are not removed before rebuilding.
        self.Precious(side_effect)
        for target in targets:
            target.side_effects.append(side_effect)
    return side_effects
Tell scons that side_effects are built as side effects of building targets .
158
16
21,618
def Split(self, arg):
    """Convert a string or list into a list of strings (or Nodes).

    A whitespace-separated string is split after substitution; a list is
    substituted element-wise; anything else is wrapped in a list.
    """
    if SCons.Util.is_List(arg):
        return [self.subst(item) for item in arg]
    if SCons.Util.is_String(arg):
        return self.subst(arg).split()
    return [self.subst(arg)]
This function converts a string or list into a list of strings or Nodes . This makes things easier for users by allowing files to be specified as a white - space separated list to be split .
71
39
21,619
def FindSourceFiles(self, node='.'):
    """Return a list of all source files below `node` (deduplicated)."""
    node = self.arg2nodes(node, self.fs.Entry)[0]
    sources = []

    # Recursively walk children: descend into directories and builder
    # outputs, collecting leaf File nodes into `sources`.
    def build_source(ss):
        for s in ss:
            if isinstance(s, SCons.Node.FS.Dir):
                build_source(s.all_children())
            elif s.has_builder():
                build_source(s.sources)
            elif isinstance(s.disambiguate(), SCons.Node.FS.File):
                sources.append(s)
    build_source(node.all_children())

    # Follow each node back to its ultimate source node (outside any
    # variant/build directory).
    def final_source(node):
        while (node != node.srcnode()):
            node = node.srcnode()
        return node
    sources = list(map(final_source, sources))
    # remove duplicates
    return list(set(sources))
returns a list of all source files .
193
9
21,620
def FindInstalledFiles(self):
    """Return the targets of the Install and InstallAs Builders."""
    from SCons.Tool import install
    # Compute and memoize the deduplicated list on first use.
    cached = install._UNIQUE_INSTALLED_FILES
    if cached is None:
        cached = SCons.Util.uniquer_hashables(install._INSTALLED_FILES)
        install._UNIQUE_INSTALLED_FILES = cached
    return cached
returns the list of all targets of the Install and InstallAs Builder .
84
15
21,621
def generate(env):
    """Add Builders and construction variables for pdflatex to an Environment."""
    # Module-level actions are created lazily and shared across calls.
    global PDFLaTeXAction
    if PDFLaTeXAction is None:
        PDFLaTeXAction = SCons.Action.Action('$PDFLATEXCOM', '$PDFLATEXCOMSTR')

    global PDFLaTeXAuxAction
    if PDFLaTeXAuxAction is None:
        PDFLaTeXAuxAction = SCons.Action.Action(
            PDFLaTeXAuxFunction, strfunction=SCons.Tool.tex.TeXLaTeXStrFunction)

    env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes)

    # Ensure the base PDF builder exists before extending it.
    from . import pdf
    pdf.generate(env)

    bld = env['BUILDERS']['PDF']
    bld.add_action('.ltx', PDFLaTeXAuxAction)
    bld.add_action('.latex', PDFLaTeXAuxAction)
    bld.add_emitter('.ltx', SCons.Tool.tex.tex_pdf_emitter)
    bld.add_emitter('.latex', SCons.Tool.tex.tex_pdf_emitter)

    SCons.Tool.tex.generate_common(env)
Add Builders and construction variables for pdflatex to an Environment .
271
16
21,622
def installShlibLinks(dest, source, env):
    """If installing a versioned shared library, create the required links.

    Fix: the Verbose branch used '{:r}'.format(...), which is an invalid
    format *spec* and raises ValueError ("Unknown format code 'r'");
    repr conversion is spelled with the '!r' conversion flag.
    """
    Verbose = False
    symlinks = listShlibLinksToInstall(dest, source, env)
    if Verbose:
        print('installShlibLinks: symlinks={!r}'.format(
            SCons.Tool.StringizeLibSymlinks(symlinks)))
    if symlinks:
        SCons.Tool.CreateLibSymlinks(env, symlinks)
    return
If we are installing a versioned shared library create the required links .
91
14
21,623
def installFunc(target, source, env):
    """Install each source into its target via the INSTALL construction variable.

    Returns 0 on success, 1 as soon as any single install fails.
    """
    try:
        install = env['INSTALL']
    except KeyError:
        raise SCons.Errors.UserError('Missing INSTALL construction variable.')

    assert len(target) == len(source), \
        "Installing source %s into target %s: target and source lists must have same length." % (list(map(str, source)), list(map(str, target)))

    for tgt, src in zip(target, source):
        # A truthy return from the install function means failure.
        if install(tgt.get_path(), src.get_path(), env):
            return 1
    return 0
Install a source file into a target using the function specified as the INSTALL construction variable .
135
18
21,624
def installFuncVersionedLib(target, source, env):
    """Install each versioned-library source via INSTALLVERSIONEDLIB.

    Returns 0 on success, 1 as soon as any single install fails.
    """
    try:
        install = env['INSTALLVERSIONEDLIB']
    except KeyError:
        raise SCons.Errors.UserError('Missing INSTALLVERSIONEDLIB construction variable.')

    assert len(target) == len(source), \
        "Installing source %s into target %s: target and source lists must have same length." % (list(map(str, source)), list(map(str, target)))

    for tgt, src in zip(target, source):
        # Versioned shared libs carry their real name in node attributes.
        if hasattr(tgt.attributes, 'shlibname'):
            tpath = os.path.join(tgt.get_dir(), tgt.attributes.shlibname)
        else:
            tpath = tgt.get_path()
        if install(tpath, src.get_path(), env):
            return 1
    return 0
Install a versioned library into a target using the function specified as the INSTALLVERSIONEDLIB construction variable .
191
22
21,625
def parse_multiple_rpcs(cls, record_data):
    """Parse record_data into multiple error checking rpcs.

    Raises ArgumentError when a record of the wrong type is encountered.
    """
    rpcs = []
    while len(record_data) > 0:
        # Header: u32 total record length, u8 record type, 3 pad bytes.
        total_length, record_type = struct.unpack_from("<LB3x", record_data)
        if record_type != SendErrorCheckingRPCRecord.RecordType:
            raise ArgumentError("Record set contains a record that is not an error checking RPC",
                                record_type=record_type)
        # Payload starts after the 8-byte header.
        record_contents = record_data[8:total_length]
        parsed_rpc = cls._parse_rpc_info(record_contents)
        rpcs.append(parsed_rpc)
        # NOTE(review): a record advertising total_length == 0 would make
        # this loop spin forever — confirm upstream guarantees a nonzero
        # length before relying on this parser with untrusted input.
        record_data = record_data[total_length:]
    return rpcs
Parse record_data into multiple error checking rpcs .
165
13
21,626
def generate(env):
    """Add Builders and construction variables for clang to an Environment."""
    SCons.Tool.cc.generate(env)

    env['CC'] = env.Detect(compilers) or 'clang'
    if env['PLATFORM'] in ['cygwin', 'win32']:
        env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
    else:
        # Position-independent code is needed for shared objects elsewhere.
        env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC')

    # determine compiler version
    if env['CC']:
        #pipe = SCons.Action._subproc(env, [env['CC'], '-dumpversion'],
        pipe = SCons.Action._subproc(env, [env['CC'], '--version'],
                                     stdin='devnull',
                                     stderr='devnull',
                                     stdout=subprocess.PIPE)
        if pipe.wait() != 0:
            return
        # clang -dumpversion is of no use
        line = pipe.stdout.readline()
        if sys.version_info[0] > 2:
            # Python 3 reads subprocess output as bytes.
            line = line.decode()
        match = re.search(r'clang +version +([0-9]+(?:\.[0-9]+)+)', line)
        if match:
            env['CCVERSION'] = match.group(1)
Add Builders and construction variables for clang to an Environment .
307
13
21,627
def wait_running(self, timeout=None):
    """Block until the thread signals that it has started running.

    Raises TimeoutExpiredError if the signal does not arrive in time.
    """
    if self._running.wait(timeout) is False:
        raise TimeoutExpiredError("Timeout waiting for thread to start running")
Wait for the thread to pass control to its routine .
45
11
21,628
def create_event(self, register=False):
    """Create an asyncio.Event for use inside the emulation loop.

    Fix: asyncio.Event no longer accepts a ``loop`` argument (deprecated
    in Python 3.8, removed in 3.10); the event now binds to the running
    loop on first use, which is the loop that awaits it.

    Args:
        register: if True, track the event in self._events so idle
            detection can wait on it.

    Returns:
        asyncio.Event: the newly created event.
    """
    event = asyncio.Event()
    if register:
        self._events.add(event)
    return event
Create an asyncio . Event inside the emulation loop .
40
11
21,629
def create_queue(self, register=False):
    """Create a new work queue and optionally register it.

    Fix: asyncio.Queue no longer accepts a ``loop`` argument (deprecated
    in Python 3.8, removed in 3.10); the queue now binds to the running
    loop on first use.

    Args:
        register: if True, track the queue in self._work_queues so idle
            detection can join on it.

    Returns:
        asyncio.Queue: the newly created queue.
    """
    queue = asyncio.Queue()
    if register:
        self._work_queues.add(queue)
    return queue
Create a new work queue and optionally register it .
44
10
21,630
def start(self):
    """Spin up the background thread that runs the emulation loop.

    Raises ArgumentError when called more than once.
    """
    if self._started is True:
        raise ArgumentError("EmulationLoop.start() called multiple times")

    self._thread = threading.Thread(target=self._loop_thread_main)
    self._thread.start()
    self._started = True
Start the background emulation loop .
65
6
21,631
def stop(self):
    """Stop the background emulation loop and join its thread.

    Must be called from outside the loop thread; raises ArgumentError if
    start() was never called.
    """
    if self._started is False:
        raise ArgumentError("EmulationLoop.stop() called without calling start()")

    self.verify_calling_thread(False, "Cannot call EmulationLoop.stop() from inside the event loop")

    if self._thread.is_alive():
        # Schedule the shutdown coroutine on the loop, then wait for the
        # loop thread to exit.
        self._loop.call_soon_threadsafe(self._loop.create_task, self._clean_shutdown())
        self._thread.join()
Stop the background emulation loop .
111
6
21,632
def wait_idle(self, timeout=1.0):
    """Wait until all registered work queues are drained and events set.

    Raises TimeoutExpiredError if the loop does not go idle in time.
    """
    async def _awaiter():
        # Join every registered work queue...
        background_work = {x.join() for x in self._work_queues}
        # ...and wait on every registered event that isn't set yet.
        for event in self._events:
            if not event.is_set():
                background_work.add(event.wait())
        _done, pending = await asyncio.wait(background_work, timeout=timeout)
        if len(pending) > 0:
            raise TimeoutExpiredError("Timeout waiting for event loop to become idle",
                                      pending=pending)

    if self._on_emulation_thread():
        # NOTE(review): on the emulation thread this returns an awaitable
        # (the caller must await it); off-thread it blocks and returns
        # None — confirm callers handle both shapes.
        return asyncio.wait_for(_awaiter(), timeout=timeout)

    self.run_task_external(_awaiter())
    return None
Wait until the rpc queue is empty .
170
9
21,633
def run_task_external(self, coroutine):
    """Run `coroutine` on the emulation loop from another thread and block for its result."""
    self.verify_calling_thread(False, 'run_task_external must not be called from the emulation thread')
    return asyncio.run_coroutine_threadsafe(coroutine, self._loop).result()
Inject a task into the emulation loop and wait for it to finish .
66
15
21,634
def call_rpc_external(self, address, rpc_id, arg_payload, timeout=10.0):
    """Call an RPC from outside the event loop and block until it finishes.

    Returns the response payload; an RPCRuntimeError is translated into
    its binary error payload.
    """
    self.verify_calling_thread(False, "call_rpc_external is for use **outside** of the event loop")

    response = CrossThreadResponse()
    # Hand the RPC to the queue from the loop thread, never directly.
    self._loop.call_soon_threadsafe(self._rpc_queue.put_rpc,
                                    address, rpc_id, arg_payload, response)
    try:
        return response.wait(timeout)
    except RPCRuntimeError as err:
        return err.binary_error
Call an RPC from outside of the event loop and block until it finishes .
126
15
21,635
async def await_rpc(self, address, rpc_id, *args, **kwargs):
    """Send an RPC from inside the EmulationLoop and await its response.

    `rpc_id` may be an RPCDeclaration (carrying its own argument and
    response formats) or a bare id with formats passed as keyword
    arguments.  Returns the unpacked response, or [] when no response
    format is known.
    """
    self.verify_calling_thread(True, "await_rpc must be called from **inside** the event loop")

    if isinstance(rpc_id, RPCDeclaration):
        arg_format = rpc_id.arg_format
        resp_format = rpc_id.resp_format
        rpc_id = rpc_id.rpc_id
    else:
        arg_format = kwargs.get('arg_format', None)
        resp_format = kwargs.get('resp_format', None)

    arg_payload = b''
    if arg_format is not None:
        arg_payload = pack_rpc_payload(arg_format, args)

    self._logger.debug("Sending rpc to %d:%04X, payload=%s", address, rpc_id, args)

    response = AwaitableResponse()
    self._rpc_queue.put_rpc(address, rpc_id, arg_payload, response)

    try:
        # NOTE(review): response timeout is hard-coded to 1.0s regardless
        # of caller expectations — confirm this is intended.
        resp_payload = await response.wait(1.0)
    except RPCRuntimeError as err:
        # A runtime error carries the binary error payload to unpack.
        resp_payload = err.binary_error

    if resp_format is None:
        return []

    resp = unpack_rpc_payload(resp_format, resp_payload)
    return resp
Send an RPC from inside the EmulationLoop .
315
10
21,636
def verify_calling_thread(self, should_be_emulation, message=None):
    """Raise InternalError unless the caller is on the expected thread.

    Args:
        should_be_emulation: True if the caller must be the emulation
            thread, False if it must not be.
        message: optional error text; a generic message is used when None.
    """
    if should_be_emulation != self._on_emulation_thread():
        raise InternalError(message if message is not None
                            else "Operation performed on invalid thread")
Verify if the calling thread is or is not the emulation thread .
62
14
21,637
def add_task(self, tile_address, coroutine):
    """Thread-safely schedule `coroutine` as a task owned by `tile_address`."""
    # Delegate to _add_task on the loop thread; safe to call from anywhere.
    self._loop.call_soon_threadsafe(self._add_task, tile_address, coroutine)
Add a task into the event loop .
41
8
21,638
async def stop_tasks(self, address):
    """Cancel and drain all tasks registered for the tile at `address`.

    Fix: the original called asyncio.gather(...) without awaiting it, so
    the cancellations were never actually waited on before the registry
    was cleared (and the gather future was silently discarded).  Awaiting
    the gather retires the tasks deterministically;
    return_exceptions=True swallows the expected CancelledError results.
    """
    tasks = self._tasks.get(address, [])
    for task in tasks:
        task.cancel()

    await asyncio.gather(*tasks, return_exceptions=True)
    self._tasks[address] = []
Clear all tasks pertaining to a tile .
64
8
21,639
async def _clean_shutdown(self):
    """Cleanly shutdown the emulation loop."""
    # Cleanly stop any other outstanding tasks not associated with tiles
    remaining_tasks = []
    for task in self._tasks.get(None, []):
        self._logger.debug("Cancelling task at shutdown %s", task)
        task.cancel()
        remaining_tasks.append(task)
    # NOTE(review): this gather is not awaited, so cancellation of these
    # tasks is not actually waited on here — confirm intended.
    asyncio.gather(*remaining_tasks, return_exceptions=True)
    if len(remaining_tasks) > 0:
        del self._tasks[None]

    # Shutdown tasks associated with each tile, highest address first.
    remaining_tasks = []
    for address in sorted(self._tasks, reverse=True):
        if address is None:
            continue
        self._logger.debug("Shutting down tasks for tile at %d", address)
        for task in self._tasks.get(address, []):
            task.cancel()
            remaining_tasks.append(task)
    # NOTE(review): same un-awaited gather as above.
    asyncio.gather(*remaining_tasks, return_exceptions=True)

    await self._rpc_queue.stop()
    # Stopping the loop lets the loop thread exit its run_forever().
    self._loop.stop()
Cleanly shutdown the emulation loop .
248
7
21,640
def _add_task ( self , tile_address , coroutine ) : self . verify_calling_thread ( True , "_add_task is not thread safe" ) if tile_address not in self . _tasks : self . _tasks [ tile_address ] = [ ] task = self . _loop . create_task ( coroutine ) self . _tasks [ tile_address ] . append ( task )
Add a task from within the event loop .
90
9
21,641
def key_rule(self, regex, verifier):
    """Add a rule whose pattern should apply to all keys.

    `regex` may be None, in which case the rule matches unconditionally;
    otherwise it is compiled before being stored.
    """
    compiled = re.compile(regex) if regex is not None else None
    self._additional_key_rules.append((compiled, verifier))
Add a rule with a pattern that should apply to all keys .
46
13
21,642
def stream(self, report, callback=None):
    """Queue report data for streaming.

    Broadcast reports are always delivered; regular reports only when a
    connection exists.  `callback`, if given, receives a success flag.
    """
    conn_id = self._find_connection(self.conn_string)
    is_broadcast = isinstance(report, BroadcastReport)

    if is_broadcast:
        self.adapter.notify_event_nowait(self.conn_string, 'broadcast', report)
    elif conn_id is not None:
        self.adapter.notify_event_nowait(self.conn_string, 'report', report)

    if callback is not None:
        callback(is_broadcast or (conn_id is not None))
Queue data for streaming
121
4
21,643
def trace(self, data, callback=None):
    """Queue data for tracing when a connection exists.

    `callback`, if given, receives True when the trace was delivered.
    """
    connected = self._find_connection(self.conn_string) is not None
    if connected:
        self.adapter.notify_event_nowait(self.conn_string, 'trace', data)
    if callback is not None:
        callback(connected)
Queue data for tracing
75
4
21,644
def _load_device(self, name, config):
    """Load a device either from a script or from an installed module.

    `config` may be None (empty config), a dict (used as-is), a
    '#'-prefixed base64-encoded JSON string, or a path to a JSON file
    containing a top-level 'device' key.
    """
    if config is None:
        config_dict = {}
    elif isinstance(config, dict):
        config_dict = config
    elif config[0] == '#':
        # Allow passing base64 encoded json directly in the port string to ease testing.
        import base64
        config_str = str(base64.b64decode(config[1:]), 'utf-8')
        config_dict = json.loads(config_str)
    else:
        try:
            with open(config, "r") as conf:
                data = json.load(conf)
        except IOError as exc:
            raise ArgumentError("Could not open config file", error=str(exc), path=config)
        if 'device' not in data:
            raise ArgumentError("Invalid configuration file passed to VirtualDeviceAdapter",
                                device_name=name, config_path=config, missing_key='device')
        config_dict = data['device']

    reg = ComponentRegistry()

    # A .py name is loaded directly as a script extension.
    if name.endswith('.py'):
        _name, device_factory = reg.load_extension(name, class_filter=VirtualIOTileDevice,
                                                   unique=True)
        return device_factory(config_dict)

    # Otherwise look it up among installed virtual_device entry points.
    seen_names = []
    for device_name, device_factory in reg.load_extensions('iotile.virtual_device',
                                                           class_filter=VirtualIOTileDevice,
                                                           product_name="virtual_device"):
        if device_name == name:
            return device_factory(config_dict)
        seen_names.append(device_name)

    raise ArgumentError("Could not find virtual_device by name",
                        name=name, known_names=seen_names)
Load a device either from a script or from an installed module
381
12
21,645
async def disconnect(self, conn_id):
    """Asynchronously disconnect from a connected device."""
    self._ensure_connection(conn_id, True)
    device = self._get_property(conn_id, 'device')
    device.connected = False
    self._teardown_connection(conn_id)
Asynchronously disconnect from a connected device
61
8
21,646
async def _send_scan_event ( self , device ) : conn_string = str ( device . iotile_id ) info = { 'connection_string' : conn_string , 'uuid' : device . iotile_id , 'signal_strength' : 100 , 'validity_period' : self . ExpirationTime } await self . notify_event ( conn_string , 'device_seen' , info )
Send a scan event from a device .
96
8
21,647
def rpc_name(rpc_id):
    """Map an RPC id to a string name, falling back to a hex label."""
    name = _RPC_NAME_MAP.get(rpc_id)
    return name if name is not None else 'RPC 0x%04X' % rpc_id
Map an RPC id to a string name .
52
9
21,648
def stream_name(stream_id):
    """Map a stream id to a human readable '<name> (0xXXXX)' label."""
    label = _STREAM_NAME_MAP.get(stream_id)
    if label is None:
        # Fall back to decoding the raw stream id.
        label = str(DataStream.FromEncoded(stream_id))
    return "{} (0x{:04X})".format(label, stream_id)
Map a stream id to a human readable name .
68
10
21,649
def set_option(self, name, value):
    """Sets an option from an SConscript file.

    Validates/coerces the value per option name; raises
    SCons.Errors.UserError on bad values or non-settable options.
    """
    if not name in self.settable:
        raise SCons.Errors.UserError("This option is not settable from a SConscript file: %s" % name)
    if name == 'num_jobs':
        try:
            value = int(value)
            if value < 1:
                raise ValueError
        except ValueError:
            raise SCons.Errors.UserError("A positive integer is required: %s" % repr(value))
    elif name == 'max_drift':
        try:
            value = int(value)
        except ValueError:
            raise SCons.Errors.UserError("An integer is required: %s" % repr(value))
    elif name == 'duplicate':
        try:
            value = str(value)
        except ValueError:
            raise SCons.Errors.UserError("A string is required: %s" % repr(value))
        if not value in SCons.Node.FS.Valid_Duplicates:
            raise SCons.Errors.UserError("Not a valid duplication style: %s" % value)
        # Set the duplicate style right away so it can affect linking
        # of SConscript files.
        SCons.Node.FS.set_duplicate(value)
    elif name == 'diskcheck':
        try:
            value = diskcheck_convert(value)
        except ValueError as v:
            raise SCons.Errors.UserError("Not a valid diskcheck value: %s" % v)
        if 'diskcheck' not in self.__dict__:
            # No --diskcheck= option was specified on the command line.
            # Set this right away so it can affect the rest of the
            # file/Node lookups while processing the SConscript files.
            SCons.Node.FS.set_diskcheck(value)
    elif name == 'stack_size':
        try:
            value = int(value)
        except ValueError:
            raise SCons.Errors.UserError("An integer is required: %s" % repr(value))
    elif name == 'md5_chunksize':
        try:
            value = int(value)
        except ValueError:
            raise SCons.Errors.UserError("An integer is required: %s" % repr(value))
    elif name == 'warn':
        if SCons.Util.is_String(value):
            value = [value]
        # Warnings accumulate across multiple set_option('warn', ...) calls.
        value = self.__SConscript_settings__.get(name, []) + value
        SCons.Warnings.process_warn_strings(value)
    self.__SConscript_settings__[name] = value
Sets an option from an SConscript file .
559
11
21,650
def format_help(self, formatter):
    """Format this option group's help, outdenting the title to column 0.

    The title is rendered flush with the top-level SCons Options heading,
    then indentation is restored for the option entries themselves.
    """
    formatter.dedent()
    heading = formatter.format_heading(self.title)
    formatter.indent()
    return heading + optparse.OptionContainer.format_help(self, formatter)
Format an option group's help text, outdenting the title so it's flush with the SCons Options title we print at the top.
58
28
21,651
def _process_long_opt ( self , rargs , values ) : arg = rargs . pop ( 0 ) # Value explicitly attached to arg? Pretend it's the next # argument. if "=" in arg : ( opt , next_arg ) = arg . split ( "=" , 1 ) rargs . insert ( 0 , next_arg ) had_explicit_value = True else : opt = arg had_explicit_value = False try : opt = self . _match_long_opt ( opt ) except optparse . BadOptionError : if self . preserve_unknown_options : # SCons-specific: if requested, add unknown options to # the "leftover arguments" list for later processing. self . largs . append ( arg ) if had_explicit_value : # The unknown option will be re-processed later, # so undo the insertion of the explicit value. rargs . pop ( 0 ) return raise option = self . _long_opt [ opt ] if option . takes_value ( ) : nargs = option . nargs if nargs == '?' : if had_explicit_value : value = rargs . pop ( 0 ) else : value = option . const elif len ( rargs ) < nargs : if nargs == 1 : if not option . choices : self . error ( _ ( "%s option requires an argument" ) % opt ) else : msg = _ ( "%s option requires an argument " % opt ) msg += _ ( "(choose from %s)" % ', ' . join ( option . choices ) ) self . error ( msg ) else : self . error ( _ ( "%s option requires %d arguments" ) % ( opt , nargs ) ) elif nargs == 1 : value = rargs . pop ( 0 ) else : value = tuple ( rargs [ 0 : nargs ] ) del rargs [ 0 : nargs ] elif had_explicit_value : self . error ( _ ( "%s option does not take a value" ) % opt ) else : value = None option . process ( opt , value , values , self )
SCons - specific processing of long options .
453
9
21,652
def add_local_option(self, *args, **kw):
    """Add a local (project-defined) option to the parser.

    Local options are collected in a dedicated "Local Options" group,
    which is created lazily on first use.
    """
    try:
        group = self.local_option_group
    except AttributeError:
        group = SConsOptionGroup(self, 'Local Options')
        group = self.add_option_group(group)
        self.local_option_group = group

    result = group.add_option(*args, **kw)

    if result:
        # The option was added successfully.  We now have to add the
        # default value to our object that holds the default values
        # (so that an attempt to fetch the option's attribute will
        # yield the default value when not overridden) and then
        # we re-parse the leftover command-line options, so that
        # any value overridden on the command line is immediately
        # available if the user turns around and does a GetOption()
        # right away.
        setattr(self.values.__defaults__, result.dest, result.default)
        self.reparse_local_options()

    return result
Adds a local option to the parser .
211
8
21,653
def format_heading(self, heading):
    """Render a help heading, renaming the generic "Options" title.

    optparse hard-codes the heading "Options" internally, so it is
    translated here to "SCons Options" before delegating to the
    standard indented formatter.
    """
    title = "SCons Options" if heading == 'Options' else heading
    return optparse.IndentedHelpFormatter.format_heading(self, title)
This translates any heading of options or Options into SCons Options . Unfortunately we have to do this here because those titles are hard - coded in the optparse calls .
42
33
21,654
def to_dict(self):
    """Convert this block into a plain dictionary representation."""
    return {
        'commands': self.commands,
        'configs': self.configs,
        'short_name': self.name,
        'versions': {
            'module': self.module_version,
            'api': self.api_version,
        },
    }
Convert this object into a dictionary .
89
8
21,655
def set_api_version(self, major, minor):
    """Record the (major, minor) API version this module was designed for.

    Both components must fit in a single byte.

    Raises:
        ArgumentError: if either component is out of byte range.
    """
    for component in (major, minor):
        if not self._is_byte(component):
            raise ArgumentError("Invalid API version number with component that does not fit in 1 byte",
                                major=major, minor=minor)

    self.api_version = (major, minor)
Set the API version this module was designed for .
76
10
21,656
def set_module_version(self, major, minor, patch):
    """Record the (major, minor, patch) version of this module.

    All three components must fit in a single byte.

    Raises:
        ArgumentError: if any component is out of byte range.
    """
    if any(not self._is_byte(component) for component in (major, minor, patch)):
        raise ArgumentError("Invalid module version number with component that does not fit in 1 byte",
                            major=major, minor=minor, patch=patch)

    self.module_version = (major, minor, patch)
Set the module version for this module .
95
8
21,657
def set_name(self, name):
    """Set the module name, space-padded to exactly 6 characters.

    Raises:
        ArgumentError: if the name is longer than 6 characters.
    """
    if len(name) > 6:
        raise ArgumentError("Name must be at most 6 characters long", name=name)

    # ljust pads with spaces; names already 6 long pass through unchanged.
    self.name = name.ljust(6)
Set the module name to a 6 byte string
63
9
21,658
def add_command(self, cmd_id, handler):
    """Register *handler* for command *cmd_id* in this TBBlock.

    Raises:
        ArgumentError: if cmd_id does not fit in 2 unsigned bytes or is
            already registered.
    """
    if not 0 <= cmd_id < 2 ** 16:
        raise ArgumentError("Command ID in mib block is not a non-negative 2-byte number",
                            cmd_id=cmd_id, handler=handler)

    if cmd_id in self.commands:
        raise ArgumentError("Attempted to add the same command ID twice.",
                            cmd_id=cmd_id,
                            existing_handler=self.commands[cmd_id],
                            new_handler=handler)

    self.commands[cmd_id] = handler
Add a command to the TBBlock .
126
8
21,659
def add_config(self, config_id, config_data):
    """Register a configuration variable in this MIB block.

    Args:
        config_id (int): the variable id; must fit in 2 unsigned bytes.
        config_data: the configuration variable's data.

    Raises:
        ArgumentError: if config_id is out of range or already present.
    """
    if config_id < 0 or config_id >= 2 ** 16:
        raise ArgumentError("Config ID in mib block is not a non-negative 2-byte number",
                            config_data=config_id, data=config_data)

    if config_id in self.configs:
        # Bug fix: this error message previously said "command ID"
        # (copy-paste from add_command) although it refers to configs.
        raise ArgumentError("Attempted to add the same config ID twice.",
                            config_data=config_id,
                            old_data=self.configs[config_id],
                            new_data=config_data)

    self.configs[config_id] = config_data
Add a configuration variable to the MIB block
137
9
21,660
def _parse_hwtype(self):
    """Translate the numeric hardware type id into a readable chip name."""
    fallback = "Unknown Chip (type=%d)" % self.hw_type
    self.chip_name = KNOWN_HARDWARE_TYPES.get(self.hw_type, fallback)
Convert the numerical hardware id to a chip name .
53
11
21,661
def render_template(self, template_name, out_path=None):
    """Render *template_name* against this block's dictionary form.

    Delegates to the module-level render_template helper, optionally
    writing the result to *out_path*.
    """
    context = self.to_dict()
    return render_template(template_name, context, out_path=out_path)
Render a template based on this TileBus Block .
43
10
21,662
def Tag(env, target, source, *more_tags, **kw_tags):
    """Tag a file with the given arguments.

    Just sets the accordingly named PACKAGING_-prefixed attribute on
    each target file object; the prefix is added automatically when
    missing.

    Raises:
        UserError: if no tags were given.
    """
    if not target:
        target = source
        first_tag = None
    else:
        first_tag = source

    if first_tag:
        kw_tags[first_tag[0]] = ''

    if len(kw_tags) == 0 and len(more_tags) == 0:
        raise UserError("No tags given.")

    # XXX: sanity checks
    for x in more_tags:
        kw_tags[x] = ''

    if not SCons.Util.is_List(target):
        target = [target]
    else:
        # hmm, sometimes the target list, is a list of a list
        # make sure it is flattened prior to processing.
        # TODO: perhaps some bug ?!?
        target = env.Flatten(target)

    for t in target:
        for (k, v) in kw_tags.items():
            # all file tags have to start with PACKAGING_, so we can later
            # differentiate between "normal" object attributes and the
            # packaging attributes. As the user should not be bothered with
            # that, the prefix will be added here if missing.
            if k[:10] != 'PACKAGING_':
                k = 'PACKAGING_' + k
            t.Tag(k, v)
Tag a file with the given arguments just sets the accordingly named attribute on the file object .
282
18
21,663
def copy_attr(f1, f2):
    """Copy the special PACKAGING_* file attributes from f1 to f2.

    Only tags that f2 does not already expose as attributes are copied.
    """
    if f1._tags:
        for tag in f1._tags:
            if tag[:10] == 'PACKAGING_' and not hasattr(f2, tag):
                f2.Tag(tag, f1.GetTag(tag))
copies the special packaging file attributes from f1 to f2 .
95
14
21,664
def putintopackageroot(target, source, env, pkgroot, honor_install_location=1):
    """Use the CopyAs builder to copy all source files into *pkgroot*.

    Files already under pkgroot are kept as-is.  Other files are copied
    under pkgroot at their PACKAGING_INSTALL_LOCATION (when tagged and
    honored) or at their current relative path; packaging attributes
    are carried over to each copy.

    Returns:
        tuple: (target, new_source) with copies substituted in.
    """
    # make sure the packageroot is a Dir object.
    if SCons.Util.is_String(pkgroot):
        pkgroot = env.Dir(pkgroot)
    if not SCons.Util.is_List(source):
        source = [source]

    new_source = []
    for file in source:
        if SCons.Util.is_String(file):
            file = env.File(file)

        if file.is_under(pkgroot):
            new_source.append(file)
        else:
            if file.GetTag('PACKAGING_INSTALL_LOCATION') and honor_install_location:
                new_name = make_path_relative(file.GetTag('PACKAGING_INSTALL_LOCATION'))
            else:
                new_name = make_path_relative(file.get_path())

            new_file = pkgroot.File(new_name)
            new_file = env.CopyAs(new_file, file)[0]
            copy_attr(file, new_file)
            new_source.append(new_file)

    return (target, new_source)
Uses the CopyAs builder to copy all source files to the directory given in pkgroot .
277
20
21,665
def stripinstallbuilder(target, source, env):
    """Strip the install builder action from the source list.

    Each installed file is replaced by its underlying source file,
    while the final installation location is remembered in that
    source's PACKAGING_INSTALL_LOCATION tag.

    Returns:
        tuple: (target, n_source) with install targets unwrapped.
    """
    def has_no_install_location(file):
        return not (file.has_builder() and hasattr(file.builder, 'name') and
                    (file.builder.name == "InstallBuilder" or
                     file.builder.name == "InstallAsBuilder"))

    if len([src for src in source if has_no_install_location(src)]):
        warn(Warning, "there are files to package which have no\
InstallBuilder attached, this might lead to irreproducible packages")

    n_source = []
    for s in source:
        if has_no_install_location(s):
            n_source.append(s)
        else:
            for ss in s.sources:
                n_source.append(ss)
                copy_attr(s, ss)
                # Remember where this file would have been installed.
                ss.Tag('PACKAGING_INSTALL_LOCATION', s.get_path())

    return (target, n_source)
Strips the install builder action from the source list and stores the final installation location as the PACKAGING_INSTALL_LOCATION of the source of the source file . This effectively removes the final installed files from the source list while remembering the installation location .
213
53
21,666
def restore(self, state):
    """Restore this stream walker from a serialized *state* dict.

    Raises:
        ArgumentError: if the serialized selector does not match ours.
    """
    saved_selector = DataStreamSelector.FromString(state.get(u'selector'))
    if saved_selector != self.selector:
        raise ArgumentError("Attempted to restore a BufferedStreamWalker with a different selector",
                            selector=self.selector, serialized_data=state)

    self.seek(state.get(u'offset'), target="offset")
Restore a previous state of this stream walker .
89
11
21,667
def pop(self):
    """Return the oldest matching reading, advancing past it.

    Raises:
        StreamEmptyError: if no readings remain for this walker.
    """
    if self._count == 0:
        raise StreamEmptyError("Pop called on buffered stream walker without any data",
                               selector=self.selector)

    # Scan forward until a reading matches our selector.
    while True:
        reading = self.engine.get(self.storage_type, self.offset)
        self.offset += 1

        if self.matches(DataStream.FromEncoded(reading.stream)):
            self._count -= 1
            return reading
Pop a reading off of this stream and return it .
97
11
21,668
def seek(self, value, target="offset"):
    """Seek this walker to an absolute offset or a reading id.

    Returns:
        bool: True if the reading now pointed at matches our selector.

    Raises:
        ArgumentError: if target is not 'offset' or 'id'.
    """
    if target not in (u'offset', u'id'):
        raise ArgumentError("You must specify target as either offset or id", target=target)

    if target == u'offset':
        self._verify_offset(value)
        self.offset = value
    else:
        self.offset = self._find_id(value)

    # Recount how many matching readings remain after the new position.
    self._count = self.engine.count_matching(self.selector, offset=self.offset)

    current = self.engine.get(self.storage_type, self.offset)
    return self.matches(DataStream.FromEncoded(current.stream))
Seek this stream to a specific offset or reading id .
149
12
21,669
def skip_all(self):
    """Advance this walker past every reading currently stored."""
    storage_count, streaming_count = self.engine.count()

    # Output selectors walk the streaming buffer; everything else
    # walks the storage buffer.
    self.offset = streaming_count if self.selector.output else storage_count
    self._count = 0
Skip all readings in this walker .
44
8
21,670
def notify_rollover(self, stream):
    """Handle a reading in *stream* being overwritten by the ring buffer.

    Our absolute offset shifts down by one; if the clobbered reading
    matched our selector we also have one fewer reading available.

    Raises:
        InternalError: if our count was already zero (walker out of
            sync with the storage engine).
    """
    self.offset -= 1

    if not self.matches(stream):
        return

    if self._count == 0:
        raise InternalError("BufferedStreamWalker out of sync with storage engine, count was wrong.")

    self._count -= 1
Notify that a reading in the given stream was overwritten .
61
13
21,671
def dump(self):
    """Serialize the state of this stream walker to a dict."""
    serialized_reading = None
    if self.reading is not None:
        serialized_reading = self.reading.asdict()

    return {u'selector': str(self.selector), u'reading': serialized_reading}
Serialize the state of this stream walker .
47
10
21,672
def peek(self):
    """Return the oldest reading in this virtual stream without consuming it.

    Raises:
        StreamEmptyError: if this virtual walker holds no reading.
    """
    current = self.reading
    if current is None:
        raise StreamEmptyError("peek called on virtual stream walker without any data",
                               selector=self.selector)
    return current
Peek at the oldest reading in this virtual stream .
42
11
21,673
def run(self, refresh_interval=0.05):
    """Run the line-buffer UI loop.

    Verifies that asciimatics is available before starting the screen
    wrapper around self._run_loop.

    Args:
        refresh_interval (float): seconds between screen refreshes.

    Raises:
        ExternalError: if asciimatics is not installed.
    """
    try:
        from asciimatics.screen import Screen
    except ImportError:
        raise ExternalError("You must have asciimatics installed to use LinebufferUI",
                            suggestion="pip install iotilecore[ui]")

    Screen.wrapper(self._run_loop, arguments=[refresh_interval])
Set up the UI loop, first checking that the required asciimatics package is installed.
84
10
21,674
def find_vc_pdir(msvc_version):
    """Try to find the Visual C++ product directory for *msvc_version*.

    Versions with no registry key are located via vswhere; otherwise
    the Windows registry is consulted (trying the Wow6432Node view
    first on 64-bit hosts).

    Returns:
        str or None: the product directory, or None if not found.

    Raises:
        UnsupportedVersion: if msvc_version is unknown.
        MissingConfiguration: if the registry names a directory that
            does not exist on disk.
    """
    root = 'Software\\'
    try:
        hkeys = _VCVER_TO_PRODUCT_DIR[msvc_version]
    except KeyError:
        debug("Unknown version of MSVC: %s" % msvc_version)
        raise UnsupportedVersion("Unknown version %s" % msvc_version)

    for hkroot, key in hkeys:
        try:
            comps = None
            if not key:
                # An empty key marks a version that must be located via vswhere.
                comps = find_vc_pdir_vswhere(msvc_version)
                if not comps:
                    debug('find_vc_dir(): no VC found via vswhere for version {}'.format(repr(key)))
                    raise SCons.Util.WinError
            else:
                if common.is_win64():
                    try:
                        # ordinally at win64, try Wow6432Node first.
                        comps = common.read_reg(root + 'Wow6432Node\\' + key, hkroot)
                    except SCons.Util.WinError as e:
                        # at Microsoft Visual Studio for Python 2.7, value is not in Wow6432Node
                        pass
                if not comps:
                    # not Win64, or Microsoft Visual Studio for Python 2.7
                    comps = common.read_reg(root + key, hkroot)
        except SCons.Util.WinError as e:
            debug('find_vc_dir(): no VC registry key {}'.format(repr(key)))
        else:
            debug('find_vc_dir(): found VC in registry: {}'.format(comps))
            if os.path.exists(comps):
                return comps
            else:
                debug('find_vc_dir(): reg says dir is {}, but it does not exist. (ignoring)'.format(comps))
                raise MissingConfiguration("registry dir {} not found on the filesystem".format(comps))
    return None
Try to find the product directory for the given version .
416
11
21,675
def compile_sgf(in_path, optimize=True, model=None):
    """Compile (and optionally optimize) an SGF file into a sensor graph.

    Args:
        in_path (str): path of the .sgf source file.
        optimize (bool): whether to run the optimizer over the result.
        model: the device model to compile against; a default
            DeviceModel is created when None.

    Returns:
        The compiled sensor graph.
    """
    if model is None:
        model = DeviceModel()

    parser = SensorGraphFileParser()
    parser.parse_file(in_path)
    parser.compile(model)

    if optimize:
        optimizer = SensorGraphOptimizer()
        optimizer.optimize(parser.sensor_graph, model=model)

    return parser.sensor_graph
Compile and optionally optimize an SGF file .
86
10
21,676
def generate(env):
    """Add Builders and construction variables for g77 to an Environment."""
    add_all_to_env(env)
    add_f77_to_env(env)

    compiler = env.Detect(compilers) or 'g77'

    # -fPIC is only needed for shared objects on non-Windows platforms.
    if env['PLATFORM'] in ['cygwin', 'win32']:
        env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS')
        env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS')
    else:
        env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -fPIC')
        env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS -fPIC')

    env['FORTRAN'] = compiler
    env['SHFORTRAN'] = '$FORTRAN'
    env['F77'] = compiler
    env['SHF77'] = '$F77'

    for prefix_var, suffix_var in (('INCFORTRANPREFIX', 'INCFORTRANSUFFIX'),
                                   ('INCF77PREFIX', 'INCF77SUFFIX')):
        env[prefix_var] = "-I"
        env[suffix_var] = ""
Add Builders and construction variables for g77 to an Environment .
300
13
21,677
def get_language():
    """Create or retrieve the parse tree for defining a sensor graph.

    The grammar is built lazily on first call and memoized in the
    module-level ``sensor_graph`` global.
    """
    global sensor_graph, statement

    if sensor_graph is not None:
        return sensor_graph

    # Build the grammar pieces in dependency order.
    _create_primitives()
    _create_simple_statements()
    _create_block_bnf()

    sensor_graph = ZeroOrMore(statement) + StringEnd()
    sensor_graph.ignore(comment)

    return sensor_graph
Create or retrieve the parse tree for defining a sensor graph .
77
12
21,678
def _create_mo_file_builder(env, **kw):
    """Create the builder object used for the MOFiles builder."""
    import SCons.Action
    # FIXME: What factory use for source? Ours or their?
    kw.update(
        action=SCons.Action.Action('$MSGFMTCOM', '$MSGFMTCOMSTR'),
        suffix='$MOSUFFIX',
        src_suffix='$POSUFFIX',
        src_builder='_POUpdateBuilder',
        single_source=True,
    )
    return _MOFileBuilder(**kw)
Create builder object for MOFiles builder
145
7
21,679
def generate(env, **kw):
    """Generate the msgfmt tool and attach the MOFiles builder to *env*."""
    import SCons.Util
    from SCons.Tool.GettextCommon import _detect_msgfmt
    try:
        env['MSGFMT'] = _detect_msgfmt(env)
    except Exception:
        # Fixed: was a bare "except:", which would also swallow
        # SystemExit and KeyboardInterrupt.  Fall back to plain
        # 'msgfmt' when detection fails for any ordinary reason.
        env['MSGFMT'] = 'msgfmt'
    env.SetDefault(
        MSGFMTFLAGS=[SCons.Util.CLVar('-c')],
        MSGFMTCOM='$MSGFMT $MSGFMTFLAGS -o $TARGET $SOURCE',
        MSGFMTCOMSTR='',
        MOSUFFIX=['.mo'],
        POSUFFIX=['.po'])
    env.Append(BUILDERS={'MOFiles': _create_mo_file_builder(env)})
Generate msgfmt tool
183
6
21,680
def RCScan():
    """Return a prototype Scanner instance for scanning RC source files.

    The pattern matches #include lines as well as resource statements
    (ICON, BITMAP, CURSOR, ...) that reference external files.
    """
    # Fixed: the pattern was built from non-raw strings containing "\s",
    # an invalid escape sequence (DeprecationWarning today, SyntaxError
    # in future Python versions).  Raw strings keep the regex identical.
    res_re = (r'^(?:\s*#\s*(?:include)|'
              r'.*?\s+(?:ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)'
              r'\s*.*?)'
              r'\s*(<|"| )([^>"\s]+)(?:[>"\s])*$')
    resScanner = SCons.Scanner.ClassicCPP("ResourceScanner",
                                          "$RCSUFFIXES",
                                          "CPPPATH",
                                          res_re,
                                          recursive=no_tlb)
    return resScanner
Return a prototype Scanner instance for scanning RC source files
152
11
21,681
def _read_linguas_from_files(env, linguas_files=None):
    """Parse LINGUAS file(s) and return the list of extracted languages.

    When *linguas_files* is a truthy value that is not a list, string,
    or Node (e.g. True), the default 'LINGUAS' file is read.
    """
    import SCons.Util
    import SCons.Environment
    global _re_comment
    global _re_lang
    if not SCons.Util.is_List(linguas_files) \
            and not SCons.Util.is_String(linguas_files) \
            and not isinstance(linguas_files, SCons.Node.FS.Base) \
            and linguas_files:
        # If, linguas_files==True or such, then read 'LINGUAS' file.
        linguas_files = ['LINGUAS']
    if linguas_files is None:
        return []
    fnodes = env.arg2nodes(linguas_files)
    linguas = []
    for fnode in fnodes:
        # Strip comments, then collect every language token found.
        contents = _re_comment.sub("", fnode.get_text_contents())
        ls = [l for l in _re_lang.findall(contents) if l]
        linguas.extend(ls)
    return linguas
Parse LINGUAS file and return list of extracted languages
228
13
21,682
def _init_po_files(target, source, env):
    """Action function for the POInit builder.

    Initializes each missing PO target via $MSGINITCOM when POAUTOINIT
    is enabled; otherwise prints instructions telling the translator
    how to create the file manually.
    """
    nop = lambda target, source, env: 0
    if 'POAUTOINIT' in env:
        autoinit = env['POAUTOINIT']
    else:
        autoinit = False
    # Well, if everything outside works well, this loop should do single
    # iteration. Otherwise we are rebuilding all the targets even, if just
    # one has changed (but is this our fault?).
    for tgt in target:
        if not tgt.exists():
            if autoinit:
                action = SCons.Action.Action('$MSGINITCOM', '$MSGINITCOMSTR')
            else:
                msg = 'File ' + repr(str(tgt)) + ' does not exist. ' \
                      + 'If you are a translator, you can create it through: \n' \
                      + '$MSGINITCOM'
                action = SCons.Action.Action(nop, msg)
            status = action([tgt], source, env)
            if status:
                return status
    return 0
Action function for POInit builder .
225
7
21,683
def _create_node(self, name, factory, directory=None, create=1):
    """Create a node via *factory* and apply this object's default settings.

    Applies the noclean/precious flags, optionally keeps the node out
    of the default targets, and attaches it to the configured alias.
    """
    import SCons.Util

    node = factory(name, directory, create)
    node.set_noclean(self.noclean)
    node.set_precious(self.precious)

    if self.nodefault:
        # Keep the node out of the default '.' target.
        self.env.Ignore('.', node)
    if self.alias:
        self.env.AlwaysBuild(self.env.Alias(self.alias, node))

    return node
Create node and set it up to factory settings .
108
10
21,684
def Entry(self, name, directory=None, create=1):
    """Create an SCons.Node.FS.Entry using this object's factory settings."""
    entry_factory = self.env.fs.Entry
    return self._create_node(name, entry_factory, directory, create)
Create SCons . Node . FS . Entry
38
9
21,685
def File(self, name, directory=None, create=1):
    """Create an SCons.Node.FS.File using this object's factory settings."""
    file_factory = self.env.fs.File
    return self._create_node(name, file_factory, directory, create)
Create SCons . Node . FS . File
38
9
21,686
def allocate_stream(self, stream_type, stream_id=None, previous=None, attach=False):
    """Allocate a new stream of the given type.

    When *stream_id* is None an id is auto-assigned from the internally
    managed range starting at StreamAllocator.StartingID; explicit ids
    must stay below that range.

    Raises:
        ArgumentError: on an unknown stream_type, or an explicit id
            inside the internally managed range.
    """
    if stream_type not in DataStream.TypeToString:
        raise ArgumentError("Unknown stream type in allocate_stream", stream_type=stream_type)

    if stream_id is not None and stream_id >= StreamAllocator.StartingID:
        raise ArgumentError("Attempted to explicitly allocate a stream id in the internally managed id range",
                            stream_id=stream_id, started_id=StreamAllocator.StartingID)

    # If the stream id is not explicitly given, we need to manage and track it
    # from our autoallocate range
    if stream_id is None:
        if stream_type not in self._next_id:
            self._next_id[stream_type] = StreamAllocator.StartingID

        stream_id = self._next_id[stream_type]
        self._next_id[stream_type] += 1

    # Keep track of how many downstream nodes are attached to this stream so
    # that we know when we need to split it into two.
    stream = DataStream(stream_type, stream_id)
    if stream not in self._allocated_streams:
        self._allocated_streams[stream] = (stream, 0, previous)

    if attach:
        stream = self.attach_stream(stream)

    return stream
Allocate a new stream of the given type .
300
10
21,687
def attach_stream ( self , stream ) : curr_stream , count , prev = self . _allocated_streams [ stream ] # Check if we need to split this stream and allocate a new one if count == ( self . model . get ( u'max_node_outputs' ) - 1 ) : new_stream = self . allocate_stream ( curr_stream . stream_type , previous = curr_stream ) copy_desc = u"({} always) => {} using copy_all_a" . format ( curr_stream , new_stream ) self . sensor_graph . add_node ( copy_desc ) self . _allocated_streams [ stream ] = ( new_stream , 1 , curr_stream ) # If we are splitting a constant stream, make sure we also duplicate the initialization value # FIXME: If there is no default value for the stream, that is probably a warning since all constant # streams should be initialized with a value. if curr_stream . stream_type == DataStream . ConstantType and curr_stream in self . sensor_graph . constant_database : self . sensor_graph . add_constant ( new_stream , self . sensor_graph . constant_database [ curr_stream ] ) return new_stream self . _allocated_streams [ stream ] = ( curr_stream , count + 1 , prev ) return curr_stream
Notify that we would like to attach a node input to this stream .
306
15
21,688
def _find_v1_settings(self, settings):
    """Parse a v1 module_settings.json dictionary into a TileInfo.

    Raises:
        DataError: if no module (or more than one) is defined, or the
            module name does not match an entry in 'modules'.
    """
    if 'module_name' in settings:
        modname = settings['module_name']
    if 'modules' not in settings or len(settings['modules']) == 0:
        raise DataError("No modules defined in module_settings.json file")
    elif len(settings['modules']) > 1:
        raise DataError("Multiple modules defined in module_settings.json file",
                        modules=[x for x in settings['modules']])
    else:
        # NOTE(review): when exactly one module is defined, this overwrites
        # any 'module_name' value read above -- confirm intended precedence.
        modname = list(settings['modules'])[0]

    if modname not in settings['modules']:
        raise DataError("Module name does not correspond with an entry in the modules directory",
                        name=modname, modules=[x for x in settings['modules']])

    release_info = self._load_release_info(settings)
    modsettings = settings['modules'][modname]
    architectures = settings.get('architectures', {})

    target_defs = settings.get('module_targets', {})
    targets = target_defs.get(modname, [])

    return TileInfo(modname, modsettings, architectures, targets, release_info)
Parse a v1 module_settings . json file .
281
12
21,689
def _ensure_product_string ( cls , product ) : if isinstance ( product , str ) : return product if isinstance ( product , list ) : return os . path . join ( * product ) raise DataError ( "Unknown object (not str or list) specified as a component product" , product = product )
Ensure that all product locations are strings .
69
9
21,690
def find_products(self, product_type):
    """Search for all products of *product_type* declared by this tile.

    Honors any active product filter, converts each product location to
    a string, and applies path post-processing for product types
    declared in PATH_PRODUCTS.
    """
    is_list_type = product_type in self.LIST_PRODUCTS

    if self.filter_prods and is_list_type and product_type not in self.desired_prods:
        return []

    if is_list_type:
        found = self.products.get(product_type, [])
    else:
        # Non-list products are stored as {location: type}; keep those of
        # the requested type that survive any active filter.
        found = [name for (name, prod_type) in self.products.items()
                 if prod_type == product_type
                 and (not self.filter_prods or name in self.desired_prods)]

    found = [self._ensure_product_string(name) for name in found]

    declaration = self.PATH_PRODUCTS.get(product_type)
    if declaration is not None:
        found = [self._process_product_path(name, declaration) for name in found]

    return found
Search for products of a given type .
205
8
21,691
def library_directories(self):
    """Return directories containing static libraries built by this tile.

    Returns the tile's output folder when at least one 'library'
    product exists, otherwise an empty list.
    """
    if self.find_products('library'):
        return [os.path.join(self.output_folder)]
    return []
Return a list of directories containing any static libraries built by this IOTile .
49
16
21,692
def filter_products(self, desired_prods):
    """When asked for a product, filter to only those on this list.

    Args:
        desired_prods (iterable): the product names/types that should
            remain visible to find_products().
    """
    # Flag checked by find_products() before returning results.
    self.filter_prods = True
    self.desired_prods = set(desired_prods)
When asked for a product filter only those on this list .
35
12
21,693
def format_ascii ( sensor_graph ) : cmdfile = CommandFile ( "Sensor Graph" , "1.0" ) # Clear any old sensor graph cmdfile . add ( "set_online" , False ) cmdfile . add ( "clear" ) cmdfile . add ( "reset" ) # Load in the nodes for node in sensor_graph . dump_nodes ( ) : cmdfile . add ( 'add_node' , node ) # Load in the streamers for streamer in sensor_graph . streamers : other = 0xFF if streamer . with_other is not None : other = streamer . with_other args = [ streamer . selector , streamer . dest , streamer . automatic , streamer . format , streamer . report_type , other ] cmdfile . add ( 'add_streamer' , * args ) # Load all the constants for stream , value in sorted ( sensor_graph . constant_database . items ( ) , key = lambda x : x [ 0 ] . encode ( ) ) : cmdfile . add ( "push_reading" , stream , value ) # Persist the sensor graph cmdfile . add ( "persist" ) cmdfile . add ( "set_online" , True ) return cmdfile . dump ( )
Format this sensor graph as a loadable ascii file format .
286
14
21,694
def clear(self):
    """Remove all nodes, streamers, and databases from this sensor graph."""
    for list_attr in ('roots', 'nodes', 'streamers'):
        setattr(self, list_attr, [])
    for map_attr in ('constant_database', 'metadata_database', 'config_database'):
        setattr(self, map_attr, {})
Clear all nodes from this sensor_graph .
49
9
21,695
def add_node(self, node_descriptor):
    """Add a node to the sensor graph based on the description given.

    Parses the descriptor, wires the node's inputs to stream walkers,
    connects it to existing producers and consumers, and resolves its
    processing function.

    Raises:
        ResourceUsageError: if the node limit is exceeded.
        NodeConnectionError: if a buffered input has no producer yet.
        ProcessingFunctionError: if the processing function cannot be
            found in installed packages.
    """
    if self._max_nodes is not None and len(self.nodes) >= self._max_nodes:
        raise ResourceUsageError("Maximum number of nodes exceeded", max_nodes=self._max_nodes)

    node, inputs, processor = parse_node_descriptor(node_descriptor, self.model)

    in_root = False

    for i, input_data in enumerate(inputs):
        selector, trigger = input_data

        walker = self.sensor_log.create_walker(selector)

        # Constant walkers begin life initialized to 0 so they always read correctly
        if walker.selector.inexhaustible:
            walker.reading = IOTileReading(0xFFFFFFFF, walker.selector.as_stream(), 0)

        node.connect_input(i, walker, trigger)

        if selector.input and not in_root:
            self.roots.append(node)
            in_root = True  # Make sure we only add to root list once
        else:
            # Wire this input to every existing node that produces a
            # matching stream.
            found = False
            for other in self.nodes:
                if selector.matches(other.stream):
                    other.connect_output(node)
                    found = True

            if not found and selector.buffered:
                raise NodeConnectionError("Node has input that refers to another node that has not been created yet",
                                          node_descriptor=node_descriptor,
                                          input_selector=str(selector),
                                          input_index=i)

    # Also make sure we add this node's output to any other existing node's inputs
    # this is important for constant nodes that may be written from multiple places
    # FIXME: Make sure when we emit nodes, they are topologically sorted
    for other_node in self.nodes:
        for selector, trigger in other_node.inputs:
            if selector.matches(node.stream):
                node.connect_output(other_node)

    # Find and load the processing function for this node
    func = self.find_processing_function(processor)
    if func is None:
        raise ProcessingFunctionError("Could not find processing function in installed packages",
                                      func_name=processor)

    node.set_func(processor, func)
    self.nodes.append(node)
Add a node to the sensor graph based on the description given .
475
13
21,696
def add_config(self, slot, config_id, config_type, value):
    """Record a config variable assignment for *slot* in this sensor graph."""
    slot_configs = self.config_database.setdefault(slot, {})
    slot_configs[config_id] = (config_type, value)
Add a config variable assignment to this sensor graph .
62
10
21,697
def add_streamer(self, streamer):
    """Attach *streamer* to this sensor graph, assigning it the next index.

    Raises:
        ResourceUsageError: if the maximum number of streamers would be
            exceeded.
    """
    limit = self._max_streamers
    if limit is not None and len(self.streamers) >= limit:
        raise ResourceUsageError("Maximum number of streamers exceeded", max_streamers=limit)

    streamer.link_to_storage(self.sensor_log)
    streamer.index = len(self.streamers)
    self.streamers.append(streamer)
Add a streamer to this sensor graph .
104
9
21,698
def add_constant(self, stream, value):
    """Store a constant value for *stream* in this sensor graph.

    Raises:
        ArgumentError: if a constant is already stored for the stream.
    """
    if stream in self.constant_database:
        raise ArgumentError("Attempted to set the same constant twice",
                            stream=stream,
                            old_value=self.constant_database[stream],
                            new_value=value)

    self.constant_database[stream] = value
Store a constant value for use in this sensor graph .
70
11
21,699
def add_metadata(self, name, value):
    """Attach a named piece of metadata to this sensor graph.

    Raises:
        ArgumentError: if metadata with this name is already set.
    """
    if name in self.metadata_database:
        raise ArgumentError("Attempted to set the same metadata value twice",
                            name=name,
                            old_value=self.metadata_database[name],
                            new_value=value)

    self.metadata_database[name] = value
Attach a piece of metadata to this sensorgraph .
70
11