idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
21,700
def RemoveMethod ( self , function ) : self . added_methods = [ dm for dm in self . added_methods if not dm . method is function ]
Removes the specified function s MethodWrapper from the added_methods list so we don t re - bind it when making a clone .
21,701
def Override ( self , overrides ) : if not overrides : return self o = copy_non_reserved_keywords ( overrides ) if not o : return self overrides = { } merges = None for key , value in o . items ( ) : if key == 'parse_flags' : merges = value else : overrides [ key ] = SCons . Subst . scons_subst_once ( value , self , key ) env = OverrideEnvironment ( self , overrides ) if merges : env . MergeFlags ( merges ) return env
Produce a modified environment whose variables are overridden by the overrides dictionaries . overrides is a dictionary that will override the variables of this environment .
21,702
def MergeFlags ( self , args , unique = 1 , dict = None ) : if dict is None : dict = self if not SCons . Util . is_Dict ( args ) : args = self . ParseFlags ( args ) if not unique : self . Append ( ** args ) return self for key , value in args . items ( ) : if not value : continue try : orig = self [ key ] except KeyError : orig = value else : if not orig : orig = value elif value : try : orig = orig + value except ( KeyError , TypeError ) : try : add_to_orig = orig . append except AttributeError : value . insert ( 0 , orig ) orig = value else : add_to_orig ( value ) t = [ ] if key [ - 4 : ] == 'PATH' : for v in orig : if v not in t : t . append ( v ) else : orig . reverse ( ) for v in orig : if v not in t : t . insert ( 0 , v ) self [ key ] = t return self
Merge the dict in args into the construction variables of this env or the passed - in dict . If args is not a dict it is converted into a dict using ParseFlags . If unique is not set the flags are appended rather than merged .
21,703
def get_factory ( self , factory , default = 'File' ) : name = default try : is_node = issubclass ( factory , SCons . Node . FS . Base ) except TypeError : pass else : if is_node : try : name = factory . __name__ except AttributeError : pass else : factory = None if not factory : factory = getattr ( self . fs , name ) return factory
Return a factory function for creating Nodes for this construction environment .
21,704
def Append ( self , ** kw ) : kw = copy_non_reserved_keywords ( kw ) for key , val in kw . items ( ) : try : if key == 'CPPDEFINES' and SCons . Util . is_String ( self . _dict [ key ] ) : self . _dict [ key ] = [ self . _dict [ key ] ] orig = self . _dict [ key ] except KeyError : if key == 'CPPDEFINES' and SCons . Util . is_String ( val ) : self . _dict [ key ] = [ val ] else : self . _dict [ key ] = val else : try : update_dict = orig . update except AttributeError : try : self . _dict [ key ] = orig + val except ( KeyError , TypeError ) : try : add_to_orig = orig . append except AttributeError : if orig : val . insert ( 0 , orig ) self . _dict [ key ] = val else : if val : add_to_orig ( val ) else : if SCons . Util . is_List ( val ) : if key == 'CPPDEFINES' : tmp = [ ] for ( k , v ) in orig . items ( ) : if v is not None : tmp . append ( ( k , v ) ) else : tmp . append ( ( k , ) ) orig = tmp orig += val self . _dict [ key ] = orig else : for v in val : orig [ v ] = None else : try : update_dict ( val ) except ( AttributeError , TypeError , ValueError ) : if SCons . Util . is_Dict ( val ) : for k , v in val . items ( ) : orig [ k ] = v else : orig [ val ] = None self . scanner_map_delete ( kw )
Append values to existing construction variables in an Environment .
21,705
def AppendENVPath ( self , name , newpath , envname = 'ENV' , sep = os . pathsep , delete_existing = 1 ) : orig = '' if envname in self . _dict and name in self . _dict [ envname ] : orig = self . _dict [ envname ] [ name ] nv = SCons . Util . AppendPath ( orig , newpath , sep , delete_existing , canonicalize = self . _canonicalize ) if envname not in self . _dict : self . _dict [ envname ] = { } self . _dict [ envname ] [ name ] = nv
Append path elements to the path name in the ENV dictionary for this environment . Will only add any particular path once and will normpath and normcase all paths to help assure this . This can also handle the case where the env variable is a list instead of a string .
21,706
def Detect ( self , progs ) : if not SCons . Util . is_List ( progs ) : progs = [ progs ] for prog in progs : path = self . WhereIs ( prog ) if path : return prog return None
Return the first available program in progs .
21,707
def Dump ( self , key = None ) : import pprint pp = pprint . PrettyPrinter ( indent = 2 ) if key : dict = self . Dictionary ( key ) else : dict = self . Dictionary ( ) return pp . pformat ( dict )
Using the standard Python pretty printer return the contents of the scons build environment as a string .
21,708
def FindIxes ( self , paths , prefix , suffix ) : suffix = self . subst ( '$' + suffix ) prefix = self . subst ( '$' + prefix ) for path in paths : dir , name = os . path . split ( str ( path ) ) if name [ : len ( prefix ) ] == prefix and name [ - len ( suffix ) : ] == suffix : return path
Search a list of paths for something that matches the prefix and suffix .
21,709
def ParseDepends ( self , filename , must_exist = None , only_one = 0 ) : filename = self . subst ( filename ) try : fp = open ( filename , 'r' ) except IOError : if must_exist : raise return lines = SCons . Util . LogicalLines ( fp ) . readlines ( ) lines = [ l for l in lines if l [ 0 ] != '#' ] tdlist = [ ] for line in lines : try : target , depends = line . split ( ':' , 1 ) except ( AttributeError , ValueError ) : pass else : tdlist . append ( ( target . split ( ) , depends . split ( ) ) ) if only_one : targets = [ ] for td in tdlist : targets . extend ( td [ 0 ] ) if len ( targets ) > 1 : raise SCons . Errors . UserError ( "More than one dependency target found in `%s': %s" % ( filename , targets ) ) for target , depends in tdlist : self . Depends ( target , depends )
Parse a mkdep - style file for explicit dependencies . This is completely abusable and should be unnecessary in the normal case of proper SCons configuration but it may help make the transition from a Make hierarchy easier for some people to swallow . It can also be genuinely useful when using a tool that can write a . d file but for which writing a scanner would be too complicated .
21,710
def Prepend ( self , ** kw ) : kw = copy_non_reserved_keywords ( kw ) for key , val in kw . items ( ) : try : orig = self . _dict [ key ] except KeyError : self . _dict [ key ] = val else : try : update_dict = orig . update except AttributeError : try : self . _dict [ key ] = val + orig except ( KeyError , TypeError ) : try : add_to_val = val . append except AttributeError : if val : orig . insert ( 0 , val ) else : if orig : add_to_val ( orig ) self . _dict [ key ] = val else : if SCons . Util . is_List ( val ) : for v in val : orig [ v ] = None else : try : update_dict ( val ) except ( AttributeError , TypeError , ValueError ) : if SCons . Util . is_Dict ( val ) : for k , v in val . items ( ) : orig [ k ] = v else : orig [ val ] = None self . scanner_map_delete ( kw )
Prepend values to existing construction variables in an Environment .
21,711
def PrependENVPath ( self , name , newpath , envname = 'ENV' , sep = os . pathsep , delete_existing = 1 ) : orig = '' if envname in self . _dict and name in self . _dict [ envname ] : orig = self . _dict [ envname ] [ name ] nv = SCons . Util . PrependPath ( orig , newpath , sep , delete_existing , canonicalize = self . _canonicalize ) if envname not in self . _dict : self . _dict [ envname ] = { } self . _dict [ envname ] [ name ] = nv
Prepend path elements to the path name in the ENV dictionary for this environment . Will only add any particular path once and will normpath and normcase all paths to help assure this . This can also handle the case where the env variable is a list instead of a string .
21,712
def PrependUnique ( self , delete_existing = 0 , ** kw ) : kw = copy_non_reserved_keywords ( kw ) for key , val in kw . items ( ) : if SCons . Util . is_List ( val ) : val = _delete_duplicates ( val , not delete_existing ) if key not in self . _dict or self . _dict [ key ] in ( '' , None ) : self . _dict [ key ] = val elif SCons . Util . is_Dict ( self . _dict [ key ] ) and SCons . Util . is_Dict ( val ) : self . _dict [ key ] . update ( val ) elif SCons . Util . is_List ( val ) : dk = self . _dict [ key ] if not SCons . Util . is_List ( dk ) : dk = [ dk ] if delete_existing : dk = [ x for x in dk if x not in val ] else : val = [ x for x in val if x not in dk ] self . _dict [ key ] = val + dk else : dk = self . _dict [ key ] if SCons . Util . is_List ( dk ) : if delete_existing : dk = [ x for x in dk if x not in val ] self . _dict [ key ] = [ val ] + dk else : if not val in dk : self . _dict [ key ] = [ val ] + dk else : if delete_existing : dk = [ x for x in dk if x not in val ] self . _dict [ key ] = val + dk self . scanner_map_delete ( kw )
Prepend values to existing construction variables in an Environment if they re not already there . If delete_existing is 1 removes existing values first so values move to front .
21,713
def ReplaceIxes ( self , path , old_prefix , old_suffix , new_prefix , new_suffix ) : old_prefix = self . subst ( '$' + old_prefix ) old_suffix = self . subst ( '$' + old_suffix ) new_prefix = self . subst ( '$' + new_prefix ) new_suffix = self . subst ( '$' + new_suffix ) dir , name = os . path . split ( str ( path ) ) if name [ : len ( old_prefix ) ] == old_prefix : name = name [ len ( old_prefix ) : ] if name [ - len ( old_suffix ) : ] == old_suffix : name = name [ : - len ( old_suffix ) ] return os . path . join ( dir , new_prefix + name + new_suffix )
Replace old_prefix with new_prefix and old_suffix with new_suffix .
21,714
def WhereIs ( self , prog , path = None , pathext = None , reject = [ ] ) : if path is None : try : path = self [ 'ENV' ] [ 'PATH' ] except KeyError : pass elif SCons . Util . is_String ( path ) : path = self . subst ( path ) if pathext is None : try : pathext = self [ 'ENV' ] [ 'PATHEXT' ] except KeyError : pass elif SCons . Util . is_String ( pathext ) : pathext = self . subst ( pathext ) prog = SCons . Util . CLVar ( self . subst ( prog ) ) path = SCons . Util . WhereIs ( prog [ 0 ] , path , pathext , reject ) if path : return path return None
Find prog in the path .
21,715
def Command ( self , target , source , action , ** kw ) : bkw = { 'action' : action , 'target_factory' : self . fs . Entry , 'source_factory' : self . fs . Entry , } try : bkw [ 'source_scanner' ] = kw [ 'source_scanner' ] except KeyError : pass else : del kw [ 'source_scanner' ] bld = SCons . Builder . Builder ( ** bkw ) return bld ( self , target , source , ** kw )
Builds the supplied target files from the supplied source files using the supplied action . Action may be any type that the Builder constructor will accept for an action .
21,716
def Depends ( self , target , dependency ) : tlist = self . arg2nodes ( target , self . fs . Entry ) dlist = self . arg2nodes ( dependency , self . fs . Entry ) for t in tlist : t . add_dependency ( dlist ) return tlist
Explicity specify that target s depend on dependency .
21,717
def NoClean ( self , * targets ) : tlist = [ ] for t in targets : tlist . extend ( self . arg2nodes ( t , self . fs . Entry ) ) for t in tlist : t . set_noclean ( ) return tlist
Tags a target so that it will not be cleaned by - c
21,718
def NoCache ( self , * targets ) : tlist = [ ] for t in targets : tlist . extend ( self . arg2nodes ( t , self . fs . Entry ) ) for t in tlist : t . set_nocache ( ) return tlist
Tags a target so that it will not be cached
21,719
def Execute ( self , action , * args , ** kw ) : action = self . Action ( action , * args , ** kw ) result = action ( [ ] , [ ] , self ) if isinstance ( result , SCons . Errors . BuildError ) : errstr = result . errstr if result . filename : errstr = result . filename + ': ' + errstr sys . stderr . write ( "scons: *** %s\n" % errstr ) return result . status else : return result
Directly execute an action through an Environment
21,720
def Ignore ( self , target , dependency ) : tlist = self . arg2nodes ( target , self . fs . Entry ) dlist = self . arg2nodes ( dependency , self . fs . Entry ) for t in tlist : t . add_ignore ( dlist ) return tlist
Ignore a dependency .
21,721
def SideEffect ( self , side_effect , target ) : side_effects = self . arg2nodes ( side_effect , self . fs . Entry ) targets = self . arg2nodes ( target , self . fs . Entry ) for side_effect in side_effects : if side_effect . multiple_side_effect_has_builder ( ) : raise SCons . Errors . UserError ( "Multiple ways to build the same target were specified for: %s" % str ( side_effect ) ) side_effect . add_source ( targets ) side_effect . side_effect = 1 self . Precious ( side_effect ) for target in targets : target . side_effects . append ( side_effect ) return side_effects
Tell scons that side_effects are built as side effects of building targets .
21,722
def Split ( self , arg ) : if SCons . Util . is_List ( arg ) : return list ( map ( self . subst , arg ) ) elif SCons . Util . is_String ( arg ) : return self . subst ( arg ) . split ( ) else : return [ self . subst ( arg ) ]
This function converts a string or list into a list of strings or Nodes . This makes things easier for users by allowing files to be specified as a white - space separated list to be split .
21,723
def FindSourceFiles ( self , node = '.' ) : node = self . arg2nodes ( node , self . fs . Entry ) [ 0 ] sources = [ ] def build_source ( ss ) : for s in ss : if isinstance ( s , SCons . Node . FS . Dir ) : build_source ( s . all_children ( ) ) elif s . has_builder ( ) : build_source ( s . sources ) elif isinstance ( s . disambiguate ( ) , SCons . Node . FS . File ) : sources . append ( s ) build_source ( node . all_children ( ) ) def final_source ( node ) : while ( node != node . srcnode ( ) ) : node = node . srcnode ( ) return node sources = list ( map ( final_source , sources ) ) return list ( set ( sources ) )
returns a list of all source files .
21,724
def FindInstalledFiles ( self ) : from SCons . Tool import install if install . _UNIQUE_INSTALLED_FILES is None : install . _UNIQUE_INSTALLED_FILES = SCons . Util . uniquer_hashables ( install . _INSTALLED_FILES ) return install . _UNIQUE_INSTALLED_FILES
returns the list of all targets of the Install and InstallAs Builder .
21,725
def generate ( env ) : global PDFLaTeXAction if PDFLaTeXAction is None : PDFLaTeXAction = SCons . Action . Action ( '$PDFLATEXCOM' , '$PDFLATEXCOMSTR' ) global PDFLaTeXAuxAction if PDFLaTeXAuxAction is None : PDFLaTeXAuxAction = SCons . Action . Action ( PDFLaTeXAuxFunction , strfunction = SCons . Tool . tex . TeXLaTeXStrFunction ) env . AppendUnique ( LATEXSUFFIXES = SCons . Tool . LaTeXSuffixes ) from . import pdf pdf . generate ( env ) bld = env [ 'BUILDERS' ] [ 'PDF' ] bld . add_action ( '.ltx' , PDFLaTeXAuxAction ) bld . add_action ( '.latex' , PDFLaTeXAuxAction ) bld . add_emitter ( '.ltx' , SCons . Tool . tex . tex_pdf_emitter ) bld . add_emitter ( '.latex' , SCons . Tool . tex . tex_pdf_emitter ) SCons . Tool . tex . generate_common ( env )
Add Builders and construction variables for pdflatex to an Environment .
21,726
def installShlibLinks ( dest , source , env ) : Verbose = False symlinks = listShlibLinksToInstall ( dest , source , env ) if Verbose : print ( 'installShlibLinks: symlinks={:r}' . format ( SCons . Tool . StringizeLibSymlinks ( symlinks ) ) ) if symlinks : SCons . Tool . CreateLibSymlinks ( env , symlinks ) return
If we are installing a versioned shared library create the required links .
21,727
def installFunc ( target , source , env ) : try : install = env [ 'INSTALL' ] except KeyError : raise SCons . Errors . UserError ( 'Missing INSTALL construction variable.' ) assert len ( target ) == len ( source ) , "Installing source %s into target %s: target and source lists must have same length." % ( list ( map ( str , source ) ) , list ( map ( str , target ) ) ) for t , s in zip ( target , source ) : if install ( t . get_path ( ) , s . get_path ( ) , env ) : return 1 return 0
Install a source file into a target using the function specified as the INSTALL construction variable .
21,728
def installFuncVersionedLib ( target , source , env ) : try : install = env [ 'INSTALLVERSIONEDLIB' ] except KeyError : raise SCons . Errors . UserError ( 'Missing INSTALLVERSIONEDLIB construction variable.' ) assert len ( target ) == len ( source ) , "Installing source %s into target %s: target and source lists must have same length." % ( list ( map ( str , source ) ) , list ( map ( str , target ) ) ) for t , s in zip ( target , source ) : if hasattr ( t . attributes , 'shlibname' ) : tpath = os . path . join ( t . get_dir ( ) , t . attributes . shlibname ) else : tpath = t . get_path ( ) if install ( tpath , s . get_path ( ) , env ) : return 1 return 0
Install a versioned library into a target using the function specified as the INSTALLVERSIONEDLIB construction variable .
21,729
def parse_multiple_rpcs ( cls , record_data ) : rpcs = [ ] while len ( record_data ) > 0 : total_length , record_type = struct . unpack_from ( "<LB3x" , record_data ) if record_type != SendErrorCheckingRPCRecord . RecordType : raise ArgumentError ( "Record set contains a record that is not an error checking RPC" , record_type = record_type ) record_contents = record_data [ 8 : total_length ] parsed_rpc = cls . _parse_rpc_info ( record_contents ) rpcs . append ( parsed_rpc ) record_data = record_data [ total_length : ] return rpcs
Parse record_data into multiple error checking rpcs .
21,730
def generate ( env ) : SCons . Tool . cc . generate ( env ) env [ 'CC' ] = env . Detect ( compilers ) or 'clang' if env [ 'PLATFORM' ] in [ 'cygwin' , 'win32' ] : env [ 'SHCCFLAGS' ] = SCons . Util . CLVar ( '$CCFLAGS' ) else : env [ 'SHCCFLAGS' ] = SCons . Util . CLVar ( '$CCFLAGS -fPIC' ) if env [ 'CC' ] : pipe = SCons . Action . _subproc ( env , [ env [ 'CC' ] , '--version' ] , stdin = 'devnull' , stderr = 'devnull' , stdout = subprocess . PIPE ) if pipe . wait ( ) != 0 : return line = pipe . stdout . readline ( ) if sys . version_info [ 0 ] > 2 : line = line . decode ( ) match = re . search ( r'clang +version +([0-9]+(?:\.[0-9]+)+)' , line ) if match : env [ 'CCVERSION' ] = match . group ( 1 )
Add Builders and construction variables for clang to an Environment .
21,731
def wait_running ( self , timeout = None ) : flag = self . _running . wait ( timeout ) if flag is False : raise TimeoutExpiredError ( "Timeout waiting for thread to start running" )
Wait for the thread to pass control to its routine .
21,732
def create_event ( self , register = False ) : event = asyncio . Event ( loop = self . _loop ) if register : self . _events . add ( event ) return event
Create an asyncio . Event inside the emulation loop .
21,733
def create_queue ( self , register = False ) : queue = asyncio . Queue ( loop = self . _loop ) if register : self . _work_queues . add ( queue ) return queue
Create a new work queue and optionally register it .
21,734
def start ( self ) : if self . _started is True : raise ArgumentError ( "EmulationLoop.start() called multiple times" ) self . _thread = threading . Thread ( target = self . _loop_thread_main ) self . _thread . start ( ) self . _started = True
Start the background emulation loop .
21,735
def stop ( self ) : if self . _started is False : raise ArgumentError ( "EmulationLoop.stop() called without calling start()" ) self . verify_calling_thread ( False , "Cannot call EmulationLoop.stop() from inside the event loop" ) if self . _thread . is_alive ( ) : self . _loop . call_soon_threadsafe ( self . _loop . create_task , self . _clean_shutdown ( ) ) self . _thread . join ( )
Stop the background emulation loop .
21,736
def wait_idle ( self , timeout = 1.0 ) : async def _awaiter ( ) : background_work = { x . join ( ) for x in self . _work_queues } for event in self . _events : if not event . is_set ( ) : background_work . add ( event . wait ( ) ) _done , pending = await asyncio . wait ( background_work , timeout = timeout ) if len ( pending ) > 0 : raise TimeoutExpiredError ( "Timeout waiting for event loop to become idle" , pending = pending ) if self . _on_emulation_thread ( ) : return asyncio . wait_for ( _awaiter ( ) , timeout = timeout ) self . run_task_external ( _awaiter ( ) ) return None
Wait until the rpc queue is empty .
21,737
def run_task_external ( self , coroutine ) : self . verify_calling_thread ( False , 'run_task_external must not be called from the emulation thread' ) future = asyncio . run_coroutine_threadsafe ( coroutine , self . _loop ) return future . result ( )
Inject a task into the emulation loop and wait for it to finish .
21,738
def call_rpc_external ( self , address , rpc_id , arg_payload , timeout = 10.0 ) : self . verify_calling_thread ( False , "call_rpc_external is for use **outside** of the event loop" ) response = CrossThreadResponse ( ) self . _loop . call_soon_threadsafe ( self . _rpc_queue . put_rpc , address , rpc_id , arg_payload , response ) try : return response . wait ( timeout ) except RPCRuntimeError as err : return err . binary_error
Call an RPC from outside of the event loop and block until it finishes .
21,739
async def await_rpc ( self , address , rpc_id , * args , ** kwargs ) : self . verify_calling_thread ( True , "await_rpc must be called from **inside** the event loop" ) if isinstance ( rpc_id , RPCDeclaration ) : arg_format = rpc_id . arg_format resp_format = rpc_id . resp_format rpc_id = rpc_id . rpc_id else : arg_format = kwargs . get ( 'arg_format' , None ) resp_format = kwargs . get ( 'resp_format' , None ) arg_payload = b'' if arg_format is not None : arg_payload = pack_rpc_payload ( arg_format , args ) self . _logger . debug ( "Sending rpc to %d:%04X, payload=%s" , address , rpc_id , args ) response = AwaitableResponse ( ) self . _rpc_queue . put_rpc ( address , rpc_id , arg_payload , response ) try : resp_payload = await response . wait ( 1.0 ) except RPCRuntimeError as err : resp_payload = err . binary_error if resp_format is None : return [ ] resp = unpack_rpc_payload ( resp_format , resp_payload ) return resp
Send an RPC from inside the EmulationLoop .
21,740
def verify_calling_thread ( self , should_be_emulation , message = None ) : if should_be_emulation == self . _on_emulation_thread ( ) : return if message is None : message = "Operation performed on invalid thread" raise InternalError ( message )
Verify if the calling thread is or is not the emulation thread .
21,741
def add_task ( self , tile_address , coroutine ) : self . _loop . call_soon_threadsafe ( self . _add_task , tile_address , coroutine )
Add a task into the event loop .
21,742
async def stop_tasks ( self , address ) : tasks = self . _tasks . get ( address , [ ] ) for task in tasks : task . cancel ( ) asyncio . gather ( * tasks , return_exceptions = True ) self . _tasks [ address ] = [ ]
Clear all tasks pertaining to a tile .
21,743
async def _clean_shutdown ( self ) : remaining_tasks = [ ] for task in self . _tasks . get ( None , [ ] ) : self . _logger . debug ( "Cancelling task at shutdown %s" , task ) task . cancel ( ) remaining_tasks . append ( task ) asyncio . gather ( * remaining_tasks , return_exceptions = True ) if len ( remaining_tasks ) > 0 : del self . _tasks [ None ] remaining_tasks = [ ] for address in sorted ( self . _tasks , reverse = True ) : if address is None : continue self . _logger . debug ( "Shutting down tasks for tile at %d" , address ) for task in self . _tasks . get ( address , [ ] ) : task . cancel ( ) remaining_tasks . append ( task ) asyncio . gather ( * remaining_tasks , return_exceptions = True ) await self . _rpc_queue . stop ( ) self . _loop . stop ( )
Cleanly shutdown the emulation loop .
21,744
def _add_task ( self , tile_address , coroutine ) : self . verify_calling_thread ( True , "_add_task is not thread safe" ) if tile_address not in self . _tasks : self . _tasks [ tile_address ] = [ ] task = self . _loop . create_task ( coroutine ) self . _tasks [ tile_address ] . append ( task )
Add a task from within the event loop .
21,745
def key_rule ( self , regex , verifier ) : if regex is not None : regex = re . compile ( regex ) self . _additional_key_rules . append ( ( regex , verifier ) )
Add a rule with a pattern that should apply to all keys .
21,746
def stream ( self , report , callback = None ) : conn_id = self . _find_connection ( self . conn_string ) if isinstance ( report , BroadcastReport ) : self . adapter . notify_event_nowait ( self . conn_string , 'broadcast' , report ) elif conn_id is not None : self . adapter . notify_event_nowait ( self . conn_string , 'report' , report ) if callback is not None : callback ( isinstance ( report , BroadcastReport ) or ( conn_id is not None ) )
Queue data for streaming
21,747
def trace ( self , data , callback = None ) : conn_id = self . _find_connection ( self . conn_string ) if conn_id is not None : self . adapter . notify_event_nowait ( self . conn_string , 'trace' , data ) if callback is not None : callback ( conn_id is not None )
Queue data for tracing
21,748
def _load_device ( self , name , config ) : if config is None : config_dict = { } elif isinstance ( config , dict ) : config_dict = config elif config [ 0 ] == '#' : import base64 config_str = str ( base64 . b64decode ( config [ 1 : ] ) , 'utf-8' ) config_dict = json . loads ( config_str ) else : try : with open ( config , "r" ) as conf : data = json . load ( conf ) except IOError as exc : raise ArgumentError ( "Could not open config file" , error = str ( exc ) , path = config ) if 'device' not in data : raise ArgumentError ( "Invalid configuration file passed to VirtualDeviceAdapter" , device_name = name , config_path = config , missing_key = 'device' ) config_dict = data [ 'device' ] reg = ComponentRegistry ( ) if name . endswith ( '.py' ) : _name , device_factory = reg . load_extension ( name , class_filter = VirtualIOTileDevice , unique = True ) return device_factory ( config_dict ) seen_names = [ ] for device_name , device_factory in reg . load_extensions ( 'iotile.virtual_device' , class_filter = VirtualIOTileDevice , product_name = "virtual_device" ) : if device_name == name : return device_factory ( config_dict ) seen_names . append ( device_name ) raise ArgumentError ( "Could not find virtual_device by name" , name = name , known_names = seen_names )
Load a device either from a script or from an installed module
21,749
async def disconnect ( self , conn_id ) : self . _ensure_connection ( conn_id , True ) dev = self . _get_property ( conn_id , 'device' ) dev . connected = False self . _teardown_connection ( conn_id )
Asynchronously disconnect from a connected device
21,750
async def _send_scan_event ( self , device ) : conn_string = str ( device . iotile_id ) info = { 'connection_string' : conn_string , 'uuid' : device . iotile_id , 'signal_strength' : 100 , 'validity_period' : self . ExpirationTime } await self . notify_event ( conn_string , 'device_seen' , info )
Send a scan event from a device .
21,751
def rpc_name ( rpc_id ) : name = _RPC_NAME_MAP . get ( rpc_id ) if name is None : name = 'RPC 0x%04X' % rpc_id return name
Map an RPC id to a string name .
21,752
def stream_name ( stream_id ) : name = _STREAM_NAME_MAP . get ( stream_id ) if name is None : name = str ( DataStream . FromEncoded ( stream_id ) ) return "{} (0x{:04X})" . format ( name , stream_id )
Map a stream id to a human readable name .
21,753
def set_option ( self , name , value ) : if not name in self . settable : raise SCons . Errors . UserError ( "This option is not settable from a SConscript file: %s" % name ) if name == 'num_jobs' : try : value = int ( value ) if value < 1 : raise ValueError except ValueError : raise SCons . Errors . UserError ( "A positive integer is required: %s" % repr ( value ) ) elif name == 'max_drift' : try : value = int ( value ) except ValueError : raise SCons . Errors . UserError ( "An integer is required: %s" % repr ( value ) ) elif name == 'duplicate' : try : value = str ( value ) except ValueError : raise SCons . Errors . UserError ( "A string is required: %s" % repr ( value ) ) if not value in SCons . Node . FS . Valid_Duplicates : raise SCons . Errors . UserError ( "Not a valid duplication style: %s" % value ) SCons . Node . FS . set_duplicate ( value ) elif name == 'diskcheck' : try : value = diskcheck_convert ( value ) except ValueError as v : raise SCons . Errors . UserError ( "Not a valid diskcheck value: %s" % v ) if 'diskcheck' not in self . __dict__ : SCons . Node . FS . set_diskcheck ( value ) elif name == 'stack_size' : try : value = int ( value ) except ValueError : raise SCons . Errors . UserError ( "An integer is required: %s" % repr ( value ) ) elif name == 'md5_chunksize' : try : value = int ( value ) except ValueError : raise SCons . Errors . UserError ( "An integer is required: %s" % repr ( value ) ) elif name == 'warn' : if SCons . Util . is_String ( value ) : value = [ value ] value = self . __SConscript_settings__ . get ( name , [ ] ) + value SCons . Warnings . process_warn_strings ( value ) self . __SConscript_settings__ [ name ] = value
Sets an option from an SConscript file .
21,754
def format_help ( self , formatter ) : formatter . dedent ( ) result = formatter . format_heading ( self . title ) formatter . indent ( ) result = result + optparse . OptionContainer . format_help ( self , formatter ) return result
Format an option group s help text outdenting the title so it s flush with the SCons Options title we print at the top .
21,755
def _process_long_opt ( self , rargs , values ) : arg = rargs . pop ( 0 ) if "=" in arg : ( opt , next_arg ) = arg . split ( "=" , 1 ) rargs . insert ( 0 , next_arg ) had_explicit_value = True else : opt = arg had_explicit_value = False try : opt = self . _match_long_opt ( opt ) except optparse . BadOptionError : if self . preserve_unknown_options : self . largs . append ( arg ) if had_explicit_value : rargs . pop ( 0 ) return raise option = self . _long_opt [ opt ] if option . takes_value ( ) : nargs = option . nargs if nargs == '?' : if had_explicit_value : value = rargs . pop ( 0 ) else : value = option . const elif len ( rargs ) < nargs : if nargs == 1 : if not option . choices : self . error ( _ ( "%s option requires an argument" ) % opt ) else : msg = _ ( "%s option requires an argument " % opt ) msg += _ ( "(choose from %s)" % ', ' . join ( option . choices ) ) self . error ( msg ) else : self . error ( _ ( "%s option requires %d arguments" ) % ( opt , nargs ) ) elif nargs == 1 : value = rargs . pop ( 0 ) else : value = tuple ( rargs [ 0 : nargs ] ) del rargs [ 0 : nargs ] elif had_explicit_value : self . error ( _ ( "%s option does not take a value" ) % opt ) else : value = None option . process ( opt , value , values , self )
SCons - specific processing of long options .
21,756
def add_local_option ( self , * args , ** kw ) : try : group = self . local_option_group except AttributeError : group = SConsOptionGroup ( self , 'Local Options' ) group = self . add_option_group ( group ) self . local_option_group = group result = group . add_option ( * args , ** kw ) if result : setattr ( self . values . __defaults__ , result . dest , result . default ) self . reparse_local_options ( ) return result
Adds a local option to the parser .
21,757
def format_heading ( self , heading ) : if heading == 'Options' : heading = "SCons Options" return optparse . IndentedHelpFormatter . format_heading ( self , heading )
This translates any heading of options or Options into SCons Options . Unfortunately we have to do this here because those titles are hard - coded in the optparse calls .
21,758
def to_dict(self):
    """Convert this object into a dictionary.

    Returns:
        dict: A dict with keys 'commands', 'configs', 'short_name' and
        'versions' (the latter holding the module and api version tuples).
    """
    return {
        'commands': self.commands,
        'configs': self.configs,
        'short_name': self.name,
        'versions': {
            'module': self.module_version,
            'api': self.api_version,
        },
    }
Convert this object into a dictionary .
21,759
def set_api_version(self, major, minor):
    """Set the API version this module was designed for.

    Each component must fit in a single byte (0-255).

    Raises:
        ArgumentError: if either component does not fit in one byte.
    """
    for component in (major, minor):
        if not self._is_byte(component):
            raise ArgumentError("Invalid API version number with component that does not fit in 1 byte",
                                major=major, minor=minor)

    self.api_version = (major, minor)
Set the API version this module was designed for .
21,760
def set_module_version(self, major, minor, patch):
    """Set the module version for this module.

    Each component must fit in a single byte (0-255).

    Raises:
        ArgumentError: if any component does not fit in one byte.
    """
    if all(self._is_byte(part) for part in (major, minor, patch)):
        self.module_version = (major, minor, patch)
    else:
        raise ArgumentError("Invalid module version number with component that does not fit in 1 byte",
                            major=major, minor=minor, patch=patch)
Set the module version for this module .
21,761
def set_name(self, name):
    """Set the module name to a 6-byte string, space-padded on the right.

    Raises:
        ArgumentError: if the name is longer than 6 characters.
    """
    if len(name) > 6:
        raise ArgumentError("Name must be at most 6 characters long", name=name)

    # ljust pads with spaces up to exactly 6 characters (no-op if already 6).
    self.name = name.ljust(6)
Set the module name to a 6 byte string
21,762
def add_command(self, cmd_id, handler):
    """Add a command to the TBBlock.

    Args:
        cmd_id (int): the command id, a non-negative number fitting in 2 bytes
        handler: the handler to associate with the command

    Raises:
        ArgumentError: if cmd_id is out of range or already registered.
    """
    if not 0 <= cmd_id < 2**16:
        raise ArgumentError("Command ID in mib block is not a non-negative 2-byte number",
                            cmd_id=cmd_id, handler=handler)

    if cmd_id in self.commands:
        raise ArgumentError("Attempted to add the same command ID twice.",
                            cmd_id=cmd_id, existing_handler=self.commands[cmd_id],
                            new_handler=handler)

    self.commands[cmd_id] = handler
Add a command to the TBBlock .
21,763
def add_config(self, config_id, config_data):
    """Add a configuration variable to the MIB block.

    Args:
        config_id (int): the config id, a non-negative number fitting in 2 bytes
        config_data: the data to associate with the config variable

    Raises:
        ArgumentError: if config_id is out of range or already registered.
    """
    if not 0 <= config_id < 2**16:
        raise ArgumentError("Config ID in mib block is not a non-negative 2-byte number",
                            config_data=config_id, data=config_data)

    if config_id in self.configs:
        raise ArgumentError("Attempted to add the same command ID twice.",
                            config_data=config_id, old_data=self.configs[config_id],
                            new_data=config_data)

    self.configs[config_id] = config_data
Add a configuration variable to the MIB block
21,764
def _parse_hwtype(self):
    """Convert the numerical hardware id to a chip name.

    Looks self.hw_type up in the module-level KNOWN_HARDWARE_TYPES map and
    stores the result in self.chip_name, falling back to a descriptive
    placeholder string for unknown ids.
    """
    self.chip_name = KNOWN_HARDWARE_TYPES.get(self.hw_type, "Unknown Chip (type=%d)" % self.hw_type)
Convert the numerical hardware id to a chip name .
21,765
def render_template(self, template_name, out_path=None):
    """Render a template based on this TileBus Block.

    Delegates to the module-level render_template helper, using this
    block serialized via to_dict() as the template context.

    Args:
        template_name (str): the name of the template to render
        out_path (str): optional path to also write the rendered output to

    Returns:
        The rendered template, as returned by the module-level helper.
    """
    # NOTE: the bare name render_template resolves to the module-level
    # function here, not this method, so this does not recurse.
    return render_template(template_name, self.to_dict(), out_path=out_path)
Render a template based on this TileBus Block .
21,766
def Tag(env, target, source, *more_tags, **kw_tags):
    """Tag a file with the given arguments.

    Just sets the accordingly named attribute on the file object; tag names
    that do not already carry the PACKAGING_ prefix get it prepended.
    """
    # If target was not given, the positional args shift down by one:
    # what we received as "source" is really the target.
    if not target:
        target = source
        first_tag = None
    else:
        first_tag = source

    if first_tag:
        kw_tags[first_tag[0]] = ''

    if len(kw_tags) == 0 and len(more_tags) == 0:
        raise UserError("No tags given.")

    # Tags given as positional arguments become value-less (empty string) tags.
    for x in more_tags:
        kw_tags[x] = ''

    if not SCons.Util.is_List(target):
        target = [target]
    else:
        # hmm, sometimes the target list is a list of a list
        target = env.Flatten(target)

    for t in target:
        for (k, v) in kw_tags.items():
            # prefix every tag with PACKAGING_ unless it already has it
            if k[:10] != 'PACKAGING_':
                k = 'PACKAGING_' + k
            t.Tag(k, v)
Tag a file with the given arguments; this just sets the correspondingly named attribute on the file object so we can use it later.
21,767
def copy_attr(f1, f2):
    """Copy the special packaging file attributes from f1 to f2.

    Only tags beginning with PACKAGING_ are copied, and only when f2 does
    not already carry an attribute of that name.
    """
    if not f1._tags:
        return

    for tag in f1._tags:
        if tag[:10] == 'PACKAGING_' and not hasattr(f2, tag):
            f2.Tag(tag, f1.GetTag(tag))
copies the special packaging file attributes from f1 to f2 .
21,768
def putintopackageroot(target, source, env, pkgroot, honor_install_location=1):
    """Uses the CopyAs builder to copy all source files to the directory
    given in pkgroot.

    If a source file carries a PACKAGING_INSTALL_LOCATION tag and
    honor_install_location is set, that location (made relative) is used
    as the file's name under pkgroot; otherwise the file's own path is
    used.  Files already under pkgroot are passed through unchanged.
    All packaging attributes are copied onto the new files.

    Returns:
        (target, new_source) with the copied files substituted in.
    """
    # make sure the packageroot is a Dir object.
    if SCons.Util.is_String(pkgroot):
        pkgroot = env.Dir(pkgroot)
    if not SCons.Util.is_List(source):
        source = [source]

    new_source = []
    for file in source:
        if SCons.Util.is_String(file):
            file = env.File(file)

        # if the file is already in the pkgroot, there is nothing to do.
        if file.is_under(pkgroot):
            new_source.append(file)
        else:
            if file.GetTag('PACKAGING_INSTALL_LOCATION') and honor_install_location:
                new_name = make_path_relative(file.GetTag('PACKAGING_INSTALL_LOCATION'))
            else:
                new_name = make_path_relative(file.get_path())

            new_file = pkgroot.File(new_name)
            new_file = env.CopyAs(new_file, file)[0]
            # preserve the packaging tags on the copied node
            copy_attr(file, new_file)
            new_source.append(new_file)

    return (target, new_source)
Uses the CopyAs builder to copy all source files to the directory given in pkgroot .
21,769
def stripinstallbuilder(target, source, env):
    """Strips the install builder action from the source list and stores the
    final installation location as the PACKAGING_INSTALL_LOCATION tag of the
    source file.

    This effectively removes the final installed files from the source list
    while remembering where they would have been installed.
    """
    def has_no_install_location(file):
        # True when the node was NOT produced by Install()/InstallAs().
        return not (file.has_builder() and hasattr(file.builder, 'name') and (file.builder.name == "InstallBuilder" or file.builder.name == "InstallAsBuilder"))

    if len([src for src in source if has_no_install_location(src)]):
        warn(Warning, "there are files to package which have no\ InstallBuilder attached, this might lead to irreproducible packages")

    n_source = []
    for s in source:
        if has_no_install_location(s):
            n_source.append(s)
        else:
            # replace the installed node by its sources, remembering the
            # install destination as a packaging tag.
            for ss in s.sources:
                n_source.append(ss)
                copy_attr(s, ss)
                ss.Tag('PACKAGING_INSTALL_LOCATION', s.get_path())

    return (target, n_source)
Strips the install builder action from the source list and stores the final installation location as the PACKAGING_INSTALL_LOCATION tag of the source file. This effectively removes the final installed files from the source list while remembering the installation location.
21,770
def restore(self, state):
    """Restore a previous state of this stream walker.

    The serialized selector must match ours; otherwise the saved state
    belongs to a different walker and cannot be applied.

    Raises:
        ArgumentError: if the serialized selector does not match.
    """
    saved_selector = DataStreamSelector.FromString(state.get(u'selector'))

    if saved_selector != self.selector:
        raise ArgumentError("Attempted to restore a BufferedStreamWalker with a different selector",
                            selector=self.selector, serialized_data=state)

    self.seek(state.get(u'offset'), target="offset")
Restore a previous state of this stream walker .
21,771
def pop(self):
    """Pop a reading off of this stream and return it.

    Scans forward from the current offset until a reading matching our
    selector is found, advancing the offset past each reading inspected.

    Raises:
        StreamEmptyError: if there are no matching readings left.
    """
    if self._count == 0:
        raise StreamEmptyError("Pop called on buffered stream walker without any data",
                               selector=self.selector)

    while True:
        reading = self.engine.get(self.storage_type, self.offset)
        self.offset += 1

        if self.matches(DataStream.FromEncoded(reading.stream)):
            self._count -= 1
            return reading
Pop a reading off of this stream and return it .
21,772
def seek(self, value, target="offset"):
    """Seek this stream to a specific offset or reading id.

    After seeking, the matching-reading count is recomputed from the new
    offset.

    Returns:
        bool: True if the reading at the new offset matches our selector.

    Raises:
        ArgumentError: if target is neither 'offset' nor 'id'.
    """
    if target == u'offset':
        self._verify_offset(value)
        new_offset = value
    elif target == u'id':
        new_offset = self._find_id(value)
    else:
        raise ArgumentError("You must specify target as either offset or id",
                            target=target)

    self.offset = new_offset
    self._count = self.engine.count_matching(self.selector, offset=self.offset)

    current = self.engine.get(self.storage_type, self.offset)
    return self.matches(DataStream.FromEncoded(current.stream))
Seek this stream to a specific offset or reading id .
21,773
def skip_all(self):
    """Skip all readings in this walker.

    Moves the offset to the end of the relevant buffer (streaming buffer
    for output selectors, storage buffer otherwise) and zeroes the count.
    """
    storage_count, streaming_count = self.engine.count()
    self.offset = streaming_count if self.selector.output else storage_count
    self._count = 0
Skip all readings in this walker .
21,774
def notify_rollover(self, stream):
    """Notify that a reading in the given stream was overwritten.

    The walker's offset always shifts back by one; if the overwritten
    reading matched our selector, the available count also drops by one.

    Raises:
        InternalError: if the count was already zero for a matching stream.
    """
    self.offset -= 1

    if self.matches(stream):
        if self._count == 0:
            raise InternalError("BufferedStreamWalker out of sync with storage engine, count was wrong.")
        self._count -= 1
Notify that a reading in the given stream was overwritten .
21,775
def dump(self):
    """Serialize the state of this stream walker.

    Returns:
        dict: the stringified selector and the current reading (as a dict,
        or None when there is no reading).
    """
    serialized_reading = None if self.reading is None else self.reading.asdict()
    return {
        u'selector': str(self.selector),
        u'reading': serialized_reading,
    }
Serialize the state of this stream walker .
21,776
def peek(self):
    """Peek at the oldest reading in this virtual stream.

    Returns:
        The current reading, without consuming it.

    Raises:
        StreamEmptyError: if there is no reading available.
    """
    current = self.reading
    if current is None:
        raise StreamEmptyError("peek called on virtual stream walker without any data",
                               selector=self.selector)

    return current
Peek at the oldest reading in this virtual stream .
21,777
def run(self, refresh_interval=0.05):
    """Run the display loop, checking first that asciimatics is installed.

    Args:
        refresh_interval (float): seconds between screen refreshes.

    Raises:
        ExternalError: if the optional asciimatics dependency is missing.
    """
    try:
        from asciimatics.screen import Screen
    except ImportError:
        raise ExternalError("You must have asciimatics installed to use LinebufferUI",
                            suggestion="pip install iotilecore[ui]")

    Screen.wrapper(self._run_loop, arguments=[refresh_interval])
Set up the loop check that the tool is installed
21,778
def find_vc_pdir(msvc_version):
    """Try to find the product directory for the given version.

    Checks the Windows registry (and vswhere for newer versions that have
    no registry key) for each known key of the requested MSVC version.

    Returns:
        str or None: path to the product directory, or None if not found.

    Raises:
        UnsupportedVersion: if the version string is not recognized.
        MissingConfiguration: if the registry points at a nonexistent dir.
    """
    root = 'Software\\'
    try:
        hkeys = _VCVER_TO_PRODUCT_DIR[msvc_version]
    except KeyError:
        debug("Unknown version of MSVC: %s" % msvc_version)
        raise UnsupportedVersion("Unknown version %s" % msvc_version)

    for hkroot, key in hkeys:
        try:
            comps = None
            if not key:
                # An empty key means this version is located via vswhere,
                # not the registry (VS 2017 and later).
                comps = find_vc_pdir_vswhere(msvc_version)
                if not comps:
                    debug('find_vc_dir(): no VC found via vswhere for version {}'.format(repr(key)))
                    raise SCons.Util.WinError
            else:
                if common.is_win64():
                    try:
                        # ordinarily at win64, try Wow6432Node first.
                        comps = common.read_reg(root + 'Wow6432Node\\' + key, hkroot)
                    except SCons.Util.WinError as e:
                        # at Microsoft Visual Studio for Python 2.7, value is not in Wow6432Node
                        pass
                if not comps:
                    # not Win64, or Microsoft Visual Studio for Python 2.7
                    comps = common.read_reg(root + key, hkroot)
        except SCons.Util.WinError as e:
            debug('find_vc_dir(): no VC registry key {}'.format(repr(key)))
        else:
            debug('find_vc_dir(): found VC in registry: {}'.format(comps))
            if os.path.exists(comps):
                return comps
            else:
                debug('find_vc_dir(): reg says dir is {}, but it does not exist. (ignoring)'.format(comps))
                raise MissingConfiguration("registry dir {} not found on the filesystem".format(comps))
    return None
Try to find the product directory for the given version .
21,779
def compile_sgf(in_path, optimize=True, model=None):
    """Compile and optionally optimize an SGF file.

    Args:
        in_path (str): path to the sensor graph file to compile
        optimize (bool): whether to run the optimizer over the result
        model: optional DeviceModel; a default one is created if omitted

    Returns:
        The compiled (and possibly optimized) sensor graph.
    """
    if model is None:
        model = DeviceModel()

    parser = SensorGraphFileParser()
    parser.parse_file(in_path)
    parser.compile(model)

    if optimize:
        optimizer = SensorGraphOptimizer()
        optimizer.optimize(parser.sensor_graph, model=model)

    return parser.sensor_graph
Compile and optionally optimize an SGF file .
21,780
def generate(env):
    """Add Builders and construction variables for g77 to an Environment."""
    add_all_to_env(env)
    add_f77_to_env(env)

    fcomp = env.Detect(compilers) or 'g77'
    # -fPIC is only needed for shared objects on non-Windows platforms.
    if env['PLATFORM'] in ['cygwin', 'win32']:
        env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS')
        env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS')
    else:
        env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -fPIC')
        env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS -fPIC')

    env['FORTRAN'] = fcomp
    env['SHFORTRAN'] = '$FORTRAN'
    env['F77'] = fcomp
    env['SHF77'] = '$F77'

    env['INCFORTRANPREFIX'] = "-I"
    env['INCFORTRANSUFFIX'] = ""
    env['INCF77PREFIX'] = "-I"
    env['INCF77SUFFIX'] = ""
Add Builders and construction variables for g77 to an Environment .
21,781
def get_language():
    """Create or retrieve the parse tree for defining a sensor graph.

    The grammar is built lazily on first call and cached in the module
    global sensor_graph; subsequent calls return the cached object.
    """
    global sensor_graph, statement

    # Return the cached grammar if it was already built.
    if sensor_graph is not None:
        return sensor_graph

    _create_primitives()
    _create_simple_statements()
    _create_block_bnf()

    sensor_graph = ZeroOrMore(statement) + StringEnd()
    sensor_graph.ignore(comment)

    return sensor_graph
Create or retrieve the parse tree for defining a sensor graph .
21,782
def _create_mo_file_builder(env, **kw):
    """Create builder object for the MOFiles builder.

    Fills in the action, suffixes and source builder expected for
    compiling .po catalogs into .mo files, then instantiates the builder.
    """
    import SCons.Action

    kw.update(
        action=SCons.Action.Action('$MSGFMTCOM', '$MSGFMTCOMSTR'),
        suffix='$MOSUFFIX',
        src_suffix='$POSUFFIX',
        src_builder='_POUpdateBuilder',
        single_source=True,
    )
    return _MOFileBuilder(**kw)
Create builder object for MOFiles builder
21,783
def generate(env, **kw):
    """Generate the `msgfmt` tool.

    Detects the msgfmt executable (falling back to the plain 'msgfmt'
    name), sets default construction variables, and registers the
    MOFiles builder on the environment.
    """
    import SCons.Util
    from SCons.Tool.GettextCommon import _detect_msgfmt
    try:
        env['MSGFMT'] = _detect_msgfmt(env)
    except Exception:
        # FIX: was a bare `except:`, which would also swallow
        # SystemExit/KeyboardInterrupt. Fall back to relying on PATH.
        env['MSGFMT'] = 'msgfmt'
    env.SetDefault(
        MSGFMTFLAGS=[SCons.Util.CLVar('-c')],
        MSGFMTCOM='$MSGFMT $MSGFMTFLAGS -o $TARGET $SOURCE',
        MSGFMTCOMSTR='',
        MOSUFFIX=['.mo'],
        POSUFFIX=['.po'])
    env.Append(BUILDERS={'MOFiles': _create_mo_file_builder(env)})
Generate msgfmt tool
21,784
def RCScan():
    """Return a prototype Scanner instance for scanning RC source files.

    The regex matches #include lines as well as resource statements
    (ICON, BITMAP, ...) that reference external files.
    """
    # FIX: all fragments are now raw strings.  The original mixed one raw
    # fragment with plain strings containing \s etc., which are invalid
    # string escape sequences (DeprecationWarning today, a SyntaxError in
    # future Python versions).  The resulting regex text is unchanged.
    res_re = (r'^(?:\s*#\s*(?:include)|'
              r'.*?\s+(?:ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)'
              r'\s*.*?)'
              r'\s*(<|"| )([^>"\s]+)(?:[>"\s])*$')
    resScanner = SCons.Scanner.ClassicCPP("ResourceScanner",
                                          "$RCSUFFIXES",
                                          "CPPPATH",
                                          res_re,
                                          recursive=no_tlb)

    return resScanner
Return a prototype Scanner instance for scanning RC source files
21,785
def _read_linguas_from_files(env, linguas_files=None):
    """Parse LINGUAS file and return list of extracted languages."""
    import SCons.Util
    import SCons.Environment
    global _re_comment
    global _re_lang
    # A truthy value that is not a list, string or FS node means "use the
    # conventional default file name".
    if not SCons.Util.is_List(linguas_files) and not SCons.Util.is_String(linguas_files) and not isinstance(linguas_files, SCons.Node.FS.Base) and linguas_files:
        linguas_files = ['LINGUAS']
    if linguas_files is None:
        return []
    fnodes = env.arg2nodes(linguas_files)
    linguas = []
    for fnode in fnodes:
        # strip comments, then pull out the language tokens
        contents = _re_comment.sub("", fnode.get_text_contents())
        ls = [l for l in _re_lang.findall(contents) if l]
        linguas.extend(ls)
    return linguas
Parse LINGUAS file and return list of extracted languages
21,786
def _init_po_files(target, source, env):
    """Action function for the POInit builder.

    For each missing target .po file, either runs $MSGINITCOM (when
    POAUTOINIT is enabled) or prints an informational message telling a
    translator how to create the file manually.
    """
    nop = lambda target, source, env: 0
    if 'POAUTOINIT' in env:
        autoinit = env['POAUTOINIT']
    else:
        autoinit = False
    for tgt in target:
        if not tgt.exists():
            if autoinit:
                action = SCons.Action.Action('$MSGINITCOM', '$MSGINITCOMSTR')
            else:
                # No auto-init: just print a hint instead of running msginit.
                msg = 'File ' + repr(str(tgt)) + ' does not exist. ' + 'If you are a translator, you can create it through: \n' + '$MSGINITCOM'
                action = SCons.Action.Action(nop, msg)
            status = action([tgt], source, env)
            # propagate the first failure
            if status:
                return status
    return 0
Action function for POInit builder .
21,787
def _create_node(self, name, factory, directory=None, create=1):
    """Create node and set it up to factory settings.

    Applies this wrapper's noclean/precious/nodefault/alias configuration
    to the freshly created node.
    """
    import SCons.Util
    node = factory(name, directory, create)
    node.set_noclean(self.noclean)
    node.set_precious(self.precious)
    if self.nodefault:
        # keep the node out of the default build targets
        self.env.Ignore('.', node)
    if self.alias:
        self.env.AlwaysBuild(self.env.Alias(self.alias, node))
    return node
Create node and set it up to factory settings .
21,788
def Entry(self, name, directory=None, create=1):
    """Create SCons.Node.FS.Entry via the configured node factory."""
    entry_factory = self.env.fs.Entry
    return self._create_node(name, entry_factory, directory, create)
Create SCons . Node . FS . Entry
21,789
def File(self, name, directory=None, create=1):
    """Create SCons.Node.FS.File via the configured node factory."""
    file_factory = self.env.fs.File
    return self._create_node(name, file_factory, directory, create)
Create SCons . Node . FS . File
21,790
def allocate_stream(self, stream_type, stream_id=None, previous=None, attach=False):
    """Allocate a new stream of the given type.

    If stream_id is None, an id is taken from the internally managed
    range (starting at StreamAllocator.StartingID).  Explicit ids must
    stay below that range.

    Args:
        stream_type: one of the DataStream type constants
        stream_id (int): optional explicit stream id
        previous: optional stream this one chains from
        attach (bool): immediately attach a node input to the stream

    Returns:
        DataStream: the allocated (and possibly attached) stream.

    Raises:
        ArgumentError: on an unknown type or an id inside the managed range.
    """
    if stream_type not in DataStream.TypeToString:
        raise ArgumentError("Unknown stream type in allocate_stream", stream_type=stream_type)

    if stream_id is not None and stream_id >= StreamAllocator.StartingID:
        raise ArgumentError("Attempted to explicitly allocate a stream id in the internally managed id range", stream_id=stream_id, started_id=StreamAllocator.StartingID)

    if stream_id is None:
        # Hand out the next auto-managed id for this stream type.
        if stream_type not in self._next_id:
            self._next_id[stream_type] = StreamAllocator.StartingID

        stream_id = self._next_id[stream_type]
        self._next_id[stream_type] += 1

    stream = DataStream(stream_type, stream_id)

    # Track the stream with a zero attach count unless already known.
    if stream not in self._allocated_streams:
        self._allocated_streams[stream] = (stream, 0, previous)

    if attach:
        stream = self.attach_stream(stream)

    return stream
Allocate a new stream of the given type .
21,791
def attach_stream(self, stream):
    """Notify that we would like to attach a node input to this stream.

    If the current backing stream has reached its fan-out limit
    (max_node_outputs - 1), a fresh stream is chained onto it via a
    copy_all_a node and returned instead; otherwise the current stream's
    attach count is simply incremented.

    Returns:
        DataStream: the stream the caller should actually attach to.
    """
    curr_stream, count, prev = self._allocated_streams[stream]

    if count == (self.model.get(u'max_node_outputs') - 1):
        # Fan-out limit hit: chain a new stream onto the current one.
        new_stream = self.allocate_stream(curr_stream.stream_type, previous=curr_stream)
        copy_desc = u"({} always) => {} using copy_all_a".format(curr_stream, new_stream)
        self.sensor_graph.add_node(copy_desc)
        self._allocated_streams[stream] = (new_stream, 1, curr_stream)

        # Constant streams must have their value replicated onto the
        # newly chained stream.
        if curr_stream.stream_type == DataStream.ConstantType and curr_stream in self.sensor_graph.constant_database:
            self.sensor_graph.add_constant(new_stream, self.sensor_graph.constant_database[curr_stream])

        return new_stream

    self._allocated_streams[stream] = (curr_stream, count + 1, prev)
    return curr_stream
Notify that we would like to attach a node input to this stream .
21,792
def _find_v1_settings(self, settings):
    """Parse a v1 module_settings.json file.

    Determines the module name, validates it against the 'modules' map,
    and collects per-module settings, architectures, targets and release
    info into a TileInfo tuple.

    Raises:
        DataError: if no/multiple modules are defined or the name does
            not correspond to a 'modules' entry.
    """
    if 'module_name' in settings:
        modname = settings['module_name']
    # NOTE(review): when exactly one module is defined, the else branch
    # below overwrites any modname taken from 'module_name' above —
    # confirm this precedence is intended.
    if 'modules' not in settings or len(settings['modules']) == 0:
        raise DataError("No modules defined in module_settings.json file")
    elif len(settings['modules']) > 1:
        raise DataError("Multiple modules defined in module_settings.json file", modules=[x for x in settings['modules']])
    else:
        modname = list(settings['modules'])[0]

    if modname not in settings['modules']:
        raise DataError("Module name does not correspond with an entry in the modules directory", name=modname, modules=[x for x in settings['modules']])

    release_info = self._load_release_info(settings)
    modsettings = settings['modules'][modname]
    architectures = settings.get('architectures', {})

    target_defs = settings.get('module_targets', {})
    targets = target_defs.get(modname, [])

    return TileInfo(modname, modsettings, architectures, targets, release_info)
Parse a v1 module_settings . json file .
21,793
def _ensure_product_string ( cls , product ) : if isinstance ( product , str ) : return product if isinstance ( product , list ) : return os . path . join ( * product ) raise DataError ( "Unknown object (not str or list) specified as a component product" , product = product )
Ensure that all product locations are strings .
21,794
def find_products(self, product_type):
    """Search for products of a given type.

    Respects the product filter installed by filter_products(): filtered
    list-valued types return [], and map-style products are restricted to
    the desired set.  Path-style products are additionally normalized via
    _process_product_path.

    Returns:
        list: the (possibly filtered and path-processed) product strings.
    """
    # List-valued product types can be filtered out wholesale.
    if self.filter_prods and product_type in self.LIST_PRODUCTS and product_type not in self.desired_prods:
        return []

    if product_type in self.LIST_PRODUCTS:
        found_products = self.products.get(product_type, [])
    else:
        # Map-style products: keys are product names, values their type.
        found_products = [x[0] for x in self.products.items() if x[1] == product_type and (not self.filter_prods or x[0] in self.desired_prods)]

    found_products = [self._ensure_product_string(x) for x in found_products]

    declaration = self.PATH_PRODUCTS.get(product_type)
    if declaration is not None:
        found_products = [self._process_product_path(x, declaration) for x in found_products]

    return found_products
Search for products of a given type .
21,795
def library_directories(self):
    """Return a list of directories containing any static libraries built by this IOTile.

    Returns:
        list: [output_folder] when at least one library product exists,
        otherwise an empty list.
    """
    if self.find_products('library'):
        return [os.path.join(self.output_folder)]

    return []
Return a list of directories containing any static libraries built by this IOTile .
21,796
def filter_products(self, desired_prods):
    """When asked for products, return only those on this list.

    Args:
        desired_prods (iterable): the product names to allow.
    """
    self.desired_prods = set(desired_prods)
    self.filter_prods = True
When asked for a product filter only those on this list .
21,797
def format_ascii(sensor_graph):
    """Format this sensor graph as a loadable ascii file format.

    Emits commands that clear the device, load all nodes, streamers and
    constants, persist the graph and bring it back online.

    Returns:
        str: the dumped command file contents.
    """
    cmdfile = CommandFile("Sensor Graph", "1.0")

    # Take the device offline and wipe any previous graph first.
    cmdfile.add("set_online", False)
    cmdfile.add("clear")
    cmdfile.add("reset")

    for node in sensor_graph.dump_nodes():
        cmdfile.add('add_node', node)

    for streamer in sensor_graph.streamers:
        # 0xFF marks "no linked streamer".
        other = 0xFF
        if streamer.with_other is not None:
            other = streamer.with_other

        args = [streamer.selector, streamer.dest, streamer.automatic, streamer.format, streamer.report_type, other]
        cmdfile.add('add_streamer', *args)

    # Sort constants so output is deterministic.
    for stream, value in sorted(sensor_graph.constant_database.items(), key=lambda x: x[0].encode()):
        cmdfile.add("push_reading", stream, value)

    cmdfile.add("persist")
    cmdfile.add("set_online", True)

    return cmdfile.dump()
Format this sensor graph as a loadable ascii file format .
21,798
def clear(self):
    """Clear all nodes from this sensor_graph.

    Resets the node/root/streamer lists and all lookup databases to
    empty containers.
    """
    for list_attr in ('roots', 'nodes', 'streamers'):
        setattr(self, list_attr, [])

    for dict_attr in ('constant_database', 'metadata_database', 'config_database'):
        setattr(self, dict_attr, {})
Clear all nodes from this sensor_graph .
21,799
def add_node(self, node_descriptor):
    """Add a node to the sensor graph based on the description given.

    Parses the descriptor, creates stream walkers for each input, wires
    the node into the graph (both as a consumer of existing nodes'
    outputs and as a producer for existing nodes' inputs) and binds its
    processing function.

    Raises:
        ResourceUsageError: if the maximum node count would be exceeded.
        NodeConnectionError: if a buffered input refers to a node that
            does not exist yet.
        ProcessingFunctionError: if the processing function cannot be found.
    """
    if self._max_nodes is not None and len(self.nodes) >= self._max_nodes:
        raise ResourceUsageError("Maximum number of nodes exceeded", max_nodes=self._max_nodes)

    node, inputs, processor = parse_node_descriptor(node_descriptor, self.model)

    in_root = False
    for i, input_data in enumerate(inputs):
        selector, trigger = input_data

        walker = self.sensor_log.create_walker(selector)

        # Inexhaustible (constant) walkers start with a marker reading so
        # they always have data available.
        if walker.selector.inexhaustible:
            walker.reading = IOTileReading(0xFFFFFFFF, walker.selector.as_stream(), 0)

        node.connect_input(i, walker, trigger)

        if selector.input and not in_root:
            # Raw-input selectors make this a root node (only once).
            self.roots.append(node)
            in_root = True
        else:
            # Hook this node up as an output of every existing node whose
            # stream matches the selector.
            found = False
            for other in self.nodes:
                if selector.matches(other.stream):
                    other.connect_output(node)
                    found = True

            if not found and selector.buffered:
                raise NodeConnectionError("Node has input that refers to another node that has not been created yet", node_descriptor=node_descriptor, input_selector=str(selector), input_index=i)

    # Also attach any existing nodes that listen for this node's output.
    for other_node in self.nodes:
        for selector, trigger in other_node.inputs:
            if selector.matches(node.stream):
                node.connect_output(other_node)

    func = self.find_processing_function(processor)
    if func is None:
        raise ProcessingFunctionError("Could not find processing function in installed packages", func_name=processor)

    node.set_func(processor, func)
    self.nodes.append(node)
Add a node to the sensor graph based on the description given .