idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
22,300
def clear_components(self):
    """Remove every registered component from this registry."""
    # Drop all in-memory overlays first, then purge persisted entries.
    ComponentRegistry._component_overlays = {}
    for name in self.list_components():
        self.remove_component(name)
Clear all of the registered components
22,301
def list_components(self):
    """Return the names of all registered components.

    Includes in-memory overlays plus every persisted key/value entry
    whose key is not a config variable.
    """
    names = list(self._component_overlays)
    stored = self.kvstore.get_all()
    names.extend(key for key, _value in stored if not key.startswith('config:'))
    return names
List all of the registered component names .
22,302
def iter_components(self):
    """Yield an IOTile object for every registered component."""
    for component_name in self.list_components():
        yield self.get_component(component_name)
Iterate over all defined components yielding IOTile objects .
22,303
def list_config(self):
    """Return all configuration variables as 'name=value' strings."""
    prefix = 'config:'
    return [
        "{0}={1}".format(key[len(prefix):], value)
        for key, value in self.kvstore.get_all()
        if key.startswith(prefix)
    ]
List all of the configuration variables
22,304
def set_config(self, key, value):
    """Persist *value* under configuration variable *key* in the registry."""
    self.kvstore.set('config:' + key, value)
Set a persistent config key to a value stored in the registry
22,305
def get_config(self, key, default=MISSING):
    """Fetch the persisted config value for *key*.

    Returns *default* when the key is absent, or raises ArgumentError
    if no default was supplied (MISSING sentinel).
    """
    keyname = 'config:' + key
    try:
        return self.kvstore.get(keyname)
    except KeyError:
        if default is MISSING:
            raise ArgumentError("No config value found for key", key=key)
        return default
Get the value of a persistent config key from the registry
22,306
def execute_action_list(obj, target, kw):
    """Execute every action in obj's action list, raising BuildError on failure."""
    env = obj.get_build_env()
    kw = obj.get_kw(kw)
    status = 0
    for act in obj.get_action_list():
        args = ([], [], env)
        status = act(*args, **kw)
        if isinstance(status, SCons.Errors.BuildError):
            status.executor = obj
            raise status
        if status:
            # Any other truthy status is treated as a generic failure code.
            raise SCons.Errors.BuildError(errstr="Error %s" % status,
                                          node=obj.batches[0].targets,
                                          executor=obj, action=act)
    return status
Actually execute the action list .
22,307
def get_all_targets(self):
    """Return the targets of every batch, concatenated in order."""
    targets = []
    for group in self.batches:
        targets += group.targets
    return targets
Returns all targets for all batches of this Executor .
22,308
def get_all_sources(self):
    """Return the sources of every batch, concatenated in order."""
    sources = []
    for group in self.batches:
        sources += group.sources
    return sources
Returns all sources for all batches of this Executor .
22,309
def get_action_side_effects(self):
    """Return the side effects of every action target, de-duplicated."""
    effects = SCons.Util.UniqueList([])
    for node in self.get_action_targets():
        effects.extend(node.side_effects)
    return effects
Returns all side effects for all batches of this Executor used by the underlying Action .
22,310
def get_build_env(self):
    """Fetch (and memoize) the build Environment for this Executor."""
    try:
        return self._memo['get_build_env']
    except KeyError:
        pass
    # Merge all override dicts; later dicts win on key collisions.
    overrides = {}
    for odict in self.overridelist:
        overrides.update(odict)
    import SCons.Defaults
    env = self.env or SCons.Defaults.DefaultEnvironment()
    build_env = env.Override(overrides)
    self._memo['get_build_env'] = build_env
    return build_env
Fetch or create the appropriate build Environment for this Executor .
22,311
def get_build_scanner_path(self, scanner):
    """Fetch the scanner path for this executor's targets and sources."""
    env = self.get_build_env()
    try:
        cwd = self.batches[0].targets[0].cwd
    except (IndexError, AttributeError):
        # No batches/targets yet, or the target has no cwd attribute.
        cwd = None
    return scanner.path(env, cwd, self.get_all_targets(), self.get_all_sources())
Fetch the scanner path for this executor's targets and sources.
22,312
def add_sources(self, sources):
    """Append sources not already present to the single batch's source list.

    Used by "multi" Builders that are invoked repeatedly to accumulate a
    source file list for one target.
    """
    assert len(self.batches) == 1
    # Filter against the pre-existing list, so duplicates within the
    # incoming *sources* argument are preserved (matches old behavior).
    new_sources = [src for src in sources if src not in self.batches[0].sources]
    self.batches[0].sources.extend(new_sources)
Add source files to this Executor's list. This is necessary for "multi" Builders that can be called repeatedly to build up a source file list for a given target.
22,313
def add_batch(self, targets, sources):
    """Record an associated (targets, sources) pair as a new batch.

    Needed by batch Builders that are called repeatedly to build up a
    list of matching target/source files that are updated in a single
    tool invocation (e.g. MSVC batch compilation).
    """
    self.batches.append(Batch(targets, sources))
Add a pair of associated target and source to this Executor's list. This is necessary for batch Builders that can be called repeatedly to build up a list of matching target and source files that will be used in order to update multiple target files at once from multiple corresponding source files, for tools like MSVC that support it.
22,314
def get_contents(self):
    """Fetch and memoize the signature contents for this Executor.

    Computed once and cached in ``self._memo`` so the concatenation is
    not repeated no matter how many target/source Nodes exist.
    """
    try:
        return self._memo['get_contents']
    except KeyError:
        pass
    env = self.get_build_env()
    action_list = self.get_action_list()
    all_targets = self.get_all_targets()
    all_sources = self.get_all_sources()
    result = bytearray(b'').join(
        act.get_contents(all_targets, all_sources, env) for act in action_list
    )
    self._memo['get_contents'] = result
    return result
Fetch the signature contents . This is the main reason this class exists so we can compute this once and cache it regardless of how many target or source Nodes there are .
22,315
def get_implicit_deps(self):
    """Return the implicit dependencies (command nodes) of every action."""
    deps = []
    build_env = self.get_build_env()
    for action in self.get_action_list():
        deps.extend(action.get_implicit_deps(self.get_all_targets(),
                                             self.get_all_sources(),
                                             build_env))
    return deps
Return the executor's implicit dependencies, i.e. the nodes of the commands to be executed.
22,316
def _morph(self):
    """Morph this Null executor into a real Executor object in place."""
    saved_batches = self.batches
    self.__class__ = Executor
    self.__init__([])
    # __init__ reset the batch list; restore the batches we already had.
    self.batches = saved_batches
Morph this Null executor to a real Executor object .
22,317
def LoadPlugins(cls):
    """Load all registered iotile.update_record plugins exactly once."""
    if cls.PLUGINS_LOADED:
        return
    registry = ComponentRegistry()
    for _name, record in registry.load_extensions('iotile.update_record'):
        cls.RegisterRecordType(record)
    cls.PLUGINS_LOADED = True
Load all registered iotile . update_record plugins .
22,318
def RegisterRecordType(cls, record_class):
    """Register *record_class* in UpdateRecord.KNOWN_CLASSES.

    Multiple record classes may share one match type, so each type maps
    to a list of candidate classes.
    """
    record_type = record_class.MatchType()
    # setdefault replaces the previous check-then-insert (LBYL) pattern.
    UpdateRecord.KNOWN_CLASSES.setdefault(record_type, []).append(record_class)
Register a known record type in KNOWN_CLASSES .
22,319
def _setup(self):
    """Allocate and wire the root clock stream nodes for code generation."""
    allocate = self.allocator.allocate_stream
    systick = allocate(DataStream.CounterType, attach=True)
    fasttick = allocate(DataStream.CounterType, attach=True)
    user1tick = allocate(DataStream.CounterType, attach=True)
    user2tick = allocate(DataStream.CounterType, attach=True)

    graph = self.sensor_graph
    copy_node = "({} always) => {} using copy_all_a"
    graph.add_node(copy_node.format(system_tick, systick))
    graph.add_node(copy_node.format(fast_tick, fasttick))
    graph.add_config(SlotIdentifier.FromString('controller'),
                     config_fast_tick_secs, 'uint32_t', 1)
    graph.add_node(copy_node.format(tick_1, user1tick))
    graph.add_node(copy_node.format(tick_2, user2tick))

    self.system_tick = systick
    self.fast_tick = fasttick
    self.user1_tick = user1tick
    self.user2_tick = user2tick
Prepare for code generation by setting up root clock nodes .
22,320
def find_proxy_plugin(component, plugin_name):
    """Find a proxy plugin named *plugin_name* provided by *component*.

    Raises:
        DataError: when no matching plugin is registered.
    """
    registry = ComponentRegistry()
    candidates = registry.load_extensions('iotile.proxy_plugin',
                                          comp_filter=component,
                                          class_filter=TileBusProxyPlugin,
                                          product_name='proxy_plugin')
    for _name, plugin in candidates:
        if plugin.__name__ == plugin_name:
            return plugin
    raise DataError("Could not find proxy plugin module in registered components or installed distributions",
                    component=component, name=plugin_name)
Attempt to find a proxy plugin provided by a specific component
22,321
def _convert_trigger ( self , trigger_def , parent ) : if trigger_def . explicit_stream is None : stream = parent . resolve_identifier ( trigger_def . named_event , DataStream ) trigger = TrueTrigger ( ) else : stream = trigger_def . explicit_stream trigger = trigger_def . explicit_trigger return ( stream , trigger )
Convert a TriggerDefinition into a stream trigger pair .
22,322
def _parse_trigger(self, trigger_clause):
    """Parse a named event or explicit stream trigger into a TriggerDefinition."""
    cond = trigger_clause[0]
    named_event = None
    explicit_stream = None
    explicit_trigger = None
    clause_kind = cond.getName()
    if clause_kind == 'identifier':
        named_event = cond[0]
    elif clause_kind == 'stream_trigger':
        # cond layout: [trigger_type, stream, operator, reference value]
        explicit_stream = cond[1]
        explicit_trigger = InputTrigger(cond[0], cond[2], cond[3])
    elif clause_kind == 'stream_always':
        explicit_stream = cond[0]
        explicit_trigger = TrueTrigger()
    else:
        raise ArgumentError("OnBlock created from an invalid ParseResults object",
                            parse_results=trigger_clause)
    return TriggerDefinition(named_event, explicit_stream, explicit_trigger)
Parse a named event or explicit stream trigger into a TriggerDefinition .
22,323
def platform_default():
    """Return the platform string describing the execution environment."""
    osname = os.name
    if osname == 'java':
        # Jython reports 'java'; the underlying OS type is stashed here.
        osname = os._osType
    if osname == 'posix':
        plat = sys.platform
        if plat == 'cygwin':
            return 'cygwin'
        # Map substring markers to their canonical platform names.
        for marker, canonical in (('irix', 'irix'), ('sunos', 'sunos'),
                                  ('hp-ux', 'hpux'), ('aix', 'aix'),
                                  ('darwin', 'darwin')):
            if plat.find(marker) != -1:
                return canonical
        return 'posix'
    if os.name == 'os2':
        return 'os2'
    return sys.platform
Return the platform string for our execution environment .
22,324
# Import and return the SCons.Platform.<name> module, loading it with the
# 'imp' module (or zipimport when SCons runs from a zip archive; eval() of
# the dotted name on Jython), then caching it as an attribute of
# SCons.Platform.  Raises SCons.Errors.UserError when no such platform
# module exists.
# NOTE(review): relies on the deprecated 'imp' module; migrating to
# importlib would change load semantics, so the code is left byte-identical.
def platform_module ( name = platform_default ( ) ) : full_name = 'SCons.Platform.' + name if full_name not in sys . modules : if os . name == 'java' : eval ( full_name ) else : try : file , path , desc = imp . find_module ( name , sys . modules [ 'SCons.Platform' ] . __path__ ) try : mod = imp . load_module ( full_name , file , path , desc ) finally : if file : file . close ( ) except ImportError : try : import zipimport importer = zipimport . zipimporter ( sys . modules [ 'SCons.Platform' ] . __path__ [ 0 ] ) mod = importer . load_module ( full_name ) except ImportError : raise SCons . Errors . UserError ( "No platform named '%s'" % name ) setattr ( SCons . Platform , name , mod ) return sys . modules [ full_name ]
Return the imported module for the platform .
22,325
def Platform(name=platform_default()):
    """Select and return a canned Platform specification for *name*."""
    module = platform_module(name)
    return PlatformSpec(name, module.generate)
Select a canned Platform specification .
22,326
# Build the jar command's source argument list, skipping any manifest file
# (that is handled separately by $_JARMANIFEST).  When $JARCHDIR is set, or
# a source node carries a java_classdir attribute, the source is emitted
# behind a '-C <dir>' pair so jar changes directory before adding it.
# NOTE(review): flattened one-liner kept byte-identical; the nesting of the
# trailing result.append(src) is ambiguous in this form, so do not reflow
# without consulting the original SCons source.
def jarSources ( target , source , env , for_signature ) : try : env [ 'JARCHDIR' ] except KeyError : jarchdir_set = False else : jarchdir_set = True jarchdir = env . subst ( '$JARCHDIR' , target = target , source = source ) if jarchdir : jarchdir = env . fs . Dir ( jarchdir ) result = [ ] for src in source : contents = src . get_text_contents ( ) if contents [ : 16 ] != "Manifest-Version" : if jarchdir_set : _chdir = jarchdir else : try : _chdir = src . attributes . java_classdir except AttributeError : _chdir = None if _chdir : src = SCons . Subst . Literal ( src . get_path ( _chdir ) ) result . append ( '-C' ) result . append ( _chdir ) result . append ( src ) return result
Only include sources that are not a manifest file .
22,327
def jarManifest(target, source, env, for_signature):
    """Return the first manifest file found in *source*, or '' if none."""
    for candidate in source:
        if candidate.get_text_contents()[:16] == "Manifest-Version":
            return candidate
    return ''
Look in sources for a manifest file if any .
22,328
def jarFlags(target, source, env, for_signature):
    """Return $JARFLAGS, appending 'm' when a manifest is among the sources."""
    jarflags = env.subst('$JARFLAGS', target=target, source=source)
    for src in source:
        if src.get_text_contents()[:16] == "Manifest-Version":
            # 'm' not in ... replaces the non-idiomatic `not 'm' in ...`.
            if 'm' not in jarflags:
                jarflags += 'm'
            break
    return jarflags
If we have a manifest make sure that the m flag is specified .
22,329
def generate(env):
    """Add Builders and construction variables for jar to an Environment."""
    SCons.Tool.CreateJarBuilder(env)
    SCons.Tool.CreateJavaFileBuilder(env)
    SCons.Tool.CreateJavaClassFileBuilder(env)
    SCons.Tool.CreateJavaClassDirBuilder(env)
    env.AddMethod(Jar)
    env['JAR'] = 'jar'
    env['JARFLAGS'] = SCons.Util.CLVar('cf')
    env['_JARFLAGS'] = jarFlags
    env['_JARMANIFEST'] = jarManifest
    env['_JARSOURCES'] = jarSources
    env['_JARCOM'] = '$JAR $_JARFLAGS $TARGET $_JARMANIFEST $_JARSOURCES'
    env['JARCOM'] = "${TEMPFILE('$_JARCOM','$JARCOMSTR')}"
    env['JARSUFFIX'] = '.jar'
Add Builders and construction variables for jar to an Environment .
22,330
def mock(self, slot, rpc_id, value):
    """Store *value* as the canned response for *rpc_id* on *slot*'s tile.

    Later calls to rpc() for this (address, rpc_id) return *value*
    instead of performing the real call.
    """
    # setdefault replaces the previous check-then-insert (LBYL) pattern.
    self.mock_rpcs.setdefault(slot.address, {})[rpc_id] = value
Store a mock return value for an RPC
22,331
def rpc(self, address, rpc_id):
    """Call an RPC and return its result decoded as a 32-bit integer.

    Mocked RPCs registered via ``mock()`` short-circuit the real call.
    Over-long responses are truncated to 4 bytes with a warning; short
    ones raise HardwareError.
    """
    mocked = self.mock_rpcs.get(address, {})
    if rpc_id in mocked:
        return mocked[rpc_id]
    response = self._call_rpc(address, rpc_id, bytes())
    if len(response) != 4:
        self.warn(u"RPC 0x%X on address %d: response had invalid length %d not equal to 4"
                  % (rpc_id, address, len(response)))
    if len(response) < 4:
        raise HardwareError("Response from RPC was not long enough to parse as an integer",
                            rpc_id=rpc_id, address=address, response_length=len(response))
    value, = struct.unpack("<L", response[:4])
    return value
Call an RPC and receive the result as an integer .
22,332
def _get_swig_version(env, swig):
    """Run `swig -version` and return the reported version string, or None."""
    swig = env.subst(swig)
    pipe = SCons.Action._subproc(env, SCons.Util.CLVar(swig) + ['-version'],
                                 stdin='devnull', stderr='devnull',
                                 stdout=subprocess.PIPE)
    if pipe.wait() != 0:
        return
    out = SCons.Util.to_str(pipe.stdout.read())
    # FIX: raw string — '\s'/'\S' in a plain literal are invalid escape
    # sequences (DeprecationWarning, a SyntaxWarning/error in newer Pythons).
    match = re.search(r'SWIG Version\s+(\S+).*', out, re.MULTILINE)
    if match:
        if verbose:
            print("Version is:%s" % match.group(1))
        return match.group(1)
    if verbose:
        print("Unable to detect version: [%s]" % out)
Run the SWIG command line tool to get and return the version number
22,333
def generate(env):
    """Add Builders and construction variables for swig to an Environment."""
    c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
    java_file = SCons.Tool.CreateJavaFileBuilder(env)
    # Wire the .i suffix into all three builders identically.
    for builder in (c_file, cxx_file, java_file):
        builder.suffix['.i'] = swigSuffixEmitter
        builder.add_action('.i', SwigAction)
        builder.add_emitter('.i', _swigEmitter)
    if 'SWIG' not in env:
        env['SWIG'] = env.Detect(swigs) or swigs[0]
    env['SWIGVERSION'] = _get_swig_version(env, env['SWIG'])
    env['SWIGFLAGS'] = SCons.Util.CLVar('')
    env['SWIGDIRECTORSUFFIX'] = '_wrap.h'
    env['SWIGCFILESUFFIX'] = '_wrap$CFILESUFFIX'
    env['SWIGCXXFILESUFFIX'] = '_wrap$CXXFILESUFFIX'
    env['_SWIGOUTDIR'] = r'${"-outdir \"%s\"" % SWIGOUTDIR}'
    env['SWIGPATH'] = []
    env['SWIGINCPREFIX'] = '-I'
    env['SWIGINCSUFFIX'] = ''
    env['_SWIGINCFLAGS'] = '$( ${_concat(SWIGINCPREFIX, SWIGPATH, SWIGINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
    env['SWIGCOM'] = '$SWIG -o $TARGET ${_SWIGOUTDIR} ${_SWIGINCFLAGS} $SWIGFLAGS $SOURCES'
Add Builders and construction variables for swig to an Environment .
22,334
def _select_ftdi_channel(channel):
    """Select a multiplexer channel (0-7) via an FTDI chip using pylibftdi.

    Raises:
        ArgumentError: if *channel* is outside the valid 0-7 range.
    """
    # BUG FIX: the old guard used `channel > 8`, silently accepting 8 even
    # though the error message (and three select lines) say only 0-7 exist.
    if channel < 0 or channel > 7:
        raise ArgumentError("FTDI-selected multiplexer only has channels 0-7 valid, "
                            "make sure you specify channel with -c channel=number",
                            channel=channel)
    from pylibftdi import BitBangDevice
    bb = BitBangDevice(auto_detach=False)
    bb.direction = 0b111
    bb.port = channel
Select multiplexer channel . Currently uses a FTDI chip via pylibftdi
22,335
def parse_binary_descriptor(bindata, sensor_log=None):
    """Decode a 14-byte binary streamer descriptor into a DataStreamer."""
    if len(bindata) != 14:
        raise ArgumentError("Invalid length of binary data in streamer descriptor",
                            length=len(bindata), expected=14, data=bindata)
    dest_tile, stream_id, trigger, format_code, type_code = struct.unpack("<8sHBBBx", bindata)
    dest_id = SlotIdentifier.FromEncoded(dest_tile)
    selector = DataStreamSelector.FromEncoded(stream_id)
    format_name = DataStreamer.KnownFormatCodes.get(format_code)
    type_name = DataStreamer.KnownTypeCodes.get(type_code)
    if format_name is None:
        raise ArgumentError("Unknown format code", code=format_code,
                            known_code=DataStreamer.KnownFormatCodes)
    if type_name is None:
        raise ArgumentError("Unknown type code", code=type_code,
                            known_codes=DataStreamer.KnownTypeCodes)
    with_other = None
    if trigger & (1 << 7):
        # High bit set: manual streamer triggered along with another one;
        # the low 7 bits carry the other streamer's index.
        auto = False
        with_other = trigger & ((1 << 7) - 1)
    elif trigger == 0:
        auto = False
    elif trigger == 1:
        auto = True
    else:
        raise ArgumentError("Unknown trigger type for streamer", trigger_code=trigger)
    return DataStreamer(selector, dest_id, format_name, auto, type_name,
                        with_other=with_other, sensor_log=sensor_log)
Convert a binary streamer descriptor into a DataStreamer object.
22,336
def create_binary_descriptor(streamer):
    """Pack a DataStreamer into its 14-byte binary descriptor."""
    if streamer.automatic:
        trigger = 1
    elif streamer.with_other is not None:
        # High bit flags a "with other" streamer; low 7 bits hold its index.
        trigger = (1 << 7) | streamer.with_other
    else:
        trigger = 0
    return struct.pack("<8sHBBBx",
                       streamer.dest.encode(),
                       streamer.selector.encode(),
                       trigger,
                       streamer.KnownFormats[streamer.format],
                       streamer.KnownTypes[streamer.report_type])
Create a packed binary descriptor of a DataStreamer object .
22,337
def parse_string_descriptor(string_desc):
    """Parse a string descriptor of a streamer into a DataStreamer object.

    Raises:
        SensorGraphSemanticError: for unsupported security / realtime /
            broadcast combinations.
    """
    if not isinstance(string_desc, str):
        string_desc = str(string_desc)
    # The grammar requires a trailing semicolon; add one if missing.
    if not string_desc.endswith(';'):
        string_desc += ';'
    parsed = get_streamer_parser().parseString(string_desc)[0]

    realtime = 'realtime' in parsed
    broadcast = 'broadcast' in parsed
    encrypted = 'security' in parsed and parsed['security'] == 'encrypted'
    signed = 'security' in parsed and parsed['security'] == 'signed'
    auto = 'manual' not in parsed

    with_other = None
    if 'with_other' in parsed:
        with_other = parsed['with_other']
        auto = False

    dest = SlotIdentifier.FromString('controller')
    if 'explicit_tile' in parsed:
        dest = parsed['explicit_tile']

    selector = parsed['selector']

    if realtime and (encrypted or signed):
        raise SensorGraphSemanticError("Realtime streamers cannot be either signed or encrypted")
    if broadcast and (encrypted or signed):
        raise SensorGraphSemanticError("Broadcast streamers cannot be either signed or encrypted")

    report_type = 'broadcast' if broadcast else 'telegram'
    # FIX: removed the dead no-op statements `dest = dest` / `selector = selector`.

    if realtime or broadcast:
        report_format = u'individual'
    elif signed:
        report_format = u'signedlist_userkey'
    elif encrypted:
        raise SensorGraphSemanticError("Encrypted streamers are not yet supported")
    else:
        report_format = u'hashedlist'

    return DataStreamer(selector, dest, report_format, auto,
                        report_type=report_type, with_other=with_other)
Parse a string descriptor of a streamer into a DataStreamer object .
22,338
def generate(env):
    """Add Builders and construction variables for applelink to an Environment."""
    link.generate(env)
    env['FRAMEWORKPATHPREFIX'] = '-F'
    env['_FRAMEWORKPATH'] = '${_concat(FRAMEWORKPATHPREFIX, FRAMEWORKPATH, "", __env__)}'
    env['_FRAMEWORKS'] = '${_concat("-framework ", FRAMEWORKS, "", __env__)}'
    env['LINKCOM'] = env['LINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
    env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -dynamiclib')
    env['SHLINKCOM'] = env['SHLINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
    # Loadable modules (bundles) have no prefix/suffix on macOS.
    env['LDMODULEPREFIX'] = ''
    env['LDMODULESUFFIX'] = ''
    env['LDMODULEFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -bundle')
    env['LDMODULECOM'] = '$LDMODULE -o ${TARGET} $LDMODULEFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
Add Builders and construction variables for applelink to an Environment .
22,339
def _generateGUID ( slnfile , name ) : m = hashlib . md5 ( ) m . update ( bytearray ( ntpath . normpath ( str ( slnfile ) ) + str ( name ) , 'utf-8' ) ) solution = m . hexdigest ( ) . upper ( ) solution = "{" + solution [ : 8 ] + "-" + solution [ 8 : 12 ] + "-" + solution [ 12 : 16 ] + "-" + solution [ 16 : 20 ] + "-" + solution [ 20 : 32 ] + "}" return solution
This generates a dummy GUID for the sln file to use . It is based on the MD5 signatures of the sln filename plus the name of the project . It basically just needs to be unique and not change with each invocation .
22,340
def makeHierarchy(sources):
    """Break a list of file paths into a nested folder hierarchy.

    Each dict value is either a sub-folder (dict) or the original path
    string of a file.
    """
    hierarchy = {}
    for filename in sources:
        parts = splitFully(filename)
        if not parts:
            continue
        node = hierarchy
        for folder in parts[:-1]:
            node = node.setdefault(folder, {})
        node[parts[-1]] = filename
    return hierarchy
Break a list of files into a hierarchy ; for each value if it is a string then it is a file . If it is a dictionary it is a folder . The string is the original path of the file .
22,341
def GenerateDSP(dspfile, source, env):
    """Generate a project file matching the MSVS version in use."""
    version_num = 6.0
    if 'MSVS_VERSION' in env:
        version_num, _suite = msvs_parse_version(env['MSVS_VERSION'])
    if version_num >= 10.0:
        generator = _GenerateV10DSP(dspfile, source, env)
    elif version_num >= 7.0:
        generator = _GenerateV7DSP(dspfile, source, env)
    else:
        generator = _GenerateV6DSP(dspfile, source, env)
    generator.Build()
Generates a Project file based on the version of MSVS that is being used
22,342
# Emitter for the MSVSSolution builder: rewrites the target to use
# $MSVSSOLUTIONSUFFIX and encodes env settings (name, variant, slnguid,
# projects) plus the target path into a single Value() source node so the
# solution is regenerated whenever any of them change.  'variant' is
# required and must be a string or list of strings; the other settings are
# optional.  Raises SCons.Errors.InternalError on wrong types.
# NOTE(review): flattened one-liner kept byte-identical; the nesting of the
# long if/elif chains is ambiguous in this form — do not reflow blindly.
def solutionEmitter ( target , source , env ) : if source [ 0 ] == target [ 0 ] : source = [ ] ( base , suff ) = SCons . Util . splitext ( str ( target [ 0 ] ) ) suff = env . subst ( '$MSVSSOLUTIONSUFFIX' ) target [ 0 ] = base + suff if not source : source = 'sln_inputs:' if 'name' in env : if SCons . Util . is_String ( env [ 'name' ] ) : source = source + ' "%s"' % env [ 'name' ] else : raise SCons . Errors . InternalError ( "name must be a string" ) if 'variant' in env : if SCons . Util . is_String ( env [ 'variant' ] ) : source = source + ' "%s"' % env [ 'variant' ] elif SCons . Util . is_List ( env [ 'variant' ] ) : for variant in env [ 'variant' ] : if SCons . Util . is_String ( variant ) : source = source + ' "%s"' % variant else : raise SCons . Errors . InternalError ( "name must be a string or a list of strings" ) else : raise SCons . Errors . InternalError ( "variant must be a string or a list of strings" ) else : raise SCons . Errors . InternalError ( "variant must be specified" ) if 'slnguid' in env : if SCons . Util . is_String ( env [ 'slnguid' ] ) : source = source + ' "%s"' % env [ 'slnguid' ] else : raise SCons . Errors . InternalError ( "slnguid must be a string" ) if 'projects' in env : if SCons . Util . is_String ( env [ 'projects' ] ) : source = source + ' "%s"' % env [ 'projects' ] elif SCons . Util . is_List ( env [ 'projects' ] ) : for t in env [ 'projects' ] : if SCons . Util . is_String ( t ) : source = source + ' "%s"' % t source = source + ' "%s"' % str ( target [ 0 ] ) source = [ SCons . Node . Python . Value ( source ) ] return ( [ target [ 0 ] ] , source )
Sets up the DSW dependencies .
22,343
# Adds the MSVSProject/MSVSSolution builders and all MSVS* construction
# variables to the Environment: the SConscript location, the scons command
# lines used from inside Visual Studio (build/rebuild/clean), and the
# version-dependent project/solution suffixes and file encoding, keyed off
# MSVS_VERSION (defaulting to 7.0 when unset).
# NOTE(review): flattened one-liner with global state and try/except
# structure that is ambiguous in this form; kept byte-identical.
def generate ( env ) : try : env [ 'BUILDERS' ] [ 'MSVSProject' ] except KeyError : env [ 'BUILDERS' ] [ 'MSVSProject' ] = projectBuilder try : env [ 'BUILDERS' ] [ 'MSVSSolution' ] except KeyError : env [ 'BUILDERS' ] [ 'MSVSSolution' ] = solutionBuilder env [ 'MSVSPROJECTCOM' ] = projectAction env [ 'MSVSSOLUTIONCOM' ] = solutionAction if SCons . Script . call_stack : env [ 'MSVSSCONSCRIPT' ] = SCons . Script . call_stack [ 0 ] . sconscript else : global default_MSVS_SConscript if default_MSVS_SConscript is None : default_MSVS_SConscript = env . File ( 'SConstruct' ) env [ 'MSVSSCONSCRIPT' ] = default_MSVS_SConscript env [ 'MSVSSCONS' ] = '"%s" -c "%s"' % ( python_executable , getExecScriptMain ( env ) ) env [ 'MSVSSCONSFLAGS' ] = '-C "${MSVSSCONSCRIPT.dir.get_abspath()}" -f ${MSVSSCONSCRIPT.name}' env [ 'MSVSSCONSCOM' ] = '$MSVSSCONS $MSVSSCONSFLAGS' env [ 'MSVSBUILDCOM' ] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"' env [ 'MSVSREBUILDCOM' ] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"' env [ 'MSVSCLEANCOM' ] = '$MSVSSCONSCOM -c "$MSVSBUILDTARGET"' msvc_setup_env_once ( env ) if 'MSVS_VERSION' in env : version_num , suite = msvs_parse_version ( env [ 'MSVS_VERSION' ] ) else : ( version_num , suite ) = ( 7.0 , None ) if 'MSVS' not in env : env [ 'MSVS' ] = { } if ( version_num < 7.0 ) : env [ 'MSVS' ] [ 'PROJECTSUFFIX' ] = '.dsp' env [ 'MSVS' ] [ 'SOLUTIONSUFFIX' ] = '.dsw' elif ( version_num < 10.0 ) : env [ 'MSVS' ] [ 'PROJECTSUFFIX' ] = '.vcproj' env [ 'MSVS' ] [ 'SOLUTIONSUFFIX' ] = '.sln' else : env [ 'MSVS' ] [ 'PROJECTSUFFIX' ] = '.vcxproj' env [ 'MSVS' ] [ 'SOLUTIONSUFFIX' ] = '.sln' if ( version_num >= 10.0 ) : env [ 'MSVSENCODING' ] = 'utf-8' else : env [ 'MSVSENCODING' ] = 'Windows-1252' env [ 'GET_MSVSPROJECTSUFFIX' ] = GetMSVSProjectSuffix env [ 'GET_MSVSSOLUTIONSUFFIX' ] = GetMSVSSolutionSuffix env [ 'MSVSPROJECTSUFFIX' ] = '${GET_MSVSPROJECTSUFFIX}' env [ 'MSVSSOLUTIONSUFFIX' ] = '${GET_MSVSSOLUTIONSUFFIX}' env [ 'SCONS_HOME' ] = os . environ . get ( 'SCONS_HOME' )
Add Builders and construction variables for Microsoft Visual Studio project files to an Environment .
22,344
def PrintWorkspace(self):
    """Write a V6 DSW workspace file for this project."""
    # V6DSWHeader is a %-template keyed on 'name' and 'dspfile', filled
    # via locals() — so these local variable names must not change.
    name = self.name
    dspfile = os.path.relpath(self.dspfiles[0], self.dsw_folder_path)
    self.file.write(V6DSWHeader % locals())
writes a DSW file
22,345
def waiters(self, path=None):
    """Recursively yield (path, future) for every registered waiter."""
    if path is None:
        path = []
    # Descend to the tree node addressed by *path*.
    node = self._waiters
    for key in path:
        node = node[key]
    for future in node.get(self._LEAF, []):
        yield (path, future)
    for key in node:
        if key is self._LEAF:
            continue
        yield from self.waiters(path=path + [key])
Iterate over all waiters .
22,346
def every_match(self, callback, **kwargs):
    """Invoke *callback* for every received message matching **kwargs.

    Returns an opaque handle suitable for passing to remove_waiter().
    """
    if not kwargs:
        raise ArgumentError("You must specify at least one message field to wait on")
    spec = MessageSpec(**kwargs)
    return (spec, self._add_waiter(spec, callback))
Invoke callback every time a matching message is received .
22,347
def remove_waiter(self, waiter_handle):
    """Remove a message callback registered via every_match()."""
    spec, waiter = waiter_handle
    self._remove_waiter(spec, waiter)
Remove a message callback .
22,348
def clear(self):
    """Cancel all pending waiters and reset the waiter tree."""
    for _path, waiter in self.waiters():
        # Only unfinished futures can take an exception; plain callbacks
        # registered via every_match are simply dropped.
        if isinstance(waiter, asyncio.Future) and not waiter.done():
            waiter.set_exception(asyncio.CancelledError())
    self._waiters = {}
Clear all waiters .
22,349
def wait_for(self, timeout=None, **kwargs):
    """Return an awaitable resolving with the next message matching **kwargs.

    The waiter removes itself once the future completes (result, error,
    or cancellation).
    """
    if not kwargs:
        raise ArgumentError("You must specify at least one message field to wait on")
    spec = MessageSpec(**kwargs)
    future = self._add_waiter(spec)
    future.add_done_callback(lambda _fut: self._remove_waiter(spec, future))
    return asyncio.wait_for(future, timeout=timeout)
Wait for a specific matching message or timeout .
22,350
async def process_message(self, message, wait=True):
    """Dispatch *message* to all matching waiters.

    Walks the waiter tree breadth-first, resolving futures and invoking
    every_match callbacks.  Returns True if at least one waiter consumed
    the message, False if it was ignored.
    """
    to_check = deque([self._waiters])
    ignored = True
    while len(to_check) > 0:
        context = to_check.popleft()
        waiters = context.get(OperationManager._LEAF, [])
        for waiter in waiters:
            if isinstance(waiter, asyncio.Future):
                waiter.set_result(message)
            else:
                try:
                    await _wait_or_launch(self._loop, waiter, message, wait)
                # FIX: was a bare `except:` — never swallow SystemExit /
                # KeyboardInterrupt; callback errors are still logged.
                except Exception:
                    self._logger.warning("Error calling every_match callback, callback=%s, message=%s",
                                         waiter, message, exc_info=True)
            ignored = False
        for key in context:
            if key is OperationManager._LEAF:
                continue
            message_val = _get_key(message, key)
            if message_val is _MISSING:
                continue
            next_level = context[key]
            if message_val in next_level:
                to_check.append(next_level[message_val])
    return not ignored
Process a message to see if it wakes any waiters .
22,351
def generate(env):
    """Add Builders and construction variables for zip to an Environment."""
    try:
        bld = env['BUILDERS']['Zip']
    except KeyError:
        bld = ZipBuilder
        env['BUILDERS']['Zip'] = bld
    env['ZIP'] = 'zip'
    env['ZIPFLAGS'] = SCons.Util.CLVar('')
    env['ZIPCOM'] = zipAction
    env['ZIPCOMPRESSION'] = zipcompression
    env['ZIPSUFFIX'] = '.zip'
    env['ZIPROOT'] = SCons.Util.CLVar('')
Add Builders and construction variables for zip to an Environment .
22,352
def one_line_desc(obj):
    """Return the one-line short description of *obj*'s docstring, or ''."""
    logger = logging.getLogger(__name__)
    try:
        return ParsedDocstring(obj.__doc__).short_desc
    # FIX: was a bare `except:` — narrow to Exception so SystemExit and
    # KeyboardInterrupt propagate; parse failures still fall back to "".
    except Exception:
        logger.warning("Could not parse docstring for %s", obj, exc_info=True)
        return ""
Get a one line description of a class .
22,353
def instantiate_device(virtual_dev, config, loop):
    """Find a virtual device by name or .py path and instantiate it.

    Exits the process with status 1 if the device cannot be loaded.
    """
    device_config = {}
    if 'device' in config:
        device_config = config['device']
    try:
        registry = ComponentRegistry()
        if virtual_dev.endswith('.py'):
            # A direct path to a python file bypasses the entry-point search.
            _name, dev = registry.load_extension(virtual_dev,
                                                 class_filter=VirtualIOTileDevice,
                                                 unique=True)
        else:
            _name, dev = registry.load_extensions('iotile.virtual_device',
                                                  name_filter=virtual_dev,
                                                  class_filter=VirtualIOTileDevice,
                                                  product_name="virtual_device",
                                                  unique=True)
        return dev(device_config)
    except ArgumentError as err:
        print("ERROR: Could not load virtual device (%s): %s" % (virtual_dev, err.msg))
        sys.exit(1)
Find a virtual device by name and instantiate it
22,354
def instantiate_interface(virtual_iface, config, loop):
    """Find a virtual interface by name or .py path and instantiate it.

    'null' yields a bare StandardDeviceServer.  Exits the process with
    status 1 if the server cannot be loaded.
    """
    if virtual_iface == 'null':
        return StandardDeviceServer(None, {}, loop=loop)
    iface_config = {}
    if 'interface' in config:
        iface_config = config['interface']
    try:
        registry = ComponentRegistry()
        if virtual_iface.endswith('.py'):
            # A direct path to a python file bypasses the entry-point search.
            _name, iface = registry.load_extension(virtual_iface,
                                                   class_filter=AbstractDeviceServer,
                                                   unique=True)
        else:
            _name, iface = registry.load_extensions('iotile.device_server',
                                                    name_filter=virtual_iface,
                                                    class_filter=AbstractDeviceServer,
                                                    unique=True)
        return iface(None, iface_config, loop=loop)
    except ArgumentError as err:
        print("ERROR: Could not load device_server (%s): %s" % (virtual_iface, err.msg))
        sys.exit(1)
Find a virtual interface by name and instantiate it
22,355
def generate(env):
    """Add Builders and construction variables for tar to an Environment."""
    try:
        bld = env['BUILDERS']['Tar']
    except KeyError:
        bld = TarBuilder
        env['BUILDERS']['Tar'] = bld
    env['TAR'] = env.Detect(tars) or 'gtar'
    env['TARFLAGS'] = SCons.Util.CLVar('-c')
    env['TARCOM'] = '$TAR $TARFLAGS -f $TARGET $SOURCES'
    env['TARSUFFIX'] = '.tar'
Add Builders and construction variables for tar to an Environment .
22,356
def register_command(self, name, handler, validator):
    """Register a coroutine command *handler* and its schema *validator* under *name*."""
    self._commands[name] = (handler, validator)
Register a coroutine command handler .
22,357
async def start(self):
    """Start the websocket server and wait until it is listening."""
    if self._server_task is not None:
        self._logger.debug("AsyncValidatingWSServer.start() called twice, ignoring")
        return
    started_signal = self._loop.create_future()
    self._server_task = self._loop.add_task(self._run_server_task(started_signal))
    await started_signal
    # When port 0 / None (auto-assign) was requested, record the bound port.
    if self.port is None:
        self.port = started_signal.result()
Start the websocket server .
22,358
async def _run_server_task(self, started_signal):
    """Background task that owns the websockets server lifecycle.

    Signals startup success (bound port) or failure through
    *started_signal*, then idles until cancelled via stop().
    """
    try:
        server = await websockets.serve(self._manage_connection, self.host, self.port)
        bound_port = server.sockets[0].getsockname()[1]
        started_signal.set_result(bound_port)
    except Exception as err:
        self._logger.exception("Error starting server on host %s, port %s", self.host, self.port)
        started_signal.set_exception(err)
        return
    try:
        # Idle until cancelled; the server serves via its own callbacks.
        while True:
            await asyncio.sleep(1)
    except asyncio.CancelledError:
        self._logger.info("Stopping server due to stop() command")
    finally:
        server.close()
        await server.wait_closed()
        self._logger.debug("Server stopped, exiting task")
Create a BackgroundTask to manage the server .
22,359
async def send_event(self, con, name, payload):
    """Serialize and send an event message to client connection *con*."""
    encoded = pack(dict(type="event", name=name, payload=payload))
    await con.send(encoded)
Send an event to a client connection .
22,360
def DviPdfPsFunction(XXXDviAction, target=None, source=None, env=None):
    """A builder for DVI files that sets the TEXPICTS environment
    variable before running dvi2ps or dvipdf."""
    try:
        abspath = source[0].attributes.path
    except AttributeError:
        abspath = ''

    # Temporarily point TEXPICTS at the source directory so included
    # pictures resolve; remember the previous value so we can restore it.
    saved_env = SCons.Scanner.LaTeX.modify_env_var(env, 'TEXPICTS', abspath)

    result = XXXDviAction(target, source, env)

    if saved_env is _null:
        # There was no TEXPICTS before — remove the one we added.
        try:
            del env['ENV']['TEXPICTS']
        except KeyError:
            pass
    else:
        env['ENV']['TEXPICTS'] = saved_env

    return result
A builder for DVI files that sets the TEXPICTS environment variable before running dvi2ps or dvipdf .
22,361
def PDFEmitter(target, source, env):
    """Strip any .aux or .log nodes from the source list.

    Those files are by-products of the TeX Builder that produced the
    .dvi input; only the .dvi file matters for PDF generation.
    """
    filtered = [node for node in source
                if not SCons.Util.splitext(str(node))[1] in ['.aux', '.log']]
    return (target, filtered)
Strips any .aux or .log files from the input source list. These are created by the TeX Builder that in all likelihood was used to generate the .dvi file we're using as input, and we only care about the .dvi file.
22,362
def generate(env):
    """Add Builders and construction variables for dvipdf to an Environment."""
    # Lazily create the shared module-level actions exactly once.
    global PDFAction
    if PDFAction is None:
        PDFAction = SCons.Action.Action('$DVIPDFCOM', '$DVIPDFCOMSTR')

    global DVIPDFAction
    if DVIPDFAction is None:
        DVIPDFAction = SCons.Action.Action(DviPdfFunction, strfunction=DviPdfStrFunction)

    from . import pdf
    pdf.generate(env)

    # Teach the generic PDF builder how to handle .dvi inputs.
    bld = env['BUILDERS']['PDF']
    bld.add_action('.dvi', DVIPDFAction)
    bld.add_emitter('.dvi', PDFEmitter)

    env['DVIPDF'] = 'dvipdf'
    env['DVIPDFFLAGS'] = SCons.Util.CLVar('')
    env['DVIPDFCOM'] = 'cd ${TARGET.dir} && $DVIPDF $DVIPDFFLAGS ${SOURCE.file} ${TARGET.file}'

    env['PDFCOM'] = ['$DVIPDFCOM']
Add Builders and construction variables for dvipdf to an Environment .
22,363
def FromString(cls, desc):
    """Parse this stop condition from a string representation.

    Expected grammar: the literal 'run_time' followed by a time interval,
    e.g. 'run_time 10 seconds'.

    Raises:
        ArgumentError: If desc does not match the expected grammar.
    """
    parse_exp = Literal(u'run_time').suppress() + time_interval(u'interval')

    try:
        data = parse_exp.parseString(desc)
        return TimeBasedStopCondition(data[u'interval'][0])
    except ParseException:
        raise ArgumentError(u"Could not parse time based stop condition")
Parse this stop condition from a string representation .
22,364
def collectintargz(target, source, env):
    """Puts all source files into a tar.gz file."""
    # The rpm tool depends on a source package; gather every source file.
    sources = env.FindSourceFiles()

    # Filter out the targets themselves.
    sources = [s for s in sources if s not in target]

    # The .spec file is not found by FindSourceFiles, add it explicitly.
    sources.extend([s for s in source if str(s).rfind('.spec') != -1])
    sources.sort()

    # Default tarball name is derived from the rpm target name...
    tarball = (str(target[0]) + ".tar.gz").replace('.rpm', '')
    try:
        # ...but SOURCE_URL, when present, overrides it with its basename.
        # NOTE(review): a missing SOURCE_URL raises here, making it mandatory.
        tarball = env['SOURCE_URL'].split('/')[-1]
    except KeyError as e:
        raise SCons.Errors.UserError("Missing PackageTag '%s' for RPM packager" % e.args[0])

    tarball = src_targz.package(env, source=sources, target=tarball,
                                PACKAGEROOT=env['PACKAGEROOT'], )

    return (target, tarball)
Puts all source files into a tar . gz file .
22,365
def build_specfile(target, source, env):
    """Builds an RPM specfile from a dictionary with string metadata and
    by analyzing a tree of nodes.

    Raises:
        SCons.Errors.UserError: If a required package field is missing.
    """
    try:
        # FIX: the original opened the file and only closed it on the success
        # path, leaking the handle if any write raised. Use a context manager
        # so the file is always closed.
        with open(target[0].get_abspath(), 'w') as spec_file:
            spec_file.write(build_specfile_header(env))
            spec_file.write(build_specfile_sections(env))
            spec_file.write(build_specfile_filesection(env, source))

        # Optional user hook for post-processing the generated specfile.
        if 'CHANGE_SPECFILE' in env:
            env['CHANGE_SPECFILE'](target, source)
    except KeyError as e:
        raise SCons.Errors.UserError('"%s" package field for RPM is missing.' % e.args[0])
Builds an RPM specfile from a dictionary with string metadata and by analyzing a tree of nodes.
22,366
def build_specfile_sections(spec):
    """Builds the sections of a rpm specfile."""
    # NOTE(review): local name `str` shadows the builtin throughout.
    str = ""

    # %description is the only mandatory section.
    mandatory_sections = {
        'DESCRIPTION': '\n%%description\n%s\n\n', }

    str = str + SimpleTagCompiler(mandatory_sections).compile(spec)

    optional_sections = {
        'DESCRIPTION_': '%%description -l %s\n%s\n\n',
        'CHANGELOG': '%%changelog\n%s\n\n',
        'X_RPM_PREINSTALL': '%%pre\n%s\n\n',
        'X_RPM_POSTINSTALL': '%%post\n%s\n\n',
        'X_RPM_PREUNINSTALL': '%%preun\n%s\n\n',
        'X_RPM_POSTUNINSTALL': '%%postun\n%s\n\n',
        'X_RPM_VERIFY': '%%verify\n%s\n\n',

        # These are for internal use but could be overridden.
        'X_RPM_PREP': '%%prep\n%s\n\n',
        'X_RPM_BUILD': '%%build\n%s\n\n',
        'X_RPM_INSTALL': '%%install\n%s\n\n',
        'X_RPM_CLEAN': '%%clean\n%s\n\n', }

    # Fill in sane scons-based defaults for the build scriptlets.
    # NOTE: this mutates the caller's spec dict in place.
    if 'X_RPM_PREP' not in spec:
        spec['X_RPM_PREP'] = '[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf "$RPM_BUILD_ROOT"' + '\n%setup -q'

    if 'X_RPM_BUILD' not in spec:
        spec['X_RPM_BUILD'] = '[ ! -e "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && mkdir "$RPM_BUILD_ROOT"'

    if 'X_RPM_INSTALL' not in spec:
        spec['X_RPM_INSTALL'] = 'scons --install-sandbox="$RPM_BUILD_ROOT" "$RPM_BUILD_ROOT"'

    if 'X_RPM_CLEAN' not in spec:
        spec['X_RPM_CLEAN'] = '[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf "$RPM_BUILD_ROOT"'

    str = str + SimpleTagCompiler(optional_sections, mandatory=0).compile(spec)

    return str
Builds the sections of a rpm specfile .
22,367
def build_specfile_header(spec):
    """Builds all sections but the %file of a rpm specfile"""
    # NOTE(review): local name `str` shadows the builtin throughout.
    str = ""

    # A missing mandatory field will raise KeyError from the compiler.
    mandatory_header_fields = {
        'NAME': '%%define name %s\nName: %%{name}\n',
        'VERSION': '%%define version %s\nVersion: %%{version}\n',
        'PACKAGEVERSION': '%%define release %s\nRelease: %%{release}\n',
        'X_RPM_GROUP': 'Group: %s\n',
        'SUMMARY': 'Summary: %s\n',
        'LICENSE': 'License: %s\n', }

    str = str + SimpleTagCompiler(mandatory_header_fields).compile(spec)

    # Keys ending in '_' are language-tagged (e.g. SUMMARY_de).
    optional_header_fields = {
        'VENDOR': 'Vendor: %s\n',
        'X_RPM_URL': 'Url: %s\n',
        'SOURCE_URL': 'Source: %s\n',
        'SUMMARY_': 'Summary(%s): %s\n',
        'X_RPM_DISTRIBUTION': 'Distribution: %s\n',
        'X_RPM_ICON': 'Icon: %s\n',
        'X_RPM_PACKAGER': 'Packager: %s\n',
        'X_RPM_GROUP_': 'Group(%s): %s\n',

        'X_RPM_REQUIRES': 'Requires: %s\n',
        'X_RPM_PROVIDES': 'Provides: %s\n',
        'X_RPM_CONFLICTS': 'Conflicts: %s\n',
        'X_RPM_BUILDREQUIRES': 'BuildRequires: %s\n',

        'X_RPM_SERIAL': 'Serial: %s\n',
        'X_RPM_EPOCH': 'Epoch: %s\n',
        'X_RPM_AUTOREQPROV': 'AutoReqProv: %s\n',
        'X_RPM_EXCLUDEARCH': 'ExcludeArch: %s\n',
        'X_RPM_EXCLUSIVEARCH': 'ExclusiveArch: %s\n',
        'X_RPM_PREFIX': 'Prefix: %s\n',

        # internal use
        'X_RPM_BUILDROOT': 'BuildRoot: %s\n', }

    # Fill in a default buildroot. NOTE: mutates the caller's spec in place.
    if 'X_RPM_BUILDROOT' not in spec:
        spec['X_RPM_BUILDROOT'] = '%{_tmppath}/%{name}-%{version}-%{release}'

    str = str + SimpleTagCompiler(optional_header_fields, mandatory=0).compile(spec)
    return str
Builds all sections but the %file of a rpm specfile
22,368
def build_specfile_filesection(spec, files):
    """builds the %file section of the specfile"""
    # NOTE(review): local names `str` and `file` shadow builtins throughout.
    str = '%files\n'

    # Default file attributes. NOTE: mutates the caller's spec in place.
    if 'X_RPM_DEFATTR' not in spec:
        spec['X_RPM_DEFATTR'] = '(-,root,root)'

    str = str + '%%defattr %s\n' % spec['X_RPM_DEFATTR']

    supported_tags = {
        'PACKAGING_CONFIG': '%%config %s',
        'PACKAGING_CONFIG_NOREPLACE': '%%config(noreplace) %s',
        'PACKAGING_DOC': '%%doc %s',
        'PACKAGING_UNIX_ATTR': '%%attr %s',
        'PACKAGING_LANG_': '%%lang(%s) %s',
        'PACKAGING_X_RPM_VERIFY': '%%verify %s',
        'PACKAGING_X_RPM_DIR': '%%dir %s',
        'PACKAGING_X_RPM_DOCDIR': '%%docdir %s',
        'PACKAGING_X_RPM_GHOST': '%%ghost %s', }

    for file in files:
        # Collect the tags this node actually carries; nodes without
        # GetTag support are skipped silently.
        tags = {}
        for k in list(supported_tags.keys()):
            try:
                v = file.GetTag(k)
                if v:
                    tags[k] = v
            except AttributeError:
                pass

        # Emit the tag prefixes followed by the install location.
        str = str + SimpleTagCompiler(supported_tags, mandatory=0).compile(tags)
        str = str + ' '
        str = str + file.GetTag('PACKAGING_INSTALL_LOCATION')
        str = str + '\n\n'

    return str
builds the %file section of the specfile
22,369
def compile(self, values):
    """Compiles the tagset and returns a str containing the result.

    Domestic tags (not ending in '_') are formatted directly from
    ``values``; international tags (ending in '_') are matched against
    every value key carrying a two-letter country suffix.  Missing keys
    raise KeyError only when the compiler is mandatory.
    """
    def is_international(tag):
        return tag.endswith('_')

    def get_country_code(tag):
        return tag[-2:]

    def strip_country_code(tag):
        return tag[:-2]

    pairs = list(self.tagset.items())
    result = ""

    # Pass 1: plain (domestic) tags.
    for key, fmt in pairs:
        if is_international(key):
            continue
        try:
            result = result + fmt % values[key]
        except KeyError as e:
            if self.mandatory:
                raise e

    # Pass 2: language-tagged (international) entries.
    for key, fmt in pairs:
        if not is_international(key):
            continue
        try:
            matches = [(get_country_code(k), v) for k, v in values.items()
                       if strip_country_code(k) == key]
            for item in matches:
                result = result + fmt % item
        except KeyError as e:
            if self.mandatory:
                raise e

    return result
Compiles the tagset and returns a str containing the result
22,370
def generate(env):
    """Add Builders and construction variables for ifl to an Environment."""
    fscan = FortranScan("FORTRANPATH")
    SCons.Tool.SourceFileScanner.add_scanner('.i', fscan)
    SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan)

    # Register preprocessed-source suffixes without clobbering any
    # suffixes another tool already configured.
    if 'FORTRANFILESUFFIXES' not in env:
        env['FORTRANFILESUFFIXES'] = ['.i']
    else:
        env['FORTRANFILESUFFIXES'].append('.i')

    if 'F90FILESUFFIXES' not in env:
        env['F90FILESUFFIXES'] = ['.i90']
    else:
        env['F90FILESUFFIXES'].append('.i90')

    add_all_to_env(env)

    # ifl uses MSVC-style /c and /Fo flags.
    env['FORTRAN'] = 'ifl'
    env['SHFORTRAN'] = '$FORTRAN'
    env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
    env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
    env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
    env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
Add Builders and construction variables for ifl to an Environment .
22,371
def generate(env):
    """Add Builders and construction variables for bcc to an Environment."""
    findIt('bcc32', env)
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)

    # Borland bcc32 compiles both C and C++ sources.
    for suffix in ['.c', '.cpp']:
        static_obj.add_action(suffix, SCons.Defaults.CAction)
        shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
        static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
        shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)

    env['CC'] = 'bcc32'
    env['CCFLAGS'] = SCons.Util.CLVar('')
    env['CFLAGS'] = SCons.Util.CLVar('')
    env['CCCOM'] = '$CC -q $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES'
    env['SHCC'] = '$CC'
    env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
    env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
    # -WD builds a DLL.
    env['SHCCCOM'] = '$SHCC -WD $SHCFLAGS $SHCCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES'
    env['CPPDEFPREFIX'] = '-D'
    env['CPPDEFSUFFIX'] = ''
    env['INCPREFIX'] = '-I'
    env['INCSUFFIX'] = ''
    env['SHOBJSUFFIX'] = '.dll'
    env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
    env['CFILESUFFIX'] = '.cpp'
Add Builders and construction variables for bcc to an Environment .
22,372
def require(builder_name):
    """Find an advertised autobuilder and return it.

    Args:
        builder_name (str): The registered name of the autobuild function.

    Returns:
        callable: The first matching autobuild function.

    Raises:
        BuildError: If no matching autobuilder is registered.
    """
    registry = ComponentRegistry()

    for _name, provider in registry.load_extensions('iotile.autobuild', name_filter=builder_name):
        return provider

    raise BuildError('Cannot find required autobuilder, make sure the distribution providing it is installed',
                     name=builder_name)
Find an advertised autobuilder and return it
22,373
def autobuild_onlycopy():
    """Autobuild a project that does not require building firmware, pcb or documentation."""
    try:
        fam = utilities.get_family('module_settings.json')
        autobuild_release(fam)

        release_dir = os.path.join('build', 'output')
        Alias('release', release_dir)
        Default(['release'])
    except unit_test.IOTileException as err:
        print(err.format())
        Exit(1)
Autobuild a project that does not require building firmware pcb or documentation
22,374
def autobuild_docproject():
    """Autobuild a project that only contains documentation."""
    try:
        fam = utilities.get_family('module_settings.json')
        autobuild_release(fam)
        autobuild_documentation(fam.tile)
    except unit_test.IOTileException as err:
        print(err.format())
        Exit(1)
Autobuild a project that only contains documentation
22,375
def autobuild_arm_program(elfname, test_dir=os.path.join('firmware', 'test'), patch=True):
    """Build an ARM program for all targets and build all unit tests.

    Args:
        elfname (str): Name of the output elf image.
        test_dir (str): Directory containing the unit tests
            (defaults to firmware/test).
        patch (bool): Whether to patch the built program.
    """
    try:
        family = utilities.get_family('module_settings.json')
        family.for_all_targets(family.tile.short_name,
                               lambda x: arm.build_program(family.tile, elfname, x, patch=patch))

        # FIX: the test_dir parameter was accepted but ignored; the body
        # hard-coded os.path.join('firmware', 'test'). Honor the parameter.
        unit_test.build_units(test_dir, family.targets(family.tile.short_name))

        Alias('release', os.path.join('build', 'output'))
        Alias('test', os.path.join('build', 'test', 'output'))
        Default(['release', 'test'])

        autobuild_release(family)

        if os.path.exists('doc'):
            autobuild_documentation(family.tile)
    except IOTileException as e:
        print(e.format())
        sys.exit(1)
Build an ARM module for all targets and build all unit tests. If pcb files are given, also build those.
22,376
def autobuild_doxygen(tile):
    """Generate documentation for firmware in this module using doxygen."""
    iotile = IOTile('.')

    doxydir = os.path.join('build', 'doc')
    doxyfile = os.path.join(doxydir, 'doxygen.txt')

    # Timestamp file used as the build product so SCons can track freshness.
    outfile = os.path.join(doxydir, '%s.timestamp' % tile.unique_id)
    env = Environment(ENV=os.environ, tools=[])
    env['IOTILE'] = iotile

    # Suppress doxygen's stdout in a platform-appropriate way.
    if platform.system() == 'Windows':
        action = 'doxygen %s > NUL' % doxyfile
    else:
        action = 'doxygen %s > /dev/null' % doxyfile

    Alias('doxygen', doxydir)
    env.Clean(outfile, doxydir)

    inputfile = doxygen_source_path()

    # First generate the doxygen config file, then run doxygen on it.
    env.Command(doxyfile, inputfile,
                action=env.Action(lambda target, source, env: generate_doxygen_file(str(target[0]), iotile),
                                  "Creating Doxygen Config File"))
    env.Command(outfile, doxyfile,
                action=env.Action(action, "Building Firmware Documentation"))
Generate documentation for firmware in this module using doxygen
22,377
def autobuild_documentation(tile):
    """Generate documentation for this module using a combination of sphinx and breathe."""
    docdir = os.path.join('#doc')
    docfile = os.path.join(docdir, 'conf.py')
    outdir = os.path.join('build', 'output', 'doc', tile.unique_id)
    # Timestamp file serves as the tracked build product.
    outfile = os.path.join(outdir, '%s.timestamp' % tile.unique_id)

    env = Environment(ENV=os.environ, tools=[])

    # If there is C firmware, run doxygen first so breathe can consume it.
    if os.path.exists('firmware'):
        autobuild_doxygen(tile)
        env.Depends(outfile, 'doxygen')

    # Suppress sphinx stdout in a platform-appropriate way; docdir[1:]
    # strips the leading '#' SCons top-relative marker.
    if platform.system() == 'Windows':
        action = 'sphinx-build --no-color -b html %s %s > NUL' % (docdir[1:], outdir)
    else:
        action = 'sphinx-build -b html %s %s > /dev/null' % (docdir[1:], outdir)

    env.Command(outfile, docfile,
                action=env.Action(action, "Building Component Documentation"))
    Alias('documentation', outdir)
    env.Clean(outfile, outdir)
Generate documentation for this module using a combination of sphinx and breathe
22,378
def autobuild_bootstrap_file(file_name, image_list):
    """Combine multiple firmware images into a single bootstrap hex file.

    Args:
        file_name (str): Name of the output combined hex file.
        image_list (list of str): Names of firmware_image products to merge.
    """
    family = utilities.get_family('module_settings.json')
    target = family.platform_independent_target()
    resolver = ProductResolver.Create()

    env = Environment(tools=[])

    output_dir = target.build_dirs()['output']
    build_dir = target.build_dirs()['build']

    build_output_name = os.path.join(build_dir, file_name)
    full_output_name = os.path.join(output_dir, file_name)

    # Resolve each named image and convert it to hex format if needed.
    processed_input_images = []

    for image_name in image_list:
        image_info = resolver.find_unique('firmware_image', image_name)
        image_path = image_info.full_path

        hex_path = arm.ensure_image_is_hex(image_path)
        processed_input_images.append(hex_path)

    # Merge in the build dir, then copy the result into the output dir.
    env.Command(build_output_name, processed_input_images,
                action=Action(arm.merge_hex_executables,
                              "Merging %d hex files into $TARGET" % len(processed_input_images)))
    env.Command(full_output_name, build_output_name, Copy("$TARGET", "$SOURCE"))
Combine multiple firmware images into a single bootstrap hex file .
22,379
def add_identifier(self, name, obj):
    """Add a known identifier resolution.

    The identifier name is normalized to a string before being stored.
    """
    self._known_identifiers[str(name)] = obj
Add a known identifier resolution .
22,380
def resolve_identifier(self, name, expected_type=None):
    """Resolve an identifier to an object.

    Looks the identifier up in this scope first, then recursively in the
    parent scope.

    Args:
        name: The identifier to resolve (normalized to str).
        expected_type (type): Optional type the resolved object must be.

    Raises:
        UnresolvedIdentifierError: If the identifier cannot be resolved
            or resolves to an object of the wrong type.
    """
    key = str(name)

    if key in self._known_identifiers:
        obj = self._known_identifiers[key]
        if expected_type is not None and not isinstance(obj, expected_type):
            raise UnresolvedIdentifierError(u"Identifier resolved to an object of an unexpected type",
                                            name=key, expected_type=expected_type.__name__,
                                            resolved_type=obj.__class__.__name__)
        return obj

    # Fall back to the parent scope; its failure becomes our failure below.
    if self.parent is not None:
        try:
            return self.parent.resolve_identifier(key)
        except UnresolvedIdentifierError:
            pass

    raise UnresolvedIdentifierError(u"Could not resolve identifier", name=key, scope=self.name)
Resolve an identifier to an object .
22,381
def FromReadings(cls, uuid, readings, root_key=AuthProvider.NoKey, signer=None,
                 report_id=IOTileReading.InvalidReadingID, selector=0xFFFF,
                 streamer=0, sent_timestamp=0):
    """Generate an instance of the report format from a list of readings and a uuid.

    The wire format is a 20-byte header, 16 bytes per reading and a
    24-byte footer (lowest/highest id + 16-byte signature).  When
    root_key requests encryption, the reading payload is encrypted
    before the whole report is signed.
    """
    lowest_id = IOTileReading.InvalidReadingID
    highest_id = IOTileReading.InvalidReadingID

    # Total length: 20-byte header + 16 bytes/reading + 24-byte footer.
    report_len = 20 + 16 * len(readings) + 24
    len_low = report_len & 0xFF
    len_high = report_len >> 8

    # Track the id range over readings that carry a valid reading_id.
    unique_readings = [x.reading_id for x in readings if x.reading_id != IOTileReading.InvalidReadingID]
    if len(unique_readings) > 0:
        lowest_id = min(unique_readings)
        highest_id = max(unique_readings)

    header = struct.pack("<BBHLLLBBH", cls.ReportType, len_low, len_high, uuid,
                         report_id, sent_timestamp, root_key, streamer, selector)
    header = bytearray(header)

    # Each reading: stream id, 2 reserved bytes, reading id, time, value.
    packed_readings = bytearray()
    for reading in readings:
        packed_reading = struct.pack("<HHLLL", reading.stream, 0, reading.reading_id,
                                     reading.raw_time, reading.value)
        packed_readings += bytearray(packed_reading)

    footer_stats = struct.pack("<LL", lowest_id, highest_id)

    if signer is None:
        signer = ChainedAuthProvider()

    # Encrypt the reading payload only when a real key type is requested.
    if root_key != signer.NoKey:
        enc_data = packed_readings

        try:
            result = signer.encrypt_report(uuid, root_key, enc_data, report_id=report_id,
                                           sent_timestamp=sent_timestamp)
        except NotFoundError:
            raise ExternalError("Could not encrypt report because no AuthProvider supported "
                                "the requested encryption method for the requested device",
                                device_id=uuid, root_key=root_key)

        signed_data = header + result['data'] + footer_stats
    else:
        signed_data = header + packed_readings + footer_stats

    # Signature covers header + (possibly encrypted) readings + stats.
    try:
        signature = signer.sign_report(uuid, root_key, signed_data, report_id=report_id,
                                       sent_timestamp=sent_timestamp)
    except NotFoundError:
        raise ExternalError("Could not sign report because no AuthProvider supported the requested "
                            "signature method for the requested device", device_id=uuid,
                            root_key=root_key)

    # Only the first 16 bytes of the signature are stored in the footer.
    footer = struct.pack("16s", bytes(signature['signature'][:16]))
    footer = bytearray(footer)

    data = signed_data + footer
    return SignedListReport(data)
Generate an instance of the report format from a list of readings and a uuid .
22,382
def decode(self):
    """Decode this report into a list of readings.

    Parses the header and footer, verifies the signature and (if needed)
    decrypts the reading payload.  Returns a tuple of
    (parsed_readings, []); both lists are empty when verification or
    decryption fails.
    """
    # Header: type, 16-bit length (split), uuid, report id, timestamp,
    # signature flags, origin streamer, streamer selector.
    fmt, len_low, len_high, device_id, report_id, sent_timestamp, signature_flags, origin_streamer, streamer_selector = unpack("<BBHLLLBBH", self.raw_report[:20])

    assert fmt == 1

    length = (len_high << 8) | len_low

    self.origin = device_id
    self.report_id = report_id
    self.sent_timestamp = sent_timestamp
    self.origin_streamer = origin_streamer
    self.streamer_selector = streamer_selector
    self.signature_flags = signature_flags

    assert len(self.raw_report) == length

    remaining = self.raw_report[20:]
    assert len(remaining) >= 24

    # Footer is the last 24 bytes: lowest id, highest id, 16-byte signature.
    readings = remaining[:-24]
    footer = remaining[-24:]

    lowest_id, highest_id, signature = unpack("<LL16s", footer)
    signature = bytearray(signature)

    self.lowest_id = lowest_id
    self.highest_id = highest_id
    self.signature = signature

    # Everything except the trailing signature itself is signed.
    signed_data = self.raw_report[:-16]

    signer = ChainedAuthProvider()

    if signature_flags == AuthProvider.NoKey:
        self.encrypted = False
    else:
        self.encrypted = True

    try:
        verification = signer.verify_report(device_id, signature_flags, signed_data, signature, report_id=report_id, sent_timestamp=sent_timestamp)
        self.verified = verification['verified']
    except NotFoundError:
        # No auth provider supports this signature method.
        self.verified = False

    if not self.verified:
        return [], []

    if self.encrypted:
        try:
            result = signer.decrypt_report(device_id, signature_flags, readings, report_id=report_id, sent_timestamp=sent_timestamp)
            readings = result['data']
        except NotFoundError:
            return [], []

    # Each reading is exactly 16 bytes.
    assert (len(readings) % 16) == 0

    # Convert device-relative timestamps to wall-clock via received_time.
    time_base = self.received_time - datetime.timedelta(seconds=sent_timestamp)

    parsed_readings = []

    for i in range(0, len(readings), 16):
        reading = readings[i:i + 16]
        stream, _, reading_id, timestamp, value = unpack("<HHLLL", reading)

        parsed = IOTileReading(timestamp, stream, value, time_base=time_base, reading_id=reading_id)
        parsed_readings.append(parsed)

    return parsed_readings, []
Decode this report into a list of readings
22,383
def _add_property ( self , name , default_value ) : name = str ( name ) self . _properties [ name ] = default_value
Add a device property with a given default value .
22,384
def set(self, name, value):
    """Set a device model property.

    Raises:
        ArgumentError: If the property was never declared on this model.
    """
    key = str(name)

    if key not in self._properties:
        raise ArgumentError("Unknown property in DeviceModel", name=key)

    self._properties[key] = value
Set a device model property .
22,385
def get(self, name):
    """Get a device model property.

    Raises:
        ArgumentError: If the property was never declared on this model.
    """
    key = str(name)

    if key in self._properties:
        return self._properties[key]

    raise ArgumentError("Unknown property in DeviceModel", name=key)
Get a device model property .
22,386
def _convert_to_bytes ( type_name , value ) : int_types = { 'uint8_t' : 'B' , 'int8_t' : 'b' , 'uint16_t' : 'H' , 'int16_t' : 'h' , 'uint32_t' : 'L' , 'int32_t' : 'l' } type_name = type_name . lower ( ) if type_name not in int_types and type_name not in [ 'string' , 'binary' ] : raise ArgumentError ( 'Type must be a known integer type, integer type array, string' , known_integers = int_types . keys ( ) , actual_type = type_name ) if type_name == 'string' : bytevalue = bytes ( value ) elif type_name == 'binary' : bytevalue = bytes ( value ) else : bytevalue = struct . pack ( "<%s" % int_types [ type_name ] , value ) return bytevalue
Convert a typed value to a binary array
22,387
def dump(self):
    """Serialize this object.

    Returns:
        dict: JSON-safe state with the binary payload base64-encoded.
    """
    encoded_data = base64.b64encode(self.data).decode('utf-8')

    return {
        'target': str(self.target),
        'data': encoded_data,
        'var_id': self.var_id,
        'valid': self.valid,
    }
Serialize this object .
22,388
def generate_rpcs(self, address):
    """Generate the RPCs needed to stream this config variable to a tile.

    The stored data begins with a 2-byte var_id header, so streaming
    starts at offset 2 and payloads are sent in 16-byte chunks.
    """
    chunks = []

    for offset in range(2, len(self.data), 16):
        payload = self.data[offset:offset + 16]
        chunks.append((address, rpcs.SET_CONFIG_VARIABLE, self.var_id, offset - 2, payload))

    return chunks
Generate the RPCs needed to stream this config variable to a tile .
22,389
def Restore(cls, state):
    """Unserialize this object.

    Inverse of dump(): decodes the base64 payload and rebuilds the
    slot identifier from its string form.
    """
    restored_target = SlotIdentifier.FromString(state.get('target'))
    raw_data = base64.b64decode(state.get('data'))

    return ConfigEntry(restored_target, state.get('var_id'), raw_data, state.get('valid'))
Unserialize this object .
22,390
def compact(self):
    """Remove all invalid config entries.

    Reclaims the data space occupied by invalidated entries and drops
    them from the entry list (in place, preserving list identity).
    """
    reclaimed = 0
    kept = []

    for entry in self.entries:
        if entry.valid:
            kept.append(entry)
        else:
            reclaimed += entry.data_space()

    self.entries[:] = kept
    self.data_index -= reclaimed
Remove all invalid config entries .
22,391
def start_entry(self, target, var_id):
    """Begin a new config database entry.

    Returns:
        Error code: DESTINATION_BUFFER_TOO_SMALL when there is not enough
        space left, otherwise NO_ERROR.
    """
    self.in_progress = ConfigEntry(target, var_id, b'')

    if self.data_size - self.data_index < self.in_progress.data_space():
        return Error.DESTINATION_BUFFER_TOO_SMALL

    # The variable id is stored as a 2-byte little-endian data prefix.
    self.in_progress.data += struct.pack("<H", var_id)
    # Charge the entry's current footprint now; end_entry() later charges
    # data_space() - 2 for the payload added after this point
    # (presumably to avoid double-counting the 2-byte id — TODO confirm).
    self.data_index += self.in_progress.data_space()

    return Error.NO_ERROR
Begin a new config database entry .
22,392
def add_data(self, data):
    """Add data to the currently in progress entry.

    Returns:
        Error code: DESTINATION_BUFFER_TOO_SMALL if the data does not
        fit, otherwise NO_ERROR (even when no entry is in progress).
    """
    remaining = self.data_size - self.data_index
    if remaining < len(data):
        return Error.DESTINATION_BUFFER_TOO_SMALL

    if self.in_progress is not None:
        self.in_progress.data += data

    return Error.NO_ERROR
Add data to the currently in progress entry .
22,393
def end_entry(self):
    """Finish a previously started config database entry.

    Returns:
        Error code: INPUT_BUFFER_WRONG_SIZE if no payload was added
        beyond the 2-byte var_id header, otherwise NO_ERROR.
    """
    # No entry in progress is treated as a successful no-op.
    if self.in_progress is None:
        return Error.NO_ERROR

    # data_space() == 2 means only the var_id header was written.
    if self.in_progress.data_space() == 2:
        return Error.INPUT_BUFFER_WRONG_SIZE

    # Any older entry for the same (target, var_id) is superseded.
    for entry in self.entries:
        if entry.target == self.in_progress.target and entry.var_id == self.in_progress.var_id:
            entry.valid = False

    self.entries.append(self.in_progress)
    # start_entry() already charged the initial footprint; only the
    # payload beyond the 2-byte header is charged here — TODO confirm
    # accounting against data_space()'s definition.
    self.data_index += self.in_progress.data_space() - 2
    self.in_progress = None

    return Error.NO_ERROR
Finish a previously started config database entry .
22,394
def stream_matching(self, address, name):
    """Return the RPCs needed to stream matching config variables to the given tile.

    Only valid entries whose target selector matches the (address, name)
    pair contribute RPCs.
    """
    rpc_list = []

    for entry in self.entries:
        if entry.valid and entry.target.matches(address, name):
            rpc_list.extend(entry.generate_rpcs(address))

    return rpc_list
Return the RPCs needed to stream matching config variables to the given tile .
22,395
def add_direct(self, target, var_id, var_type, data):
    """Directly add a config variable.

    Packs the 2-byte var_id header plus the typed payload, invalidates
    any previous entry for the same (target, var_id) and appends the
    new entry.

    Raises:
        DataError: If the packed entry does not fit in the remaining space.
    """
    data = struct.pack("<H", var_id) + _convert_to_bytes(var_type, data)

    if self.data_size - self.data_index < len(data):
        # FIX: corrected typo in the error message ("conig" -> "config").
        raise DataError("Not enough space for data in new config entry",
                        needed_space=len(data),
                        actual_space=(self.data_size - self.data_index))

    new_entry = ConfigEntry(target, var_id, data)

    # Supersede any older entry for the same target/variable.
    for entry in self.entries:
        if entry.target == new_entry.target and entry.var_id == new_entry.var_id:
            entry.valid = False

    self.entries.append(new_entry)
    self.data_index += new_entry.data_space()
Directly add a config variable .
22,396
def start_config_var_entry(self, var_id, encoded_selector):
    """Start a new config variable entry.

    Returns:
        list: A single-element list containing the database error code.
    """
    target = SlotIdentifier.FromEncoded(encoded_selector)
    return [self.config_database.start_entry(target, var_id)]
Start a new config variable entry .
22,397
def get_config_var_entry(self, index):
    """Get the metadata from the selected config variable entry.

    Args:
        index (int): 1-based entry index.

    Returns:
        list: [error, magic, offset, size, encoded target, flags, reserved].
    """
    entries = self.config_database.entries

    if index == 0 or index > len(entries):
        return [Error.INVALID_ARRAY_KEY, 0, 0, 0, b'\0' * 8, 0, 0]

    entry = entries[index - 1]
    if not entry.valid:
        return [ConfigDatabaseError.OBSOLETE_ENTRY, 0, 0, 0, b'\0' * 8, 0, 0]

    # Byte offset of this entry = sum of the space used by earlier entries.
    offset = 0
    for prior in entries[:index - 1]:
        offset += prior.data_space()

    return [Error.NO_ERROR, self.config_database.ENTRY_MAGIC, offset,
            entry.data_space(), entry.target.encode(), 0xFF, 0]
Get the metadata from the selected config variable entry .
22,398
def get_config_var_data(self, index, offset):
    """Get a chunk of data for a config variable.

    Args:
        index (int): 1-based entry index.
        offset (int): Byte offset into the entry's data.

    Returns:
        list: [error, up-to-16-byte data chunk].
    """
    entries = self.config_database.entries

    if index == 0 or index > len(entries):
        return [Error.INVALID_ARRAY_KEY, b'']

    entry = entries[index - 1]
    if not entry.valid:
        return [ConfigDatabaseError.OBSOLETE_ENTRY, b'']

    if offset >= len(entry.data):
        return [Error.INVALID_ARRAY_KEY, b'']

    return [Error.NO_ERROR, entry.data[offset:offset + 16]]
Get a chunk of data for a config variable .
22,399
def invalidate_config_var_entry(self, index):
    """Mark a config variable as invalid.

    Args:
        index (int): 1-based entry index.

    Returns:
        list: [error code] on success.
        NOTE(review): the error paths return [code, b''] (two elements)
        while success returns [code] only — confirm callers expect this
        asymmetry before changing it.
    """
    if index == 0 or index > len(self.config_database.entries):
        return [Error.INVALID_ARRAY_KEY, b'']

    entry = self.config_database.entries[index - 1]
    if not entry.valid:
        return [ConfigDatabaseError.OBSOLETE_ENTRY, b'']

    entry.valid = False

    return [Error.NO_ERROR]
Mark a config variable as invalid .