idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
22,200
def execute_action_list(obj, target, kw):
    """Actually execute the action list."""
    env = obj.get_build_env()
    kw = obj.get_kw(kw)
    status = 0
    for act in obj.get_action_list():
        # NOTE(review): empty target/source lists are passed here;
        # presumably the action pulls batch info off the executor -- confirm.
        args = ([], [], env)
        status = act(*args, **kw)
        if isinstance(status, SCons.Errors.BuildError):
            status.executor = obj
            raise status
        if status:
            msg = "Error %s" % status
            raise SCons.Errors.BuildError(
                errstr=msg,
                node=obj.batches[0].targets,
                executor=obj,
                action=act)
    return status
Actually execute the action list .
149
6
22,201
def get_all_targets(self):
    """Return the targets from every batch of this Executor, in order."""
    return [t for batch in self.batches for t in batch.targets]
Returns all targets for all batches of this Executor .
33
11
22,202
def get_all_sources(self):
    """Return the sources from every batch of this Executor, in order."""
    return [s for batch in self.batches for s in batch.sources]
Returns all sources for all batches of this Executor .
32
11
22,203
def get_action_side_effects(self):
    """Return all side effects, for all batches of this Executor, that
    are used by the underlying Action."""
    # UniqueList de-duplicates while preserving insertion order.
    result = SCons.Util.UniqueList([])
    for node in self.get_action_targets():
        result.extend(node.side_effects)
    return result
Returns all side effects for all batches of this Executor used by the underlying Action .
53
17
22,204
def get_build_env(self):
    """Fetch or create (and memoize) the build Environment for this Executor."""
    try:
        return self._memo['get_build_env']
    except KeyError:
        pass
    # Merge the override dictionaries in order; later entries win.  The
    # overrides get evaluated against the current environment's
    # construction variables so users can add to existing values by
    # referencing the variable in the expansion.
    overrides = {}
    for odict in self.overridelist:
        overrides.update(odict)
    import SCons.Defaults
    base_env = self.env or SCons.Defaults.DefaultEnvironment()
    build_env = base_env.Override(overrides)
    self._memo['get_build_env'] = build_env
    return build_env
Fetch or create the appropriate build Environment for this Executor .
152
13
22,205
def get_build_scanner_path(self, scanner):
    """Fetch the scanner path for this executor's targets and sources."""
    env = self.get_build_env()
    try:
        cwd = self.batches[0].targets[0].cwd
    except (IndexError, AttributeError):
        # No batches, no targets, or the target node carries no cwd.
        cwd = None
    return scanner.path(env, cwd,
                        self.get_all_targets(),
                        self.get_all_sources())
Fetch the scanner path for this executor s targets and sources .
92
14
22,206
def add_sources(self, sources):
    """Add source files to this Executor's list, skipping duplicates.

    Needed for multi Builders that are called repeatedly to build up a
    source file list for a given target.
    """
    # TODO(batch): extend to multiple batches
    assert (len(self.batches) == 1)
    # TODO(batch): remove duplicates?
    existing = self.batches[0].sources
    existing.extend([s for s in sources if s not in existing])
Add source files to this Executor s list . This is necessary for multi Builders that can be called repeatedly to build up a source file list for a given target .
76
34
22,207
def add_batch(self, targets, sources):
    """Record an associated (targets, sources) pair on this Executor.

    Needed for batch Builders that are called repeatedly to accumulate
    matching target+source pairs so that multiple target files can be
    updated at once from multiple corresponding source files, for tools
    like MSVC that support it.
    """
    self.batches.append(Batch(targets, sources))
Add pair of associated target and source to this Executor s list . This is necessary for batch Builders that can be called repeatedly to build up a list of matching target and source files that will be used in order to update multiple target files at once from multiple corresponding source files for tools like MSVC that support it .
26
64
22,208
def get_contents(self):
    """Fetch (and memoize) the signature contents.

    This is the main reason this class exists: compute the contents once
    and cache the result no matter how many target or source Nodes there
    are.
    """
    try:
        return self._memo['get_contents']
    except KeyError:
        pass
    env = self.get_build_env()
    all_targets = self.get_all_targets()
    all_sources = self.get_all_sources()
    pieces = [act.get_contents(all_targets, all_sources, env)
              for act in self.get_action_list()]
    result = bytearray("", 'utf-8').join(pieces)
    self._memo['get_contents'] = result
    return result
Fetch the signature contents . This is the main reason this class exists so we can compute this once and cache it regardless of how many target or source Nodes there are .
150
35
22,209
def get_implicit_deps(self):
    """Return the executor's implicit dependencies, i.e. the nodes of
    the commands to be executed."""
    build_env = self.get_build_env()
    result = []
    for act in self.get_action_list():
        result.extend(act.get_implicit_deps(self.get_all_targets(),
                                            self.get_all_sources(),
                                            build_env))
    return result
Return the executor s implicit dependencies i . e . the nodes of the commands to be executed .
91
20
22,210
def _morph(self):
    """Morph this Null executor into a real Executor object."""
    # Preserve the batches across re-initialization.
    saved_batches = self.batches
    self.__class__ = Executor
    self.__init__([])
    self.batches = saved_batches
Morph this Null executor to a real Executor object .
34
13
22,211
def LoadPlugins(cls):
    """Load all registered iotile.update_record plugins (idempotent)."""
    if cls.PLUGINS_LOADED:
        return
    registry = ComponentRegistry()
    for _, record in registry.load_extensions('iotile.update_record'):
        cls.RegisterRecordType(record)
    cls.PLUGINS_LOADED = True
Load all registered iotile . update_record plugins .
70
12
22,212
def RegisterRecordType(cls, record_class):
    """Register a known record type in KNOWN_CLASSES."""
    record_type = record_class.MatchType()
    # Several record classes may share the same match type.
    UpdateRecord.KNOWN_CLASSES.setdefault(record_type, []).append(record_class)
Register a known record type in KNOWN_CLASSES .
76
12
22,213
def _setup(self):
    """Prepare for code generation by setting up root clock nodes.

    Allocates one counter stream for the system tick, the fast tick and
    the two user-configurable ticks, then wires each source tick into
    its counter stream via a copy_all_a node.
    """
    allocate = self.allocator.allocate_stream
    systick = allocate(DataStream.CounterType, attach=True)
    fasttick = allocate(DataStream.CounterType, attach=True)
    user1tick = allocate(DataStream.CounterType, attach=True)
    user2tick = allocate(DataStream.CounterType, attach=True)

    # system_tick / fast_tick / tick_1 / tick_2 are stream identifiers
    # defined elsewhere in this module (not visible here) -- confirm.
    graph = self.sensor_graph
    graph.add_node("({} always) => {} using copy_all_a".format(system_tick, systick))
    graph.add_node("({} always) => {} using copy_all_a".format(fast_tick, fasttick))
    graph.add_config(SlotIdentifier.FromString('controller'),
                     config_fast_tick_secs, 'uint32_t', 1)
    graph.add_node("({} always) => {} using copy_all_a".format(tick_1, user1tick))
    graph.add_node("({} always) => {} using copy_all_a".format(tick_2, user2tick))

    self.system_tick = systick
    self.fast_tick = fasttick
    self.user1_tick = user1tick
    self.user2_tick = user2tick
Prepare for code generation by setting up root clock nodes .
326
12
22,214
def find_proxy_plugin(component, plugin_name):
    """Attempt to find a proxy plugin provided by a specific component.

    Raises:
        DataError: If no plugin with the requested name can be found.
    """
    registry = ComponentRegistry()
    candidates = registry.load_extensions('iotile.proxy_plugin',
                                          comp_filter=component,
                                          class_filter=TileBusProxyPlugin,
                                          product_name='proxy_plugin')
    for _name, plugin in candidates:
        if plugin.__name__ == plugin_name:
            return plugin
    raise DataError("Could not find proxy plugin module in registered components or installed distributions",
                    component=component, name=plugin_name)
Attempt to find a proxy plugin provided by a specific component
114
11
22,215
def _convert_trigger(self, trigger_def, parent):
    """Convert a TriggerDefinition into a (stream, trigger) pair."""
    if trigger_def.explicit_stream is None:
        # A named event: resolve it to a stream, firing unconditionally.
        stream = parent.resolve_identifier(trigger_def.named_event, DataStream)
        return (stream, TrueTrigger())
    return (trigger_def.explicit_stream, trigger_def.explicit_trigger)
Convert a TriggerDefinition into a stream trigger pair .
79
11
22,216
def _parse_trigger(self, trigger_clause):
    """Parse a named event or explicit stream trigger into a TriggerDefinition."""
    cond = trigger_clause[0]
    named_event = None
    explicit_stream = None
    explicit_trigger = None
    clause_kind = cond.getName()
    # Identifier parse tree is Group(Identifier)
    if clause_kind == 'identifier':
        named_event = cond[0]
    elif clause_kind == 'stream_trigger':
        # (trigger_type, stream, operator, reference)
        explicit_stream = cond[1]
        explicit_trigger = InputTrigger(cond[0], cond[2], cond[3])
    elif clause_kind == 'stream_always':
        explicit_stream = cond[0]
        explicit_trigger = TrueTrigger()
    else:
        raise ArgumentError("OnBlock created from an invalid ParseResults object",
                            parse_results=trigger_clause)
    return TriggerDefinition(named_event, explicit_stream, explicit_trigger)
Parse a named event or explicit stream trigger into a TriggerDefinition .
215
14
22,217
def platform_default():
    """Return the platform string for our execution environment.

    Returns one of the canonical SCons platform names ('posix',
    'cygwin', 'irix', ..., 'os2') or, as a fallback, sys.platform.
    """
    osname = os.name
    if osname == 'java':
        # Running under Jython: ask for the underlying OS type.
        osname = os._osType
    if osname == 'posix':
        if sys.platform == 'cygwin':
            return 'cygwin'
        elif sys.platform.find('irix') != -1:
            return 'irix'
        elif sys.platform.find('sunos') != -1:
            return 'sunos'
        elif sys.platform.find('hp-ux') != -1:
            return 'hpux'
        elif sys.platform.find('aix') != -1:
            return 'aix'
        elif sys.platform.find('darwin') != -1:
            return 'darwin'
        else:
            return 'posix'
    elif osname == 'os2':
        # BUGFIX: this branch used to test os.name, which defeats the
        # os._osType remapping done above for Jython; use osname so the
        # two checks are consistent.
        return 'os2'
    else:
        return sys.platform
Return the platform string for our execution environment .
195
9
22,218
def platform_module(name=platform_default()):
    """Return the imported module for the platform, loading it on demand."""
    full_name = 'SCons.Platform.' + name
    if full_name not in sys.modules:
        if os.name == 'java':
            # Jython: force the module in via evaluation of its dotted name.
            eval(full_name)
        else:
            try:
                # Locate the platform module inside the SCons.Platform package.
                file, path, desc = imp.find_module(name,
                                                   sys.modules['SCons.Platform'].__path__)
                try:
                    mod = imp.load_module(full_name, file, path, desc)
                finally:
                    # find_module may return an open file handle; always close it.
                    if file:
                        file.close()
            except ImportError:
                # Possibly running from a zipped distribution; retry via zipimport.
                try:
                    import zipimport
                    importer = zipimport.zipimporter(sys.modules['SCons.Platform'].__path__[0])
                    mod = importer.load_module(full_name)
                except ImportError:
                    raise SCons.Errors.UserError("No platform named '%s'" % name)
            # Make the loaded module reachable as an attribute of the package.
            setattr(SCons.Platform, name, mod)
    return sys.modules[full_name]
Return the imported module for the platform .
211
8
22,219
def Platform(name=platform_default()):
    """Select a canned Platform specification."""
    module = platform_module(name)
    return PlatformSpec(name, module.generate)
Select a canned Platform specification .
33
6
22,220
def jarSources(target, source, env, for_signature):
    """Only include sources that are not a manifest file."""
    try:
        env['JARCHDIR']
    except KeyError:
        jarchdir_set = False
    else:
        jarchdir_set = True
        jarchdir = env.subst('$JARCHDIR', target=target, source=source)
        if jarchdir:
            jarchdir = env.fs.Dir(jarchdir)
    result = []
    for src in source:
        if src.get_text_contents()[:16] == "Manifest-Version":
            continue  # manifests are handled by jarManifest/jarFlags
        if jarchdir_set:
            _chdir = jarchdir
        else:
            _chdir = getattr(src.attributes, 'java_classdir', None)
        if _chdir:
            # If we are changing the dir with -C, then sources should
            # be relative to that directory.
            src = SCons.Subst.Literal(src.get_path(_chdir))
            result.append('-C')
            result.append(_chdir)
        result.append(src)
    return result
Only include sources that are not a manifest file .
235
10
22,221
def jarManifest(target, source, env, for_signature):
    """Look in sources for a manifest file, if any; return '' if none."""
    for src in source:
        if src.get_text_contents()[:16] == "Manifest-Version":
            return src
    return ''
Look in sources for a manifest file if any .
52
10
22,222
def jarFlags(target, source, env, for_signature):
    """If we have a manifest, make sure the 'm' flag is specified."""
    jarflags = env.subst('$JARFLAGS', target=target, source=source)
    for src in source:
        if src.get_text_contents()[:16] == "Manifest-Version":
            if 'm' not in jarflags:
                return jarflags + 'm'
            break
    return jarflags
If we have a manifest make sure that the m flag is specified .
90
14
22,223
def generate(env):
    """Add Builders and construction variables for jar to an Environment."""
    # Register the jar/java builders on this environment.
    SCons.Tool.CreateJarBuilder(env)
    SCons.Tool.CreateJavaFileBuilder(env)
    SCons.Tool.CreateJavaClassFileBuilder(env)
    SCons.Tool.CreateJavaClassDirBuilder(env)
    env.AddMethod(Jar)
    # Command-line pieces; _JAR* are generator functions expanded at
    # command build time.
    env['JAR'] = 'jar'
    env['JARFLAGS'] = SCons.Util.CLVar('cf')
    env['_JARFLAGS'] = jarFlags
    env['_JARMANIFEST'] = jarManifest
    env['_JARSOURCES'] = jarSources
    env['_JARCOM'] = '$JAR $_JARFLAGS $TARGET $_JARMANIFEST $_JARSOURCES'
    env['JARCOM'] = "${TEMPFILE('$_JARCOM','$JARCOMSTR')}"
    env['JARSUFFIX'] = '.jar'
Add Builders and construction variables for jar to an Environment .
215
12
22,224
def mock(self, slot, rpc_id, value):
    """Store a mock return value for an RPC on the given slot."""
    self.mock_rpcs.setdefault(slot.address, {})[rpc_id] = value
Store a mock return value for an RPC
63
8
22,225
def rpc(self, address, rpc_id):
    """Call an RPC and receive the result as an integer.

    A previously mocked value for (address, rpc_id) overrides whatever
    the default behavior is and is returned directly.
    """
    mocked = self.mock_rpcs.get(address, {})
    if rpc_id in mocked:
        return mocked[rpc_id]
    result = self._call_rpc(address, rpc_id, bytes())
    if len(result) != 4:
        self.warn(u"RPC 0x%X on address %d: response had invalid length %d not equal to 4"
                  % (rpc_id, address, len(result)))
    if len(result) < 4:
        raise HardwareError("Response from RPC was not long enough to parse as an integer",
                            rpc_id=rpc_id, address=address,
                            response_length=len(result))
    if len(result) > 4:
        # Too long: truncate to the first 4 bytes (warned above).
        result = result[:4]
    res, = struct.unpack("<L", result)
    return res
Call an RPC and receive the result as an integer .
228
11
22,226
def _get_swig_version(env, swig):
    """Run the SWIG command line tool to get and return the version number.

    Returns None if swig could not be run or the version is unparsable.
    """
    swig = env.subst(swig)
    pipe = SCons.Action._subproc(env, SCons.Util.CLVar(swig) + ['-version'],
                                 stdin='devnull', stderr='devnull',
                                 stdout=subprocess.PIPE)
    if pipe.wait() != 0:
        return None
    # MAYBE: out = SCons.Util.to_str (pipe.stdout.read())
    out = SCons.Util.to_str(pipe.stdout.read())
    match = re.search(r'SWIG Version\s+(\S+).*', out, re.MULTILINE)
    if match:
        if verbose:
            print("Version is:%s" % match.group(1))
        return match.group(1)
    if verbose:
        print("Unable to detect version: [%s]" % out)
    return None
Run the SWIG command line tool to get and return the version number
221
14
22,227
def generate(env):
    """Add Builders and construction variables for swig to an Environment."""
    c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
    # Hook the .i suffix into the C and C++ file builders.
    for builder in (c_file, cxx_file):
        builder.suffix['.i'] = swigSuffixEmitter
        builder.add_action('.i', SwigAction)
        builder.add_emitter('.i', _swigEmitter)
    # ... and into the Java file builder.
    java_file = SCons.Tool.CreateJavaFileBuilder(env)
    java_file.suffix['.i'] = swigSuffixEmitter
    java_file.add_action('.i', SwigAction)
    java_file.add_emitter('.i', _swigEmitter)
    if 'SWIG' not in env:
        env['SWIG'] = env.Detect(swigs) or swigs[0]
    env['SWIGVERSION'] = _get_swig_version(env, env['SWIG'])
    env['SWIGFLAGS'] = SCons.Util.CLVar('')
    env['SWIGDIRECTORSUFFIX'] = '_wrap.h'
    env['SWIGCFILESUFFIX'] = '_wrap$CFILESUFFIX'
    env['SWIGCXXFILESUFFIX'] = '_wrap$CXXFILESUFFIX'
    env['_SWIGOUTDIR'] = r'${"-outdir \"%s\"" % SWIGOUTDIR}'
    env['SWIGPATH'] = []
    env['SWIGINCPREFIX'] = '-I'
    env['SWIGINCSUFFIX'] = ''
    env['_SWIGINCFLAGS'] = '$( ${_concat(SWIGINCPREFIX, SWIGPATH, SWIGINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
    env['SWIGCOM'] = '$SWIG -o $TARGET ${_SWIGOUTDIR} ${_SWIGINCFLAGS} $SWIGFLAGS $SOURCES'
Add Builders and construction variables for swig to an Environment .
519
13
22,228
def _select_ftdi_channel(channel):
    """Select a multiplexer channel.  Currently uses an FTDI chip via pylibftdi.

    Args:
        channel (int): The multiplexer channel to select, valid range 0-7.

    Raises:
        ArgumentError: If channel is outside the 0-7 range.
    """
    # BUGFIX: the upper bound used to be ``channel > 8``, which let the
    # invalid channel 8 through even though only three direction bits
    # (0b111, i.e. channels 0-7) exist and the error message itself
    # says 0-7.
    if channel < 0 or channel > 7:
        raise ArgumentError("FTDI-selected multiplexer only has channels 0-7 valid, "
                            "make sure you specify channel with -c channel=number",
                            channel=channel)
    from pylibftdi import BitBangDevice
    bb = BitBangDevice(auto_detach=False)
    bb.direction = 0b111
    bb.port = channel
Select the multiplexer channel . Currently uses an FTDI chip via pylibftdi
98
18
22,229
def parse_binary_descriptor(bindata, sensor_log=None):
    """Convert a 14-byte binary streamer descriptor into a DataStreamer.

    Raises:
        ArgumentError: If the data has the wrong length or contains an
            unknown format, type or trigger code.
    """
    if len(bindata) != 14:
        raise ArgumentError("Invalid length of binary data in streamer descriptor",
                            length=len(bindata), expected=14, data=bindata)
    dest_tile, stream_id, trigger, format_code, type_code = struct.unpack("<8sHBBBx", bindata)
    dest_id = SlotIdentifier.FromEncoded(dest_tile)
    selector = DataStreamSelector.FromEncoded(stream_id)
    format_name = DataStreamer.KnownFormatCodes.get(format_code)
    type_name = DataStreamer.KnownTypeCodes.get(type_code)
    if format_name is None:
        raise ArgumentError("Unknown format code", code=format_code,
                            known_code=DataStreamer.KnownFormatCodes)
    if type_name is None:
        raise ArgumentError("Unknown type code", code=type_code,
                            known_codes=DataStreamer.KnownTypeCodes)
    with_other = None
    if trigger & (1 << 7):
        # High bit set: manual streamer triggered along with another
        # streamer whose index is in the low 7 bits.
        auto = False
        with_other = trigger & ((1 << 7) - 1)
    elif trigger == 0:
        auto = False
    elif trigger == 1:
        auto = True
    else:
        raise ArgumentError("Unknown trigger type for streamer", trigger_code=trigger)
    return DataStreamer(selector, dest_id, format_name, auto, type_name,
                        with_other=with_other, sensor_log=sensor_log)
Convert a binary streamer descriptor into a DataStreamer object .
340
12
22,230
def create_binary_descriptor(streamer):
    """Create a packed 14-byte binary descriptor of a DataStreamer object."""
    if streamer.automatic:
        trigger = 1
    elif streamer.with_other is not None:
        # High bit marks "triggered with another streamer".
        trigger = (1 << 7) | streamer.with_other
    else:
        trigger = 0  # manual streamer
    return struct.pack("<8sHBBBx",
                       streamer.dest.encode(),
                       streamer.selector.encode(),
                       trigger,
                       streamer.KnownFormats[streamer.format],
                       streamer.KnownTypes[streamer.report_type])
Create a packed binary descriptor of a DataStreamer object .
112
11
22,231
def parse_string_descriptor(string_desc):
    """Parse a string descriptor of a streamer into a DataStreamer object.

    Raises:
        SensorGraphSemanticError: If the descriptor combines options that
            are not allowed together, or that are not yet supported.
    """
    if not isinstance(string_desc, str):
        string_desc = str(string_desc)
    # The grammar requires a trailing semicolon; add one if it is missing.
    if not string_desc.endswith(';'):
        string_desc += ';'
    parsed = get_streamer_parser().parseString(string_desc)[0]
    realtime = 'realtime' in parsed
    broadcast = 'broadcast' in parsed
    encrypted = 'security' in parsed and parsed['security'] == 'encrypted'
    signed = 'security' in parsed and parsed['security'] == 'signed'
    auto = 'manual' not in parsed
    with_other = None
    if 'with_other' in parsed:
        # Triggering with another streamer implies a manual streamer.
        with_other = parsed['with_other']
        auto = False
    dest = SlotIdentifier.FromString('controller')
    if 'explicit_tile' in parsed:
        dest = parsed['explicit_tile']
    selector = parsed['selector']
    # Make sure all of the combinations are valid
    if realtime and (encrypted or signed):
        raise SensorGraphSemanticError("Realtime streamers cannot be either signed or encrypted")
    if broadcast and (encrypted or signed):
        raise SensorGraphSemanticError("Broadcast streamers cannot be either signed or encrypted")
    report_type = 'broadcast' if broadcast else 'telegram'
    # (Removed two no-op self-assignments, ``dest = dest`` and
    # ``selector = selector``, that had no effect.)
    if realtime or broadcast:
        report_format = u'individual'
    elif signed:
        report_format = u'signedlist_userkey'
    elif encrypted:
        raise SensorGraphSemanticError("Encrypted streamers are not yet supported")
    else:
        report_format = u'hashedlist'
    return DataStreamer(selector, dest, report_format, auto,
                        report_type=report_type, with_other=with_other)
Parse a string descriptor of a streamer into a DataStreamer object .
396
15
22,232
def generate(env):
    """Add Builders and construction variables for applelink to an Environment."""
    link.generate(env)
    env['FRAMEWORKPATHPREFIX'] = '-F'
    env['_FRAMEWORKPATH'] = '${_concat(FRAMEWORKPATHPREFIX, FRAMEWORKPATH, "", __env__)}'
    env['_FRAMEWORKS'] = '${_concat("-framework ", FRAMEWORKS, "", __env__)}'
    env['LINKCOM'] = env['LINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
    env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -dynamiclib')
    env['SHLINKCOM'] = env['SHLINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'

    # TODO: Work needed to generate versioned shared libraries
    # Leaving this commented out, and also going to disable versioned library checking for now
    # see: http://docstore.mik.ua/orelly/unix3/mac/ch05_04.htm for proper naming
    #link._setup_versioned_lib_variables(env, tool = 'applelink')#, use_soname = use_soname)
    #env['LINKCALLBACKS'] = link._versioned_lib_callbacks()

    # Override the default for loadable modules, which are different on
    # OS X than dynamic shared libs, echoing what XCode does for
    # pre/suffixes:
    env['LDMODULEPREFIX'] = ''
    env['LDMODULESUFFIX'] = ''
    env['LDMODULEFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -bundle')
    env['LDMODULECOM'] = '$LDMODULE -o ${TARGET} $LDMODULEFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
Add Builders and construction variables for applelink to an Environment .
486
13
22,233
def _generateGUID ( slnfile , name ) : m = hashlib . md5 ( ) # Normalize the slnfile path to a Windows path (\ separators) so # the generated file has a consistent GUID even if we generate # it on a non-Windows platform. m . update ( bytearray ( ntpath . normpath ( str ( slnfile ) ) + str ( name ) , 'utf-8' ) ) solution = m . hexdigest ( ) . upper ( ) # convert most of the signature to GUID form (discard the rest) solution = "{" + solution [ : 8 ] + "-" + solution [ 8 : 12 ] + "-" + solution [ 12 : 16 ] + "-" + solution [ 16 : 20 ] + "-" + solution [ 20 : 32 ] + "}" return solution
This generates a dummy GUID for the sln file to use . It is based on the MD5 signatures of the sln filename plus the name of the project . It basically just needs to be unique and not change with each invocation .
183
48
22,234
def makeHierarchy(sources):
    """Break a list of files into a hierarchy.

    In the returned mapping, a string value is a file (the original
    path) and a dict value is a folder.
    """
    hierarchy = {}
    for source_file in sources:
        parts = splitFully(source_file)
        if not parts:
            #print('Warning: failed to decompose path for ' + str(source_file))
            continue
        node = hierarchy
        for part in parts[:-1]:
            node = node.setdefault(part, {})
        node[parts[-1]] = source_file
    return hierarchy
Break a list of files into a hierarchy ; for each value if it is a string then it is a file . If it is a dictionary it is a folder . The string is the original path of the file .
96
43
22,235
def GenerateDSP(dspfile, source, env):
    """Generate a Project file based on the version of MSVS being used."""
    version_num = 6.0
    if 'MSVS_VERSION' in env:
        version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
    # Pick the generator class matching the MSVS project-format era.
    if version_num >= 10.0:
        generator = _GenerateV10DSP(dspfile, source, env)
    elif version_num >= 7.0:
        generator = _GenerateV7DSP(dspfile, source, env)
    else:
        generator = _GenerateV6DSP(dspfile, source, env)
    generator.Build()
Generates a Project file based on the version of MSVS that is being used
145
16
22,236
def solutionEmitter(target, source, env):
    """Set up the DSW dependencies.

    Builds a synthetic source Value encoding the solution inputs (name,
    variants, guid, projects and the target path) so the solution file
    is rebuilt whenever any of them change.

    Raises:
        SCons.Errors.InternalError: On missing or badly-typed settings.
    """
    # todo: Not sure what sets source to what user has passed as target,
    # but this is what happens. When that is fixed, we also won't have
    # to make the user always append env['MSVSSOLUTIONSUFFIX'] to target.
    if source[0] == target[0]:
        source = []

    # Make sure the suffix is correct for the version of MSVS we're running.
    (base, suff) = SCons.Util.splitext(str(target[0]))
    suff = env.subst('$MSVSSOLUTIONSUFFIX')
    target[0] = base + suff

    if not source:
        source = 'sln_inputs:'

    if 'name' in env:
        if SCons.Util.is_String(env['name']):
            source = source + ' "%s"' % env['name']
        else:
            raise SCons.Errors.InternalError("name must be a string")

    if 'variant' in env:
        if SCons.Util.is_String(env['variant']):
            source = source + ' "%s"' % env['variant']
        elif SCons.Util.is_List(env['variant']):
            for variant in env['variant']:
                if SCons.Util.is_String(variant):
                    source = source + ' "%s"' % variant
                else:
                    # BUGFIX: this message used to read "name must be a
                    # string or a list of strings" (copy-paste of the
                    # 'name' check above), which misreported the bad key.
                    raise SCons.Errors.InternalError("variant must be a string or a list of strings")
        else:
            raise SCons.Errors.InternalError("variant must be a string or a list of strings")
    else:
        raise SCons.Errors.InternalError("variant must be specified")

    if 'slnguid' in env:
        if SCons.Util.is_String(env['slnguid']):
            source = source + ' "%s"' % env['slnguid']
        else:
            raise SCons.Errors.InternalError("slnguid must be a string")

    if 'projects' in env:
        if SCons.Util.is_String(env['projects']):
            source = source + ' "%s"' % env['projects']
        elif SCons.Util.is_List(env['projects']):
            for t in env['projects']:
                if SCons.Util.is_String(t):
                    source = source + ' "%s"' % t

    source = source + ' "%s"' % str(target[0])
    source = [SCons.Node.Python.Value(source)]
    return ([target[0]], source)
Sets up the DSW dependencies .
595
8
22,237
def generate(env):
    """Add Builders and construction variables for Microsoft Visual
    Studio project files to an Environment."""
    try:
        env['BUILDERS']['MSVSProject']
    except KeyError:
        env['BUILDERS']['MSVSProject'] = projectBuilder
    try:
        env['BUILDERS']['MSVSSolution']
    except KeyError:
        env['BUILDERS']['MSVSSolution'] = solutionBuilder

    env['MSVSPROJECTCOM'] = projectAction
    env['MSVSSOLUTIONCOM'] = solutionAction

    if SCons.Script.call_stack:
        # XXX Need to find a way to abstract this; the build engine
        # shouldn't depend on anything in SCons.Script.
        env['MSVSSCONSCRIPT'] = SCons.Script.call_stack[0].sconscript
    else:
        global default_MSVS_SConscript
        if default_MSVS_SConscript is None:
            default_MSVS_SConscript = env.File('SConstruct')
        env['MSVSSCONSCRIPT'] = default_MSVS_SConscript

    env['MSVSSCONS'] = '"%s" -c "%s"' % (python_executable, getExecScriptMain(env))
    env['MSVSSCONSFLAGS'] = '-C "${MSVSSCONSCRIPT.dir.get_abspath()}" -f ${MSVSSCONSCRIPT.name}'
    env['MSVSSCONSCOM'] = '$MSVSSCONS $MSVSSCONSFLAGS'
    env['MSVSBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"'
    env['MSVSREBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"'
    env['MSVSCLEANCOM'] = '$MSVSSCONSCOM -c "$MSVSBUILDTARGET"'

    # Set-up ms tools paths for default version
    msvc_setup_env_once(env)

    if 'MSVS_VERSION' in env:
        version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
    else:
        (version_num, suite) = (7.0, None)  # guess at a default

    if 'MSVS' not in env:
        env['MSVS'] = {}
    # Project/solution suffixes changed across the MSVS format eras.
    if version_num < 7.0:
        env['MSVS']['PROJECTSUFFIX'] = '.dsp'
        env['MSVS']['SOLUTIONSUFFIX'] = '.dsw'
    elif version_num < 10.0:
        env['MSVS']['PROJECTSUFFIX'] = '.vcproj'
        env['MSVS']['SOLUTIONSUFFIX'] = '.sln'
    else:
        env['MSVS']['PROJECTSUFFIX'] = '.vcxproj'
        env['MSVS']['SOLUTIONSUFFIX'] = '.sln'

    env['MSVSENCODING'] = 'utf-8' if version_num >= 10.0 else 'Windows-1252'

    env['GET_MSVSPROJECTSUFFIX'] = GetMSVSProjectSuffix
    env['GET_MSVSSOLUTIONSUFFIX'] = GetMSVSSolutionSuffix
    env['MSVSPROJECTSUFFIX'] = '${GET_MSVSPROJECTSUFFIX}'
    env['MSVSSOLUTIONSUFFIX'] = '${GET_MSVSSOLUTIONSUFFIX}'
    env['SCONS_HOME'] = os.environ.get('SCONS_HOME')
Add Builders and construction variables for Microsoft Visual Studio project files to an Environment .
873
16
22,238
def PrintWorkspace(self):
    """Write a DSW (workspace) file for this solution."""
    # NOTE: V6DSWHeader is expanded with ``% locals()`` below, so the
    # local variable names ``name`` and ``dspfile`` are load-bearing.
    name = self.name
    # Path to the first project file, relative to the workspace folder.
    dspfile = os.path.relpath(self.dspfiles[0], self.dsw_folder_path)
    self.file.write(V6DSWHeader % locals())
writes a DSW file
59
6
22,239
def waiters(self, path=None):
    """Iterate over all waiters as (path, future) pairs.

    Walks the nested waiter tree depth-first, yielding the waiters at
    the node identified by *path* first, then recursing into children.
    """
    if path is None:
        path = []
    node = self._waiters
    for key in path:
        node = node[key]
    for future in node.get(self._LEAF, ()):
        yield (path, future)
    for key in node:
        if key is not self._LEAF:
            yield from self.waiters(path=path + [key])
Iterate over all waiters .
95
7
22,240
def every_match(self, callback, **kwargs):
    """Invoke *callback* every time a matching message is received.

    Returns an opaque (spec, responder) handle suitable for passing to
    remove_waiter().
    """
    if not kwargs:
        raise ArgumentError("You must specify at least one message field to wait on")
    spec = MessageSpec(**kwargs)
    responder = self._add_waiter(spec, callback)
    return (spec, responder)
Invoke callback every time a matching message is received .
77
11
22,241
def remove_waiter(self, waiter_handle):
    """Remove a previously-registered message callback.

    *waiter_handle* is the (spec, waiter) pair returned when the waiter
    was registered.
    """
    self._remove_waiter(*waiter_handle)
Remove a message callback .
32
5
22,242
def clear(self):
    """Cancel all pending future waiters and reset the waiter tree."""
    for _path, waiter in self.waiters():
        if isinstance(waiter, asyncio.Future) and not waiter.done():
            waiter.set_exception(asyncio.CancelledError())
    self._waiters = {}
Clear all waiters .
61
5
22,243
def wait_for(self, timeout=None, **kwargs):
    """Wait for a specific matching message, or time out."""
    if not kwargs:
        raise ArgumentError("You must specify at least one message field to wait on")
    spec = MessageSpec(**kwargs)
    future = self._add_waiter(spec)
    # Unregister the waiter as soon as it resolves (result or cancel).
    future.add_done_callback(lambda _done: self._remove_waiter(spec, future))
    return asyncio.wait_for(future, timeout=timeout)
Wait for a specific matching message or timeout .
107
9
22,244
async def process_message(self, message, wait=True):
    """Process a message to see if it wakes any waiters.

    Returns True if at least one waiter received the message, False if
    everyone ignored it.
    """
    pending = deque([self._waiters])
    handled = False
    while pending:
        context = pending.popleft()
        for waiter in context.get(OperationManager._LEAF, []):
            if isinstance(waiter, asyncio.Future):
                waiter.set_result(message)
            else:
                try:
                    await _wait_or_launch(self._loop, waiter, message, wait)
                except:  #pylint:disable=bare-except;We can't let a user callback break this routine
                    self._logger.warning("Error calling every_match callback, callback=%s, message=%s",
                                         waiter, message, exc_info=True)
            handled = True
        # Descend into every child whose key matches a field of the message.
        for key in context:
            if key is OperationManager._LEAF:
                continue
            message_val = _get_key(message, key)
            if message_val is _MISSING:
                continue
            next_level = context[key]
            if message_val in next_level:
                pending.append(next_level[message_val])
    return handled
Process a message to see if it wakes any waiters .
259
12
22,245
def generate(env):
    """Add Builders and construction variables for zip to an Environment."""
    # Install the Zip builder only if one is not already present.
    try:
        env['BUILDERS']['Zip']
    except KeyError:
        env['BUILDERS']['Zip'] = ZipBuilder
    env['ZIP'] = 'zip'
    env['ZIPFLAGS'] = SCons.Util.CLVar('')
    env['ZIPCOM'] = zipAction
    env['ZIPCOMPRESSION'] = zipcompression
    env['ZIPSUFFIX'] = '.zip'
    env['ZIPROOT'] = SCons.Util.CLVar('')
Add Builders and construction variables for zip to an Environment .
140
12
22,246
def one_line_desc(obj):
    """Get a one-line description of a class from its docstring.

    Returns "" if the docstring cannot be parsed.
    """
    logger = logging.getLogger(__name__)
    try:
        return ParsedDocstring(obj.__doc__).short_desc
    except:  # pylint:disable=bare-except; We don't want a misbehaving exception to break the program
        logger.warning("Could not parse docstring for %s", obj, exc_info=True)
        return ""
Get a one line description of a class .
94
9
22,247
def instantiate_device(virtual_dev, config, loop):
    """Find a virtual device by name (or script path) and instantiate it.

    Exits the process with status 1 if the device cannot be loaded.
    """
    conf = {}
    if 'device' in config:
        conf = config['device']
    # If we're given a path to a script, try to load and use that rather
    # than search for an installed module.
    try:
        registry = ComponentRegistry()
        if virtual_dev.endswith('.py'):
            _name, dev = registry.load_extension(virtual_dev,
                                                 class_filter=VirtualIOTileDevice,
                                                 unique=True)
        else:
            _name, dev = registry.load_extensions('iotile.virtual_device',
                                                  name_filter=virtual_dev,
                                                  class_filter=VirtualIOTileDevice,
                                                  product_name="virtual_device",
                                                  unique=True)
        return dev(conf)
    except ArgumentError as err:
        print("ERROR: Could not load virtual device (%s): %s" % (virtual_dev, err.msg))
        sys.exit(1)
Find a virtual device by name and instantiate it
210
10
22,248
def instantiate_interface(virtual_iface, config, loop):
    """Find a virtual interface by name (or script path) and instantiate it.

    Exits the process with status 1 if the interface cannot be loaded.
    """
    # Allow the null virtual interface for testing.
    if virtual_iface == 'null':
        return StandardDeviceServer(None, {}, loop=loop)
    conf = {}
    if 'interface' in config:
        conf = config['interface']
    try:
        registry = ComponentRegistry()
        if virtual_iface.endswith('.py'):
            _name, iface = registry.load_extension(virtual_iface,
                                                   class_filter=AbstractDeviceServer,
                                                   unique=True)
        else:
            _name, iface = registry.load_extensions('iotile.device_server',
                                                    name_filter=virtual_iface,
                                                    class_filter=AbstractDeviceServer,
                                                    unique=True)
        return iface(None, conf, loop=loop)
    except ArgumentError as err:
        print("ERROR: Could not load device_server (%s): %s" % (virtual_iface, err.msg))
        sys.exit(1)
Find a virtual interface by name and instantiate it
219
10
22,249
def generate(env):
    """Add Builders and construction variables for tar to an Environment."""
    # Install the Tar builder only if one is not already present.
    try:
        env['BUILDERS']['Tar']
    except KeyError:
        env['BUILDERS']['Tar'] = TarBuilder
    env['TAR'] = env.Detect(tars) or 'gtar'
    env['TARFLAGS'] = SCons.Util.CLVar('-c')
    env['TARCOM'] = '$TAR $TARFLAGS -f $TARGET $SOURCES'
    env['TARSUFFIX'] = '.tar'
Add Builders and construction variables for tar to an Environment .
136
12
22,250
def register_command(self, name, handler, validator):
    """Register a coroutine command handler.

    The (handler, validator) pair is stored under *name*, replacing any
    previous registration for that command.
    """
    entry = (handler, validator)
    self._commands[name] = entry
Register a coroutine command handler .
30
7
22,251
async def start ( self ) : if self . _server_task is not None : self . _logger . debug ( "AsyncValidatingWSServer.start() called twice, ignoring" ) return started_signal = self . _loop . create_future ( ) self . _server_task = self . _loop . add_task ( self . _run_server_task ( started_signal ) ) await started_signal if self . port is None : self . port = started_signal . result ( )
Start the websocket server .
114
6
22,252
async def _run_server_task ( self , started_signal ) : try : server = await websockets . serve ( self . _manage_connection , self . host , self . port ) port = server . sockets [ 0 ] . getsockname ( ) [ 1 ] started_signal . set_result ( port ) except Exception as err : self . _logger . exception ( "Error starting server on host %s, port %s" , self . host , self . port ) started_signal . set_exception ( err ) return try : while True : await asyncio . sleep ( 1 ) except asyncio . CancelledError : self . _logger . info ( "Stopping server due to stop() command" ) finally : server . close ( ) await server . wait_closed ( ) self . _logger . debug ( "Server stopped, exiting task" )
Create a BackgroundTask to manage the server .
191
9
22,253
async def send_event ( self , con , name , payload ) : message = dict ( type = "event" , name = name , payload = payload ) encoded = pack ( message ) await con . send ( encoded )
Send an event to a client connection .
47
8
22,254
def DviPdfPsFunction ( XXXDviAction , target = None , source = None , env = None ) : try : abspath = source [ 0 ] . attributes . path except AttributeError : abspath = '' saved_env = SCons . Scanner . LaTeX . modify_env_var ( env , 'TEXPICTS' , abspath ) result = XXXDviAction ( target , source , env ) if saved_env is _null : try : del env [ 'ENV' ] [ 'TEXPICTS' ] except KeyError : pass # was never set else : env [ 'ENV' ] [ 'TEXPICTS' ] = saved_env return result
A builder for DVI files that sets the TEXPICTS environment variable before running dvi2ps or dvipdf .
150
27
22,255
def PDFEmitter(target, source, env):
    """Strip any .aux or .log files from the input source list.

    These files are byproducts of the TeX Builder that in all likelihood
    produced the .dvi input; only the .dvi file itself matters here.
    """

    ignored = ['.aux', '.log']

    def keep(node):
        # Keep every node whose suffix is not a TeX byproduct.
        return SCons.Util.splitext(str(node))[1] not in ignored

    kept_sources = []
    for node in source:
        if keep(node):
            kept_sources.append(node)

    return (target, kept_sources)
Strips any . aux or . log files from the input source list . These are created by the TeX Builder that in all likelihood was used to generate the . dvi file we re using as input and we only care about the . dvi file .
76
53
22,256
def generate ( env ) : global PDFAction if PDFAction is None : PDFAction = SCons . Action . Action ( '$DVIPDFCOM' , '$DVIPDFCOMSTR' ) global DVIPDFAction if DVIPDFAction is None : DVIPDFAction = SCons . Action . Action ( DviPdfFunction , strfunction = DviPdfStrFunction ) from . import pdf pdf . generate ( env ) bld = env [ 'BUILDERS' ] [ 'PDF' ] bld . add_action ( '.dvi' , DVIPDFAction ) bld . add_emitter ( '.dvi' , PDFEmitter ) env [ 'DVIPDF' ] = 'dvipdf' env [ 'DVIPDFFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'DVIPDFCOM' ] = 'cd ${TARGET.dir} && $DVIPDF $DVIPDFFLAGS ${SOURCE.file} ${TARGET.file}' # Deprecated synonym. env [ 'PDFCOM' ] = [ '$DVIPDFCOM' ]
Add Builders and construction variables for dvipdf to an Environment .
264
15
22,257
def FromString ( cls , desc ) : parse_exp = Literal ( u'run_time' ) . suppress ( ) + time_interval ( u'interval' ) try : data = parse_exp . parseString ( desc ) return TimeBasedStopCondition ( data [ u'interval' ] [ 0 ] ) except ParseException : raise ArgumentError ( u"Could not parse time based stop condition" )
Parse this stop condition from a string representation .
91
10
22,258
def collectintargz ( target , source , env ) : # the rpm tool depends on a source package, until this is changed # this hack needs to be here that tries to pack all sources in. sources = env . FindSourceFiles ( ) # filter out the target we are building the source list for. sources = [ s for s in sources if s not in target ] # find the .spec file for rpm and add it since it is not necessarily found # by the FindSourceFiles function. sources . extend ( [ s for s in source if str ( s ) . rfind ( '.spec' ) != - 1 ] ) # sort to keep sources from changing order across builds sources . sort ( ) # as the source contains the url of the source package this rpm package # is built from, we extract the target name tarball = ( str ( target [ 0 ] ) + ".tar.gz" ) . replace ( '.rpm' , '' ) try : tarball = env [ 'SOURCE_URL' ] . split ( '/' ) [ - 1 ] except KeyError as e : raise SCons . Errors . UserError ( "Missing PackageTag '%s' for RPM packager" % e . args [ 0 ] ) tarball = src_targz . package ( env , source = sources , target = tarball , PACKAGEROOT = env [ 'PACKAGEROOT' ] , ) return ( target , tarball )
Puts all source files into a tar . gz file .
302
13
22,259
def build_specfile ( target , source , env ) : file = open ( target [ 0 ] . get_abspath ( ) , 'w' ) try : file . write ( build_specfile_header ( env ) ) file . write ( build_specfile_sections ( env ) ) file . write ( build_specfile_filesection ( env , source ) ) file . close ( ) # call a user specified function if 'CHANGE_SPECFILE' in env : env [ 'CHANGE_SPECFILE' ] ( target , source ) except KeyError as e : raise SCons . Errors . UserError ( '"%s" package field for RPM is missing.' % e . args [ 0 ] )
Builds a RPM specfile from a dictionary with string metadata and by analyzing a tree of nodes .
153
20
22,260
def build_specfile_sections ( spec ) : str = "" mandatory_sections = { 'DESCRIPTION' : '\n%%description\n%s\n\n' , } str = str + SimpleTagCompiler ( mandatory_sections ) . compile ( spec ) optional_sections = { 'DESCRIPTION_' : '%%description -l %s\n%s\n\n' , 'CHANGELOG' : '%%changelog\n%s\n\n' , 'X_RPM_PREINSTALL' : '%%pre\n%s\n\n' , 'X_RPM_POSTINSTALL' : '%%post\n%s\n\n' , 'X_RPM_PREUNINSTALL' : '%%preun\n%s\n\n' , 'X_RPM_POSTUNINSTALL' : '%%postun\n%s\n\n' , 'X_RPM_VERIFY' : '%%verify\n%s\n\n' , # These are for internal use but could possibly be overridden 'X_RPM_PREP' : '%%prep\n%s\n\n' , 'X_RPM_BUILD' : '%%build\n%s\n\n' , 'X_RPM_INSTALL' : '%%install\n%s\n\n' , 'X_RPM_CLEAN' : '%%clean\n%s\n\n' , } # Default prep, build, install and clean rules # TODO: optimize those build steps, to not compile the project a second time if 'X_RPM_PREP' not in spec : spec [ 'X_RPM_PREP' ] = '[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf "$RPM_BUILD_ROOT"' + '\n%setup -q' if 'X_RPM_BUILD' not in spec : spec [ 'X_RPM_BUILD' ] = '[ ! -e "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && mkdir "$RPM_BUILD_ROOT"' if 'X_RPM_INSTALL' not in spec : spec [ 'X_RPM_INSTALL' ] = 'scons --install-sandbox="$RPM_BUILD_ROOT" "$RPM_BUILD_ROOT"' if 'X_RPM_CLEAN' not in spec : spec [ 'X_RPM_CLEAN' ] = '[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf "$RPM_BUILD_ROOT"' str = str + SimpleTagCompiler ( optional_sections , mandatory = 0 ) . compile ( spec ) return str
Builds the sections of a rpm specfile .
661
10
22,261
def build_specfile_header ( spec ) : str = "" # first the mandatory sections mandatory_header_fields = { 'NAME' : '%%define name %s\nName: %%{name}\n' , 'VERSION' : '%%define version %s\nVersion: %%{version}\n' , 'PACKAGEVERSION' : '%%define release %s\nRelease: %%{release}\n' , 'X_RPM_GROUP' : 'Group: %s\n' , 'SUMMARY' : 'Summary: %s\n' , 'LICENSE' : 'License: %s\n' , } str = str + SimpleTagCompiler ( mandatory_header_fields ) . compile ( spec ) # now the optional tags optional_header_fields = { 'VENDOR' : 'Vendor: %s\n' , 'X_RPM_URL' : 'Url: %s\n' , 'SOURCE_URL' : 'Source: %s\n' , 'SUMMARY_' : 'Summary(%s): %s\n' , 'X_RPM_DISTRIBUTION' : 'Distribution: %s\n' , 'X_RPM_ICON' : 'Icon: %s\n' , 'X_RPM_PACKAGER' : 'Packager: %s\n' , 'X_RPM_GROUP_' : 'Group(%s): %s\n' , 'X_RPM_REQUIRES' : 'Requires: %s\n' , 'X_RPM_PROVIDES' : 'Provides: %s\n' , 'X_RPM_CONFLICTS' : 'Conflicts: %s\n' , 'X_RPM_BUILDREQUIRES' : 'BuildRequires: %s\n' , 'X_RPM_SERIAL' : 'Serial: %s\n' , 'X_RPM_EPOCH' : 'Epoch: %s\n' , 'X_RPM_AUTOREQPROV' : 'AutoReqProv: %s\n' , 'X_RPM_EXCLUDEARCH' : 'ExcludeArch: %s\n' , 'X_RPM_EXCLUSIVEARCH' : 'ExclusiveArch: %s\n' , 'X_RPM_PREFIX' : 'Prefix: %s\n' , # internal use 'X_RPM_BUILDROOT' : 'BuildRoot: %s\n' , } # fill in default values: # Adding a BuildRequires renders the .rpm unbuildable under System, which # are not managed by rpm, since the database to resolve this dependency is # missing (take Gentoo as an example) # if not s.has_key('x_rpm_BuildRequires'): # s['x_rpm_BuildRequires'] = 'scons' if 'X_RPM_BUILDROOT' not in spec : spec [ 'X_RPM_BUILDROOT' ] = '%{_tmppath}/%{name}-%{version}-%{release}' str = str + SimpleTagCompiler ( optional_header_fields , mandatory = 0 ) . compile ( spec ) return str
Builds all sections but the %file of a rpm specfile
736
13
22,262
def build_specfile_filesection ( spec , files ) : str = '%files\n' if 'X_RPM_DEFATTR' not in spec : spec [ 'X_RPM_DEFATTR' ] = '(-,root,root)' str = str + '%%defattr %s\n' % spec [ 'X_RPM_DEFATTR' ] supported_tags = { 'PACKAGING_CONFIG' : '%%config %s' , 'PACKAGING_CONFIG_NOREPLACE' : '%%config(noreplace) %s' , 'PACKAGING_DOC' : '%%doc %s' , 'PACKAGING_UNIX_ATTR' : '%%attr %s' , 'PACKAGING_LANG_' : '%%lang(%s) %s' , 'PACKAGING_X_RPM_VERIFY' : '%%verify %s' , 'PACKAGING_X_RPM_DIR' : '%%dir %s' , 'PACKAGING_X_RPM_DOCDIR' : '%%docdir %s' , 'PACKAGING_X_RPM_GHOST' : '%%ghost %s' , } for file in files : # build the tagset tags = { } for k in list ( supported_tags . keys ( ) ) : try : v = file . GetTag ( k ) if v : tags [ k ] = v except AttributeError : pass # compile the tagset str = str + SimpleTagCompiler ( supported_tags , mandatory = 0 ) . compile ( tags ) str = str + ' ' str = str + file . GetTag ( 'PACKAGING_INSTALL_LOCATION' ) str = str + '\n\n' return str
builds the %file section of the specfile
403
10
22,263
def compile(self, values):
    """Compile the tagset against *values* and return the resulting string.

    Domestic tags (e.g. 'NAME') are substituted directly.  International
    tags end with '_' (e.g. 'SUMMARY_') and are expanded once for every
    country-coded key in *values* (e.g. 'SUMMARY_de').

    Args:
        values (dict): Mapping from tag name to replacement value.

    Returns:
        str: The concatenated, formatted output.

    Raises:
        KeyError: If a mandatory domestic tag is missing from *values*.
    """
    def is_international(tag):
        return tag.endswith('_')

    def get_country_code(tag):
        return tag[-2:]

    def strip_country_code(tag):
        return tag[:-2]

    replacements = list(self.tagset.items())

    # Accumulate fragments and join once, instead of the original's
    # quadratic `str = str + ...` loop that also shadowed builtin `str`.
    parts = []

    domestic = [t for t in replacements if not is_international(t[0])]
    for key, replacement in domestic:
        try:
            parts.append(replacement % values[key])
        except KeyError:
            if self.mandatory:
                raise

    international = [t for t in replacements if is_international(t[0])]
    for key, replacement in international:
        try:
            matches = [t for t in values.items()
                       if strip_country_code(t[0]) == key]
            for tag, value in matches:
                parts.append(replacement % (get_country_code(tag), value))
        except KeyError:
            if self.mandatory:
                raise

    return "".join(parts)
Compiles the tagset and returns a str containing the result
249
12
22,264
def generate ( env ) : fscan = FortranScan ( "FORTRANPATH" ) SCons . Tool . SourceFileScanner . add_scanner ( '.i' , fscan ) SCons . Tool . SourceFileScanner . add_scanner ( '.i90' , fscan ) if 'FORTRANFILESUFFIXES' not in env : env [ 'FORTRANFILESUFFIXES' ] = [ '.i' ] else : env [ 'FORTRANFILESUFFIXES' ] . append ( '.i' ) if 'F90FILESUFFIXES' not in env : env [ 'F90FILESUFFIXES' ] = [ '.i90' ] else : env [ 'F90FILESUFFIXES' ] . append ( '.i90' ) add_all_to_env ( env ) env [ 'FORTRAN' ] = 'ifl' env [ 'SHFORTRAN' ] = '$FORTRAN' env [ 'FORTRANCOM' ] = '$FORTRAN $FORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' env [ 'FORTRANPPCOM' ] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' env [ 'SHFORTRANCOM' ] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET' env [ 'SHFORTRANPPCOM' ] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
Add Builders and construction variables for ifl to an Environment .
415
13
22,265
def generate ( env ) : findIt ( 'bcc32' , env ) static_obj , shared_obj = SCons . Tool . createObjBuilders ( env ) for suffix in [ '.c' , '.cpp' ] : static_obj . add_action ( suffix , SCons . Defaults . CAction ) shared_obj . add_action ( suffix , SCons . Defaults . ShCAction ) static_obj . add_emitter ( suffix , SCons . Defaults . StaticObjectEmitter ) shared_obj . add_emitter ( suffix , SCons . Defaults . SharedObjectEmitter ) env [ 'CC' ] = 'bcc32' env [ 'CCFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'CFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'CCCOM' ] = '$CC -q $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES' env [ 'SHCC' ] = '$CC' env [ 'SHCCFLAGS' ] = SCons . Util . CLVar ( '$CCFLAGS' ) env [ 'SHCFLAGS' ] = SCons . Util . CLVar ( '$CFLAGS' ) env [ 'SHCCCOM' ] = '$SHCC -WD $SHCFLAGS $SHCCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES' env [ 'CPPDEFPREFIX' ] = '-D' env [ 'CPPDEFSUFFIX' ] = '' env [ 'INCPREFIX' ] = '-I' env [ 'INCSUFFIX' ] = '' env [ 'SHOBJSUFFIX' ] = '.dll' env [ 'STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME' ] = 0 env [ 'CFILESUFFIX' ] = '.cpp'
Add Builders and construction variables for bcc to an Environment .
465
13
22,266
def require(builder_name):
    """Find an advertised autobuilder by name and return it.

    Args:
        builder_name (str): The name of the autobuild entry point to find.

    Returns:
        The first autobuild function advertised under that name.

    Raises:
        BuildError: If no installed distribution advertises the builder.
    """

    registry = ComponentRegistry()
    matches = registry.load_extensions('iotile.autobuild',
                                       name_filter=builder_name)

    for _name, autobuild_func in matches:
        # Return the first matching advertised builder.
        return autobuild_func

    raise BuildError('Cannot find required autobuilder, make sure the '
                     'distribution providing it is installed',
                     name=builder_name)
Find an advertised autobuilder and return it
86
10
22,267
def autobuild_onlycopy ( ) : try : # Build only release information family = utilities . get_family ( 'module_settings.json' ) autobuild_release ( family ) Alias ( 'release' , os . path . join ( 'build' , 'output' ) ) Default ( [ 'release' ] ) except unit_test . IOTileException as e : print ( e . format ( ) ) Exit ( 1 )
Autobuild a project that does not require building firmware, pcb, or documentation
95
15
22,268
def autobuild_docproject ( ) : try : #Build only release information family = utilities . get_family ( 'module_settings.json' ) autobuild_release ( family ) autobuild_documentation ( family . tile ) except unit_test . IOTileException as e : print ( e . format ( ) ) Exit ( 1 )
Autobuild a project that only contains documentation
76
9
22,269
def autobuild_arm_program ( elfname , test_dir = os . path . join ( 'firmware' , 'test' ) , patch = True ) : try : #Build for all targets family = utilities . get_family ( 'module_settings.json' ) family . for_all_targets ( family . tile . short_name , lambda x : arm . build_program ( family . tile , elfname , x , patch = patch ) ) #Build all unit tests unit_test . build_units ( os . path . join ( 'firmware' , 'test' ) , family . targets ( family . tile . short_name ) ) Alias ( 'release' , os . path . join ( 'build' , 'output' ) ) Alias ( 'test' , os . path . join ( 'build' , 'test' , 'output' ) ) Default ( [ 'release' , 'test' ] ) autobuild_release ( family ) if os . path . exists ( 'doc' ) : autobuild_documentation ( family . tile ) except IOTileException as e : print ( e . format ( ) ) sys . exit ( 1 )
Build the an ARM module for all targets and build all unit tests . If pcb files are given also build those .
258
24
22,270
def autobuild_doxygen ( tile ) : iotile = IOTile ( '.' ) doxydir = os . path . join ( 'build' , 'doc' ) doxyfile = os . path . join ( doxydir , 'doxygen.txt' ) outfile = os . path . join ( doxydir , '%s.timestamp' % tile . unique_id ) env = Environment ( ENV = os . environ , tools = [ ] ) env [ 'IOTILE' ] = iotile # There is no /dev/null on Windows if platform . system ( ) == 'Windows' : action = 'doxygen %s > NUL' % doxyfile else : action = 'doxygen %s > /dev/null' % doxyfile Alias ( 'doxygen' , doxydir ) env . Clean ( outfile , doxydir ) inputfile = doxygen_source_path ( ) env . Command ( doxyfile , inputfile , action = env . Action ( lambda target , source , env : generate_doxygen_file ( str ( target [ 0 ] ) , iotile ) , "Creating Doxygen Config File" ) ) env . Command ( outfile , doxyfile , action = env . Action ( action , "Building Firmware Documentation" ) )
Generate documentation for firmware in this module using doxygen
299
12
22,271
def autobuild_documentation ( tile ) : docdir = os . path . join ( '#doc' ) docfile = os . path . join ( docdir , 'conf.py' ) outdir = os . path . join ( 'build' , 'output' , 'doc' , tile . unique_id ) outfile = os . path . join ( outdir , '%s.timestamp' % tile . unique_id ) env = Environment ( ENV = os . environ , tools = [ ] ) # Only build doxygen documentation if we have C firmware to build from if os . path . exists ( 'firmware' ) : autobuild_doxygen ( tile ) env . Depends ( outfile , 'doxygen' ) # There is no /dev/null on Windows # Also disable color output on Windows since it seems to leave powershell # in a weird state. if platform . system ( ) == 'Windows' : action = 'sphinx-build --no-color -b html %s %s > NUL' % ( docdir [ 1 : ] , outdir ) else : action = 'sphinx-build -b html %s %s > /dev/null' % ( docdir [ 1 : ] , outdir ) env . Command ( outfile , docfile , action = env . Action ( action , "Building Component Documentation" ) ) Alias ( 'documentation' , outdir ) env . Clean ( outfile , outdir )
Generate documentation for this module using a combination of sphinx and breathe
323
15
22,272
def autobuild_bootstrap_file ( file_name , image_list ) : family = utilities . get_family ( 'module_settings.json' ) target = family . platform_independent_target ( ) resolver = ProductResolver . Create ( ) env = Environment ( tools = [ ] ) output_dir = target . build_dirs ( ) [ 'output' ] build_dir = target . build_dirs ( ) [ 'build' ] build_output_name = os . path . join ( build_dir , file_name ) full_output_name = os . path . join ( output_dir , file_name ) processed_input_images = [ ] for image_name in image_list : image_info = resolver . find_unique ( 'firmware_image' , image_name ) image_path = image_info . full_path hex_path = arm . ensure_image_is_hex ( image_path ) processed_input_images . append ( hex_path ) env . Command ( build_output_name , processed_input_images , action = Action ( arm . merge_hex_executables , "Merging %d hex files into $TARGET" % len ( processed_input_images ) ) ) env . Command ( full_output_name , build_output_name , Copy ( "$TARGET" , "$SOURCE" ) )
Combine multiple firmware images into a single bootstrap hex file .
301
13
22,273
def add_identifier(self, name, obj):
    """Add a known identifier resolution.

    Args:
        name: The identifier; coerced to ``str`` before storage.
        obj: The object the identifier resolves to.
    """

    key = str(name)
    self._known_identifiers[key] = obj
Add a known identifier resolution .
31
6
22,274
def resolve_identifier(self, name, expected_type=None):
    """Resolve an identifier to an object.

    Looks in this scope's known identifiers first, then delegates to the
    parent scope (without propagating ``expected_type``, matching the
    original contract).

    Args:
        name: The identifier to resolve; coerced to ``str``.
        expected_type (type): Optional type the locally resolved object
            must be an instance of.

    Returns:
        object: The resolved object.

    Raises:
        UnresolvedIdentifierError: If resolution fails or the locally
            resolved object has the wrong type.
    """

    name = str(name)

    if name in self._known_identifiers:
        obj = self._known_identifiers[name]

        wrong_type = (expected_type is not None
                      and not isinstance(obj, expected_type))
        if wrong_type:
            raise UnresolvedIdentifierError(u"Identifier resolved to an object of an unexpected type",
                                            name=name,
                                            expected_type=expected_type.__name__,
                                            resolved_type=obj.__class__.__name__)

        return obj

    if self.parent is not None:
        try:
            return self.parent.resolve_identifier(name)
        except UnresolvedIdentifierError:
            pass

    raise UnresolvedIdentifierError(u"Could not resolve identifier", name=name, scope=self.name)
Resolve an identifier to an object .
176
8
22,275
def FromReadings ( cls , uuid , readings , root_key = AuthProvider . NoKey , signer = None , report_id = IOTileReading . InvalidReadingID , selector = 0xFFFF , streamer = 0 , sent_timestamp = 0 ) : lowest_id = IOTileReading . InvalidReadingID highest_id = IOTileReading . InvalidReadingID report_len = 20 + 16 * len ( readings ) + 24 len_low = report_len & 0xFF len_high = report_len >> 8 unique_readings = [ x . reading_id for x in readings if x . reading_id != IOTileReading . InvalidReadingID ] if len ( unique_readings ) > 0 : lowest_id = min ( unique_readings ) highest_id = max ( unique_readings ) header = struct . pack ( "<BBHLLLBBH" , cls . ReportType , len_low , len_high , uuid , report_id , sent_timestamp , root_key , streamer , selector ) header = bytearray ( header ) packed_readings = bytearray ( ) for reading in readings : packed_reading = struct . pack ( "<HHLLL" , reading . stream , 0 , reading . reading_id , reading . raw_time , reading . value ) packed_readings += bytearray ( packed_reading ) footer_stats = struct . pack ( "<LL" , lowest_id , highest_id ) if signer is None : signer = ChainedAuthProvider ( ) # If we are supposed to encrypt this report, do the encryption if root_key != signer . NoKey : enc_data = packed_readings try : result = signer . encrypt_report ( uuid , root_key , enc_data , report_id = report_id , sent_timestamp = sent_timestamp ) except NotFoundError : raise ExternalError ( "Could not encrypt report because no AuthProvider supported " "the requested encryption method for the requested device" , device_id = uuid , root_key = root_key ) signed_data = header + result [ 'data' ] + footer_stats else : signed_data = header + packed_readings + footer_stats try : signature = signer . 
sign_report ( uuid , root_key , signed_data , report_id = report_id , sent_timestamp = sent_timestamp ) except NotFoundError : raise ExternalError ( "Could not sign report because no AuthProvider supported the requested " "signature method for the requested device" , device_id = uuid , root_key = root_key ) footer = struct . pack ( "16s" , bytes ( signature [ 'signature' ] [ : 16 ] ) ) footer = bytearray ( footer ) data = signed_data + footer return SignedListReport ( data )
Generate an instance of the report format from a list of readings and a uuid .
635
18
22,276
def decode ( self ) : fmt , len_low , len_high , device_id , report_id , sent_timestamp , signature_flags , origin_streamer , streamer_selector = unpack ( "<BBHLLLBBH" , self . raw_report [ : 20 ] ) assert fmt == 1 length = ( len_high << 8 ) | len_low self . origin = device_id self . report_id = report_id self . sent_timestamp = sent_timestamp self . origin_streamer = origin_streamer self . streamer_selector = streamer_selector self . signature_flags = signature_flags assert len ( self . raw_report ) == length remaining = self . raw_report [ 20 : ] assert len ( remaining ) >= 24 readings = remaining [ : - 24 ] footer = remaining [ - 24 : ] lowest_id , highest_id , signature = unpack ( "<LL16s" , footer ) signature = bytearray ( signature ) self . lowest_id = lowest_id self . highest_id = highest_id self . signature = signature signed_data = self . raw_report [ : - 16 ] signer = ChainedAuthProvider ( ) if signature_flags == AuthProvider . NoKey : self . encrypted = False else : self . encrypted = True try : verification = signer . verify_report ( device_id , signature_flags , signed_data , signature , report_id = report_id , sent_timestamp = sent_timestamp ) self . verified = verification [ 'verified' ] except NotFoundError : self . verified = False # If we were not able to verify the report, do not try to parse or decrypt it since we # can't guarantee who it came from. if not self . verified : return [ ] , [ ] # If the report is encrypted, try to decrypt it before parsing the readings if self . encrypted : try : result = signer . decrypt_report ( device_id , signature_flags , readings , report_id = report_id , sent_timestamp = sent_timestamp ) readings = result [ 'data' ] except NotFoundError : return [ ] , [ ] # Now parse all of the readings # Make sure this report has an integer number of readings assert ( len ( readings ) % 16 ) == 0 time_base = self . received_time - datetime . 
timedelta ( seconds = sent_timestamp ) parsed_readings = [ ] for i in range ( 0 , len ( readings ) , 16 ) : reading = readings [ i : i + 16 ] stream , _ , reading_id , timestamp , value = unpack ( "<HHLLL" , reading ) parsed = IOTileReading ( timestamp , stream , value , time_base = time_base , reading_id = reading_id ) parsed_readings . append ( parsed ) return parsed_readings , [ ]
Decode this report into a list of readings
625
9
22,277
def _add_property ( self , name , default_value ) : name = str ( name ) self . _properties [ name ] = default_value
Add a device property with a given default value .
32
10
22,278
def set(self, name, value):
    """Set a device model property.

    Args:
        name: The property name; coerced to ``str``. Must already exist.
        value: The new value to assign.

    Raises:
        ArgumentError: If the property has not been declared.
    """

    key = str(name)
    if key in self._properties:
        self._properties[key] = value
        return

    raise ArgumentError("Unknown property in DeviceModel", name=key)
Set a device model property .
50
6
22,279
def get(self, name):
    """Get a device model property.

    Args:
        name: The property name; coerced to ``str``.

    Returns:
        The current value of the property.

    Raises:
        ArgumentError: If the property has not been declared.
    """

    key = str(name)
    if key not in self._properties:
        raise ArgumentError("Unknown property in DeviceModel", name=key)

    return self._properties[key]
Get a device model property .
47
6
22,280
def _convert_to_bytes ( type_name , value ) : int_types = { 'uint8_t' : 'B' , 'int8_t' : 'b' , 'uint16_t' : 'H' , 'int16_t' : 'h' , 'uint32_t' : 'L' , 'int32_t' : 'l' } type_name = type_name . lower ( ) if type_name not in int_types and type_name not in [ 'string' , 'binary' ] : raise ArgumentError ( 'Type must be a known integer type, integer type array, string' , known_integers = int_types . keys ( ) , actual_type = type_name ) if type_name == 'string' : #value should be passed as a string bytevalue = bytes ( value ) elif type_name == 'binary' : bytevalue = bytes ( value ) else : bytevalue = struct . pack ( "<%s" % int_types [ type_name ] , value ) return bytevalue
Convert a typed value to a binary array
233
9
22,281
def dump(self):
    """Serialize this object to a dict of JSON-safe primitives."""

    encoded_data = base64.b64encode(self.data).decode('utf-8')

    return {
        'target': str(self.target),
        'data': encoded_data,
        'var_id': self.var_id,
        'valid': self.valid
    }
Serialize this object .
65
5
22,282
def generate_rpcs(self, address):
    """Generate the RPCs needed to stream this config variable to a tile.

    The variable data (after its 2-byte id header) is sent in 16-byte
    chunks, each as a SET_CONFIG_VARIABLE RPC carrying its offset.

    Args:
        address (int): The tile address to send the RPCs to.

    Returns:
        list: A list of RPC tuples.
    """

    return [(address, rpcs.SET_CONFIG_VARIABLE, self.var_id, offset - 2,
             self.data[offset:offset + 16])
            for offset in range(2, len(self.data), 16)]
Generate the RPCs needed to stream this config variable to a tile .
92
15
22,283
def Restore ( cls , state ) : target = SlotIdentifier . FromString ( state . get ( 'target' ) ) data = base64 . b64decode ( state . get ( 'data' ) ) var_id = state . get ( 'var_id' ) valid = state . get ( 'valid' ) return ConfigEntry ( target , var_id , data , valid )
Unserialize this object .
84
6
22,284
def compact(self):
    """Remove all invalid config entries and reclaim their data space."""

    reclaimed = sum(entry.data_space() for entry in self.entries
                    if not entry.valid)

    # Filter in place so any external references to the list stay valid.
    self.entries[:] = [entry for entry in self.entries if entry.valid]
    self.data_index -= reclaimed
Remove all invalid config entries .
81
6
22,285
def start_entry ( self , target , var_id ) : self . in_progress = ConfigEntry ( target , var_id , b'' ) if self . data_size - self . data_index < self . in_progress . data_space ( ) : return Error . DESTINATION_BUFFER_TOO_SMALL self . in_progress . data += struct . pack ( "<H" , var_id ) self . data_index += self . in_progress . data_space ( ) return Error . NO_ERROR
Begin a new config database entry .
116
7
22,286
def add_data(self, data):
    """Add data to the currently in progress entry.

    Args:
        data (bytes): The raw bytes to append to the open entry.

    Returns:
        int: Error.NO_ERROR on success, or
        Error.DESTINATION_BUFFER_TOO_SMALL if the database lacks space.
    """
    if self.data_size - self.data_index < len(data):
        return Error.DESTINATION_BUFFER_TOO_SMALL

    # NOTE(review): when no entry is in progress the data is silently
    # dropped but NO_ERROR is still returned — presumably matching the
    # firmware's behavior; confirm before changing.
    if self.in_progress is not None:
        self.in_progress.data += data

    return Error.NO_ERROR
Add data to the currently in progress entry .
69
9
22,287
def end_entry ( self ) : # Matching current firmware behavior if self . in_progress is None : return Error . NO_ERROR # Make sure there was actually data stored if self . in_progress . data_space ( ) == 2 : return Error . INPUT_BUFFER_WRONG_SIZE # Invalidate all previous copies of this config variable so we # can properly compact. for entry in self . entries : if entry . target == self . in_progress . target and entry . var_id == self . in_progress . var_id : entry . valid = False self . entries . append ( self . in_progress ) self . data_index += self . in_progress . data_space ( ) - 2 # Add in the rest of the entry size (we added two bytes at start_entry()) self . in_progress = None return Error . NO_ERROR
Finish a previously started config database entry .
186
8
22,288
def stream_matching(self, address, name):
    """Return the RPCs needed to stream matching config variables to a tile.

    Args:
        address (int): The tile address being configured.
        name: The tile name used for target matching.

    Returns:
        list: The flattened list of RPCs for all valid matching entries.
    """

    rpc_list = []
    for entry in self.entries:
        if not entry.valid:
            continue
        if not entry.target.matches(address, name):
            continue
        rpc_list.extend(entry.generate_rpcs(address))

    return rpc_list
Return the RPCs needed to stream matching config variables to the given tile .
74
15
22,289
def add_direct(self, target, var_id, var_type, data):
    """Directly add a config variable to the database.

    Args:
        target: The SlotIdentifier the variable targets.
        var_id (int): The 16-bit config variable id.
        var_type (str): A type name understood by _convert_to_bytes
            (e.g. 'uint8_t', 'string', 'binary').
        data: The value to store.

    Raises:
        DataError: If there is not enough free space in the database.
    """

    data = struct.pack("<H", var_id) + _convert_to_bytes(var_type, data)

    if self.data_size - self.data_index < len(data):
        # Fixed typo in the original error message ("conig" -> "config").
        raise DataError("Not enough space for data in new config entry",
                        needed_space=len(data),
                        actual_space=(self.data_size - self.data_index))

    new_entry = ConfigEntry(target, var_id, data)

    # Invalidate all previous copies of this variable so compaction can
    # later reclaim their space.
    for entry in self.entries:
        if entry.target == new_entry.target and entry.var_id == new_entry.var_id:
            entry.valid = False

    self.entries.append(new_entry)
    self.data_index += new_entry.data_space()
Directly add a config variable .
188
7
22,290
def start_config_var_entry ( self , var_id , encoded_selector ) : selector = SlotIdentifier . FromEncoded ( encoded_selector ) err = self . config_database . start_entry ( selector , var_id ) return [ err ]
Start a new config variable entry .
58
7
22,291
def get_config_var_entry ( self , index ) : if index == 0 or index > len ( self . config_database . entries ) : return [ Error . INVALID_ARRAY_KEY , 0 , 0 , 0 , b'\0' * 8 , 0 , 0 ] entry = self . config_database . entries [ index - 1 ] if not entry . valid : return [ ConfigDatabaseError . OBSOLETE_ENTRY , 0 , 0 , 0 , b'\0' * 8 , 0 , 0 ] offset = sum ( x . data_space ( ) for x in self . config_database . entries [ : index - 1 ] ) return [ Error . NO_ERROR , self . config_database . ENTRY_MAGIC , offset , entry . data_space ( ) , entry . target . encode ( ) , 0xFF , 0 ]
Get the metadata from the selected config variable entry .
188
10
22,292
def get_config_var_data ( self , index , offset ) : if index == 0 or index > len ( self . config_database . entries ) : return [ Error . INVALID_ARRAY_KEY , b'' ] entry = self . config_database . entries [ index - 1 ] if not entry . valid : return [ ConfigDatabaseError . OBSOLETE_ENTRY , b'' ] if offset >= len ( entry . data ) : return [ Error . INVALID_ARRAY_KEY , b'' ] data_chunk = entry . data [ offset : offset + 16 ] return [ Error . NO_ERROR , data_chunk ]
Get a chunk of data for a config variable .
142
10
22,293
def invalidate_config_var_entry ( self , index ) : if index == 0 or index > len ( self . config_database . entries ) : return [ Error . INVALID_ARRAY_KEY , b'' ] entry = self . config_database . entries [ index - 1 ] if not entry . valid : return [ ConfigDatabaseError . OBSOLETE_ENTRY , b'' ] entry . valid = False return [ Error . NO_ERROR ]
Mark a config variable as invalid .
100
7
22,294
def get_config_database_info(self):
    """Get memory usage and space statistics on the config database.

    Returns:
        list: [max_data_size, used_data_size, invalid_data_size,
        used_entries, invalid_entries, max_entries, 0]
    """

    db = self.config_database

    invalid = [entry for entry in db.entries if not entry.valid]
    invalid_size = sum(entry.data_space() for entry in invalid)

    return [db.data_size, db.data_index, invalid_size,
            len(db.entries), len(invalid), db.max_entries(), 0]
Get memory usage and space statistics on the config database .
154
11
22,295
def FindByName ( cls , name ) : if name . endswith ( '.py' ) : return cls . LoadFromFile ( name ) reg = ComponentRegistry ( ) for _name , tile in reg . load_extensions ( 'iotile.virtual_tile' , name_filter = name , class_filter = VirtualTile ) : return tile raise ArgumentError ( "VirtualTile could not be found by name" , name = name )
Find an installed VirtualTile by name .
97
8
22,296
def LoadFromFile ( cls , script_path ) : _name , dev = ComponentRegistry ( ) . load_extension ( script_path , class_filter = VirtualTile , unique = True ) return dev
Import a virtual tile from a file rather than an installed module
46
12
22,297
def stage(self):
    """Stage python packages for release, verifying everything we can.

    Checks PyPI credentials, twine availability, and the presence of the
    built wheel and sdist before recording them in ``self.dists``.

    Raises:
        BuildError: if credentials are missing, twine is not installed,
            the component ships no python packages, or the built
            distributions cannot be found on disk.
    """
    have_creds = 'PYPI_USER' in os.environ and 'PYPI_PASS' in os.environ
    if not have_creds:
        raise BuildError("You must set the PYPI_USER and PYPI_PASS environment variables")

    # twine is only imported to prove it is installed; uploading happens later.
    try:
        import twine
    except ImportError:
        raise BuildError("You must install twine in order to release python packages",
                         suggestion="pip install twine")

    if not self.component.has_wheel:
        raise BuildError("You can't release a component to a PYPI repository if it doesn't have python packages")

    # Locate the previously built distributions that we will upload.
    wheel_name = self.component.support_wheel
    sdist_name = "%s-%s.tar.gz" % (self.component.support_distribution,
                                   self.component.parsed_version.pep440_string())

    python_out = os.path.join(self.component.output_folder, 'python')
    wheel_path = os.path.realpath(os.path.abspath(os.path.join(python_out, wheel_name)))
    sdist_path = os.path.realpath(os.path.abspath(os.path.join(python_out, sdist_name)))

    if not (os.path.isfile(wheel_path) and os.path.isfile(sdist_path)):
        raise BuildError("Could not find built wheel or sdist matching current built version",
                         sdist_path=sdist_path, wheel_path=wheel_path)

    self.dists = [sdist_path, wheel_path]
Stage python packages for release verifying everything we can about them .
353
12
22,298
def add_data(self, data):
    """Append *data* to the stream buffer and emit any complete reports.

    Silently drops data once the parser has entered its error state.

    Args:
        data (bytes or bytearray): raw bytes received from the stream.
    """
    if self.state == self.ErrorState:
        return

    self.raw_data += bytearray(data)

    # process_data returns True while it keeps making progress; keep
    # calling until one pass extracts nothing further.
    while self.process_data():
        pass
Add data to our stream emitting reports as each new one is seen
55
13
22,299
def process_data(self):
    """Attempt one parsing step on the buffered stream data.

    Three-stage state machine: read the one-byte report type, then the
    header (whose size depends on the type), then the full report.  Each
    stage that completes sets ``further_processing`` so the caller knows
    to call again; a stage that raises puts the parser into ErrorState
    and either invokes ``error_callback`` or re-raises.

    Returns:
        bool: True if any stage advanced (more data may be extractable).
    """
    further_processing = False

    # Stage 1: the first byte of a report identifies its type.
    if self.state == self.WaitingForReportType and len(self.raw_data) > 0:
        self.current_type = self.raw_data[0]

        try:
            self.current_header_size = self.calculate_header_size(self.current_type)
            self.state = self.WaitingForReportHeader
            further_processing = True
        except Exception as exc:
            self.state = self.ErrorState
            if self.error_callback:
                self.error_callback(self.ErrorFindingReportType, str(exc), self.context)
            else:
                raise

    # Stage 2: once enough bytes for the header have arrived, the header
    # tells us the total report size.
    if self.state == self.WaitingForReportHeader and len(self.raw_data) >= self.current_header_size:
        try:
            self.current_report_size = self.calculate_report_size(self.current_type, self.raw_data[:self.current_header_size])
            self.state = self.WaitingForCompleteReport
            further_processing = True
        except Exception as exc:
            self.state = self.ErrorState
            if self.error_callback:
                self.error_callback(self.ErrorParsingReportHeader, str(exc), self.context)
            else:
                raise

    # Stage 3: with the full report buffered, consume it from raw_data,
    # parse and dispatch it, then go back to waiting for the next type byte.
    if self.state == self.WaitingForCompleteReport and len(self.raw_data) >= self.current_report_size:
        try:
            report_data = self.raw_data[:self.current_report_size]
            self.raw_data = self.raw_data[self.current_report_size:]

            report = self.parse_report(self.current_type, report_data)
            self._handle_report(report)
            self.state = self.WaitingForReportType
            further_processing = True
        except Exception as exc:
            self.state = self.ErrorState
            if self.error_callback:
                self.error_callback(self.ErrorParsingCompleteReport, str(exc), self.context)
            else:
                raise

    return further_processing
Attempt to extract a report from the current data stream contents
429
11