idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
22,000
def get_message(self, message_id):
    """Look up a stored message by its persistent id.

    Raises ArgumentError if no message has the given id.
    """
    found = next((msg for msg in self.messages if msg.id == message_id), None)
    if found is None:
        raise ArgumentError("Message ID not found", message_id=message_id)

    return found
Get a message by its persistent id .
22,001
def post_message(self, level, message, count=1, timestamp=None, now_reference=None):
    """Post a new message for this service.

    If the new message text matches the most recent message, the existing
    entry is coalesced by incrementing its count instead of appending a
    duplicate ServiceMessage.

    Returns the ServiceMessage that was created or updated.
    """
    if len(self.messages) > 0 and self.messages[-1].message == message:
        # Bug fix: accumulate the caller-supplied count when coalescing,
        # not a hard-coded 1, so posting with count > 1 is not lost.
        self.messages[-1].count += count
    else:
        msg_object = ServiceMessage(level, message, self._last_message_id, timestamp, now_reference)
        msg_object.count = count
        self.messages.append(msg_object)
        self._last_message_id += 1

    return self.messages[-1]
Post a new message for service .
22,002
def set_headline(self, level, message, timestamp=None, now_reference=None):
    """Set the persistent headline message for this service.

    Setting the same headline text again just refreshes its creation
    time and bumps its count rather than allocating a new message.
    """
    existing = self.headline
    if existing is not None and existing.message == message:
        existing.created = monotonic()
        existing.count += 1
        return

    self.headline = ServiceMessage(level, message, self._last_message_id,
                                   timestamp, now_reference)
    self._last_message_id += 1
Set the persistent headline message for this service .
22,003
def generate_doxygen_file(output_path, iotile):
    """Fill in the default doxygen template file with info from an IOTile."""
    mapping = {
        'short_name': iotile.short_name,
        'full_name': iotile.full_name,
        'authors': iotile.authors,
        'version': iotile.version,
    }

    render_template('doxygen.txt.tpl', mapping, out_path=output_path)
Fill in our default doxygen template file with info from an IOTile
22,004
def pull(name, version, force=False):
    """Pull a released IOTile component into the current working directory."""
    resolver = DependencyResolverChain()
    version_range = SemanticVersionRange.FromString(version)
    resolver.pull_release(name, version_range, force=force)
Pull a released IOTile component into the current working directory
22,005
def add_callback(self, name, func):
    """Register a callback for device events.

    Supported names are on_scan, on_report, on_trace and on_disconnect;
    anything else raises ArgumentError.
    """
    def scan_cb(_conn_string, _conn_id, _name, event):
        func(self.id, event, event.get('validity_period', 60))

    def event_cb(_conn_string, conn_id, _name, event):
        func(conn_id, event)

    def disconnect_cb(_conn_string, conn_id, _name, _event):
        func(self.id, conn_id)

    # Map each callback name to (adapter event names, wrapper).
    handlers = {
        'on_scan': (['device_seen'], scan_cb),
        'on_report': (['report', 'broadcast'], event_cb),
        'on_trace': (['trace'], event_cb),
        'on_disconnect': (['disconnection'], disconnect_cb),
    }

    if name not in handlers:
        raise ArgumentError("Unknown callback type {}".format(name))

    events, callback = handlers[name]
    self._adapter.register_monitor([None], events, callback)
Add a callback when device events happen .
22,006
def disconnect_async(self, conn_id, callback):
    """Asynchronously disconnect from a device, routing completion to callback."""
    coro = self._adapter.disconnect(conn_id)
    future = self._loop.launch_coroutine(coro)

    def _finished(completed):
        self._callback_future(conn_id, completed, callback)

    future.add_done_callback(_finished)
Asynchronously disconnect from a device .
22,007
def send_script_async(self, conn_id, data, progress_callback, callback):
    """Asynchronously send a script to the device.

    A progress monitor is installed first so progress_callback(finished,
    total) is invoked as the transfer advances; if installation fails the
    callback is invoked immediately with a failure.  Otherwise the actual
    send is launched and completion is routed through callback.
    """

    def monitor_callback(_conn_string, _conn_id, _event_name, event):
        # Only forward progress events belonging to script transfers.
        if event.get('operation') != 'script':
            return

        progress_callback(event.get('finished'), event.get('total'))

    async def _install_monitor():
        try:
            conn_string = self._adapter._get_property(conn_id, 'connection_string')
            return self._adapter.register_monitor([conn_string], ['progress'], monitor_callback)
        except Exception:
            # Bug fix: was a bare except, which would also swallow
            # SystemExit/KeyboardInterrupt.
            self._logger.exception("Error installing script progress monitor")
            return None

    monitor_id = self._loop.run_coroutine(_install_monitor())
    if monitor_id is None:
        callback(conn_id, self.id, False, 'could not install progress monitor')
        return

    future = self._loop.launch_coroutine(self._adapter.send_script(conn_id, data))
    future.add_done_callback(lambda x: self._callback_future(conn_id, x, callback, monitors=[monitor_id]))
Asynchronously send a script to the device .
22,008
def lock(self, key, client):
    """Record the key and client used to ensure messages come from one party."""
    self.key, self.client = key, client
Set the key that will be used to ensure messages come from one party
22,009
def track_change(self, tile, property_name, value, formatter=None):
    """Record that a property changed on a given tile.

    Does nothing unless tracking is enabled and the (tile, property)
    pair passes the whitelist (an empty whitelist admits everything).
    formatter defaults to str and produces the stored string form.
    """
    if not self.tracking:
        return

    whitelisted = not self._whitelist or (tile, property_name) in self._whitelist
    if not whitelisted:
        return

    if formatter is None:
        formatter = str

    change = StateChange(monotonic(), tile, property_name, value, formatter(value))

    with self._lock:
        self.changes.append(change)
Record that a change happened on a given tile's property.
22,010
def dump(self, out_path, header=True):
    """Save this list of changes as a CSV file at out_path.

    Works on both Python 2 (binary mode) and Python 3, where the file
    must be opened with newline='' for the csv module.
    """
    if sys.version_info[0] < 3:
        mode = "wb"
        open_kwargs = {}
    else:
        mode = "w"
        # Bug fix: csv.writer requires newline='' on Python 3, otherwise
        # spurious blank rows appear on platforms with \r\n line endings.
        open_kwargs = {"newline": ""}

    with open(out_path, mode, **open_kwargs) as outfile:
        writer = csv.writer(outfile, quoting=csv.QUOTE_MINIMAL)

        if header:
            writer.writerow(["Timestamp", "Tile Address", "Property Name", "Value"])

        for entry in self.changes:
            writer.writerow([entry.time, entry.tile, entry.property, entry.string_value])
Save this list of changes as a csv file at out_path .
22,011
def generate(env):
    """Add Builders and construction variables for pdftex to an Environment."""
    # Lazily create the module-level actions so repeated generate() calls
    # share one Action instance each.
    global PDFTeXAction
    if PDFTeXAction is None:
        PDFTeXAction = SCons.Action.Action('$PDFTEXCOM', '$PDFTEXCOMSTR')

    global PDFLaTeXAction
    if PDFLaTeXAction is None:
        PDFLaTeXAction = SCons.Action.Action("$PDFLATEXCOM", "$PDFLATEXCOMSTR")

    global PDFTeXLaTeXAction
    if PDFTeXLaTeXAction is None:
        # Dispatch action; presumably chooses pdfTeX vs pdfLaTeX per file
        # (see PDFTeXLaTeXFunction) -- behavior defined outside this block.
        PDFTeXLaTeXAction = SCons.Action.Action(PDFTeXLaTeXFunction,
                                                strfunction=SCons.Tool.tex.TeXLaTeXStrFunction)

    env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes)

    from . import pdf
    pdf.generate(env)

    # Wire the .tex suffix into the existing PDF builder.
    bld = env['BUILDERS']['PDF']
    bld.add_action('.tex', PDFTeXLaTeXAction)
    bld.add_emitter('.tex', SCons.Tool.tex.tex_pdf_emitter)

    pdf.generate2(env)

    # Shared TeX construction variables (paths, env vars, etc.).
    SCons.Tool.tex.generate_common(env)
Add Builders and construction variables for pdftex to an Environment .
22,012
def stop(self):
    """Stop running this virtual device including any worker threads.

    All tiles are signalled first and then joined, so shutdown of the
    workers proceeds in parallel rather than one tile at a time.
    """
    tiles = list(self._tiles.values())

    for tile in tiles:
        tile.signal_stop()

    for tile in tiles:
        tile.wait_stopped()

    super(TileBasedVirtualDevice, self).stop()
Stop running this virtual device including any worker threads .
22,013
def SetCacheMode(mode):
    """Set the Configure cache mode.

    mode must be one of "auto", "force" or "cache"; any other value
    raises ValueError.
    """
    global cache_mode

    if mode == "auto":
        cache_mode = AUTO
        return
    if mode == "force":
        cache_mode = FORCE
        return
    if mode == "cache":
        cache_mode = CACHE
        return

    raise ValueError("SCons.SConf.SetCacheMode: Unknown mode " + mode)
Set the Configure cache mode . mode must be one of auto force or cache .
22,014
def CreateConfigHBuilder(env):
    """Called if necessary just before the building-targets phase begins.

    Installs a builder that writes each registered config header from
    its accumulated text.
    """
    action = SCons.Action.Action(_createConfigH, _stringConfigH)
    env.Append(BUILDERS={'SConfigHBuilder': SCons.Builder.Builder(action=action)})

    for config_h in list(_ac_config_hs.keys()):
        env.SConfigHBuilder(config_h, env.Value(_ac_config_hs[config_h]))
Called if necessary just before the building targets phase begins .
22,015
def CheckHeader(context, header, include_quotes='<>', language=None):
    """A test for a C or C++ header file."""
    prog_prefix, hdr_to_check = createIncludesFromHeaders(header, 1, include_quotes)
    failed = SCons.Conftest.CheckHeader(context, hdr_to_check, prog_prefix,
                                        language=language,
                                        include_quotes=include_quotes)
    context.did_show_result = 1
    # Conftest returns a failure flag; invert so True means "found".
    return not failed
A test for a C or C ++ header file .
22,016
def CheckLib(context, library=None, symbol="main", header=None, language=None, autoadd=1):
    """A test for a library. See also CheckLibWithHeader.

    Note that library may also be None to test whether the given symbol
    compiles without flags.
    """
    if library == []:
        library = [None]

    if not SCons.Util.is_List(library):
        library = [library]

    failed = SCons.Conftest.CheckLib(context, library, symbol, header=header,
                                     language=language, autoadd=autoadd)
    context.did_show_result = 1
    # Conftest returns a failure flag; invert so True means success.
    return not failed
A test for a library . See also CheckLibWithHeader . Note that library may also be None to test whether the given symbol compiles without flags .
22,017
def CheckProg(context, prog_name):
    """Simple check if a program exists in the path.

    Returns the path for the application, or None if not found.
    """
    path = SCons.Conftest.CheckProg(context, prog_name)
    context.did_show_result = 1
    return path
Simple check if a program exists in the path . Returns the path for the application or None if not found .
22,018
def display_cached_string(self, bi):
    """Log the original builder messages, given the SConfBuildInfo instance bi."""
    if not isinstance(bi, SConfBuildInfo):
        SCons.Warnings.warn(
            SConfWarning,
            "The stored build information has an unexpected class: %s" % bi.__class__)
        return

    # Prefix every line of the cached output with " |" for readability.
    indented = (" |" + str(bi.string)).replace("\n", "\n |")
    self.display("The original builder output was:\n" + indented)
Logs the original builder messages given the SConfBuildInfo instance bi .
22,019
def Define(self, name, value=None, comment=None):
    """Append a #define for name (optionally with value) to the config header.

    An optional /* comment */ line is emitted above the define.
    """
    lines = []

    if comment:
        lines.append("/* %s */" % comment)

    if value is not None:
        lines.append("#define %s %s" % (name, value))
    else:
        lines.append("#define %s" % name)

    # Trailing empty entry yields a newline after the define.
    lines.append('')

    self.config_h_text = self.config_h_text + '\n'.join(lines)
Define a pre processor symbol name with the optional given value in the current config header .
22,020
def BuildNodes(self, nodes):
    """Tries to build the given nodes immediately.

    Returns 1 on success, 0 on error.
    """
    if self.logstream is not None:
        # Redirect stdout/stderr into the config log while building.
        oldStdout = sys.stdout
        sys.stdout = self.logstream
        oldStderr = sys.stderr
        sys.stderr = self.logstream

    # Build from the top-level directory so relative paths resolve;
    # both the SConf filesystem cwd and the OS cwd are saved/restored.
    old_fs_dir = SConfFS.getcwd()
    old_os_dir = os.getcwd()
    SConfFS.chdir(SConfFS.Top, change_os_dir=1)

    for n in nodes:
        # Configure-check nodes must not pollute the signature database.
        n.store_info = 0
        if not hasattr(n, 'attributes'):
            n.attributes = SCons.Node.Node.Attrs()
        n.attributes.keep_targetinfo = 1

    ret = 1

    try:
        # Disable max-drift caching so content is always re-examined.
        save_max_drift = SConfFS.get_max_drift()
        SConfFS.set_max_drift(0)
        tm = SCons.Taskmaster.Taskmaster(nodes, SConfBuildTask)
        # Configure checks are built serially with a single job.
        jobs = SCons.Job.Jobs(1, tm)
        jobs.run()
        for n in nodes:
            state = n.get_state()
            if (state != SCons.Node.executed and
                    state != SCons.Node.up_to_date):
                # At least one node failed to build.
                ret = 0
    finally:
        SConfFS.set_max_drift(save_max_drift)
        os.chdir(old_os_dir)
        SConfFS.chdir(old_fs_dir, change_os_dir=0)
        if self.logstream is not None:
            # Restore stdout/stderr.
            sys.stdout = oldStdout
            sys.stderr = oldStderr

    return ret
Tries to build the given nodes immediately . Returns 1 on success 0 on error .
22,021
def pspawn_wrapper(self, sh, escape, cmd, args, env):
    """Wrapper for piped spawns: send both output streams to the log."""
    log = self.logstream
    return self.pspawn(sh, escape, cmd, args, env, log, log)
Wrapper function for handling piped spawns .
22,022
def _startup(self):
    """Private method. Set up the logstream and the environment variables
    necessary for a piped build.
    """
    global _ac_config_logs
    global sconf_global
    global SConfFS

    # Swap in the dedicated SConf filesystem for the duration of the run.
    self.lastEnvFs = self.env.fs
    self.env.fs = SConfFS
    self._createDir(self.confdir)
    # Keep dependency scanning out of the conf directory.
    self.confdir.up().add_ignore([self.confdir])

    if self.logfile is not None and not dryrun:
        # Truncate the log the first time it is used in this session,
        # append on subsequent Configure contexts.
        if self.logfile in _ac_config_logs:
            log_mode = "a"
        else:
            _ac_config_logs[self.logfile] = None
            log_mode = "w"
        fp = open(str(self.logfile), log_mode)
        self.logstream = SCons.Util.Unbuffered(fp)
        # The log file itself must not become a dependency.
        self.logfile.dir.add_ignore([self.logfile])

        # Record which SConscript line created this Configure context;
        # -3 - depth walks back past our own frames to the caller's.
        tb = traceback.extract_stack()[-3 - self.depth]
        old_fs_dir = SConfFS.getcwd()
        SConfFS.chdir(SConfFS.Top, change_os_dir=0)
        self.logstream.write('file %s,line %d:\n\tConfigure(confdir = %s)\n' %
                             (tb[0], tb[1], str(self.confdir)))
        SConfFS.chdir(old_fs_dir)
    else:
        self.logstream = None

    # Install the builder that writes generated test source files.
    action = SCons.Action.Action(_createSource, _stringSource)
    sconfSrcBld = SCons.Builder.Builder(action=action)
    self.env.Append(BUILDERS={'SConfSourceBuilder': sconfSrcBld})
    self.config_h_text = _ac_config_hs.get(self.config_h, "")
    self.active = 1
    # Only one SConf instance may be active at a time.
    sconf_global = self
Private method . Set up logstream and set the environment variables necessary for a piped build
22,023
def _shutdown(self):
    """Private method. Reset to a non-piped spawn and deactivate this context."""
    global sconf_global, _ac_config_hs

    # Finish() must only run once per Configure context.
    if not self.active:
        raise SCons.Errors.UserError("Finish may be called only once!")

    if self.logstream is not None and not dryrun:
        self.logstream.write("\n")
        self.logstream.close()
        self.logstream = None

    # Remove the temporary source builder installed by _startup.
    blds = self.env['BUILDERS']
    del blds['SConfSourceBuilder']
    self.env.Replace(BUILDERS=blds)

    self.active = 0
    sconf_global = None

    # Persist the accumulated config header text for later writing.
    if not self.config_h is None:
        _ac_config_hs[self.config_h] = self.config_h_text

    # Restore the environment's original filesystem object.
    self.env.fs = self.lastEnvFs
Private method . Reset to non - piped spawn
22,024
def Result(self, res):
    """Inform about the result of the test.

    A string is shown verbatim; any other value displays "yes" if it
    evaluates true and "no" otherwise.  Nothing is displayed once
    did_show_result is set.
    """
    if isinstance(res, str):
        text = res
    else:
        text = "yes" if res else "no"

    if self.did_show_result == 0:
        self.Display(text + "\n")
        self.did_show_result = 1
Inform about the result of the test . If res is not a string displays yes or no depending on whether res is evaluated as true or false . The result is only displayed when self . did_show_result is not set .
22,025
def linux_ver_normalize(vstr):
    """Normalize a Linux compiler version number to an old-style float.

    Intel changed from 80 to 9.0 numbering in 2005, so values above 60
    are assumed to already be old-style; smaller new-style values are
    multiplied by 10.  Always returns a float like 80 or 90 for
    compatibility with Windows.
    """
    match = re.match(r'([0-9]+)\.([0-9]+)\.([0-9]+)', vstr)
    if match:
        vmaj, vmin, build = match.groups()
        return float(vmaj) * 10. + float(vmin) + float(build) / 1000.

    f = float(vstr)
    if is_windows:
        return f

    return f * 10.0 if f < 60 else f
Normalize a Linux compiler version number. Intel changed from 80 to 9.0 in 2005, so we assume that if the number is greater than 60 it's an old-style number, and otherwise new-style. Always returns an old-style float like 80 or 90 for compatibility with Windows. Shades of Y2K!
22,026
def parse_node_descriptor(desc, model):
    """Parse a string node descriptor.

    Returns a tuple (node, inputs, processing) where inputs is a list of
    (DataStreamSelector, trigger-or-None) pairs and processing is the
    name of the processing function.
    """
    # The original wrapped this in "except ParseException: raise", which
    # was a no-op; parse errors propagate to the caller either way.
    data = graph_node.parseString(desc)

    stream_desc = u' '.join(data['node'])
    stream = DataStream.FromString(stream_desc)
    node = SGNode(stream, model)

    inputs = []

    # Both optional inputs have identical structure; parse them uniformly.
    for key in ('input_a', 'input_b'):
        if key not in data:
            continue

        parsed = data[key]
        selector = DataStreamSelector.FromString(u' '.join(parsed['input_stream']))

        trigger = None
        if 'type' in parsed:
            trigger = InputTrigger(parsed['type'], parsed['op'], int(parsed['reference'], 0))

        inputs.append((selector, trigger))

    if 'combiner' in data and str(data['combiner']) == u'||':
        node.trigger_combiner = SGNode.OrTriggerCombiner
    else:
        node.trigger_combiner = SGNode.AndTriggerCombiner

    processing = data['processor']
    return node, inputs, processing
Parse a string node descriptor .
22,027
def create_binary_descriptor(descriptor):
    """Convert a string node descriptor into a 20-byte binary descriptor.

    This is the inverse operation of parse_binary_descriptor.
    """
    func_names = {0: 'copy_latest_a', 1: 'average_a', 2: 'copy_all_a',
                  3: 'sum_a', 4: 'copy_count_a', 5: 'trigger_streamer',
                  6: 'call_rpc', 7: 'subtract_afromb'}

    # Invert the id->name table so names map back to function codes.
    func_codes = {y: x for x, y in func_names.items()}

    node, inputs, processing = parse_node_descriptor(descriptor, DeviceModel())

    func_code = func_codes.get(processing)

    if func_code is None:
        raise ArgumentError("Unknown processing function", function=processing)

    stream_a, trigger_a = inputs[0]
    stream_a = stream_a.encode()

    if len(inputs) == 2:
        stream_b, trigger_b = inputs[1]
        stream_b = stream_b.encode()
    else:
        # 0xFFFF marks "no second input".
        stream_b, trigger_b = 0xFFFF, None

    # Missing triggers default to always-true.
    if trigger_a is None:
        trigger_a = TrueTrigger()

    if trigger_b is None:
        trigger_b = TrueTrigger()

    ref_a = 0
    if isinstance(trigger_a, InputTrigger):
        ref_a = trigger_a.reference

    ref_b = 0
    if isinstance(trigger_b, InputTrigger):
        ref_b = trigger_b.reference

    # Reduce the trigger objects to their 8-bit binary condition codes.
    trigger_a = _create_binary_trigger(trigger_a)
    trigger_b = _create_binary_trigger(trigger_b)

    combiner = node.trigger_combiner

    # Layout: refs (2x u32), node stream, input streams (3x u16),
    # function code, two condition bytes, combiner, 2 pad bytes.
    bin_desc = struct.pack("<LLHHHBBBB2x", ref_a, ref_b, node.stream.encode(),
                           stream_a, stream_b, func_code, trigger_a, trigger_b,
                           combiner)

    return bin_desc
Convert a string node descriptor into a 20 - byte binary descriptor .
22,028
def parse_binary_descriptor(bindata):
    """Convert a 20-byte binary node descriptor into a string descriptor.

    Raises ArgumentError on wrong length, a missing first input, an
    unknown trigger combiner or an unknown processing function.
    """
    func_names = {0: 'copy_latest_a', 1: 'average_a', 2: 'copy_all_a',
                  3: 'sum_a', 4: 'copy_count_a', 5: 'trigger_streamer',
                  6: 'call_rpc', 7: 'subtract_afromb'}

    if len(bindata) != 20:
        raise ArgumentError("Invalid binary node descriptor with incorrect size",
                            size=len(bindata), expected=20, bindata=bindata)

    a_trig, b_trig, stream_id, a_id, b_id, proc, a_cond, b_cond, trig_combiner = \
        struct.unpack("<LLHHHBBBB2x", bindata)

    node_stream = DataStream.FromEncoded(stream_id)

    # 0xFFFF marks "no input"; the first input is mandatory.
    if a_id == 0xFFFF:
        raise ArgumentError("Invalid binary node descriptor with invalid first input",
                            input_selector=a_id)

    a_selector = DataStreamSelector.FromEncoded(a_id)
    a_trigger = _process_binary_trigger(a_trig, a_cond)

    b_selector = None
    b_trigger = None

    if b_id != 0xFFFF:
        b_selector = DataStreamSelector.FromEncoded(b_id)
        b_trigger = _process_binary_trigger(b_trig, b_cond)

    if trig_combiner == SGNode.AndTriggerCombiner:
        comb = '&&'
    elif trig_combiner == SGNode.OrTriggerCombiner:
        comb = '||'
    else:
        raise ArgumentError("Invalid trigger combiner in binary node descriptor",
                            combiner=trig_combiner)

    if proc not in func_names:
        raise ArgumentError("Unknown processing function", function_id=proc,
                            known_functions=func_names)

    func_name = func_names[proc]

    # Single-input and two-input nodes render differently.
    if b_selector is None:
        return '({} {}) => {} using {}'.format(a_selector, a_trigger, node_stream, func_name)

    return '({} {} {} {} {}) => {} using {}'.format(a_selector, a_trigger, comb,
                                                    b_selector, b_trigger,
                                                    node_stream, func_name)
Convert a binary node descriptor into a string descriptor .
22,029
def _process_binary_trigger(trigger_value, condition):
    """Decode an 8-bit condition byte into an InputTrigger or TrueTrigger.

    Bit 0 of condition selects the source (value/count); the remaining
    bits select the comparison operation.
    """
    ops = {0: ">", 1: "<", 2: ">=", 3: "<=", 4: "==", 5: 'always'}
    sources = {0: 'value', 1: 'count'}

    encoded_source = condition & 0b1
    encoded_op = condition >> 1

    oper = ops.get(encoded_op, None)
    source = sources.get(encoded_source, None)

    if oper is None:
        raise ArgumentError("Unknown operation in binary trigger", condition=condition,
                            operation=encoded_op, known_ops=ops)

    if source is None:
        # Bug fix: report the encoded source bit rather than the decoded
        # value, which is always None on this path.
        raise ArgumentError("Unknown value source in binary trigger",
                            source=encoded_source, known_sources=sources)

    if oper == 'always':
        return TrueTrigger()

    return InputTrigger(source, oper, trigger_value)
Create an InputTrigger object .
22,030
def _create_binary_trigger(trigger):
    """Encode a TrueTrigger or InputTrigger into its 8-bit binary form.

    FalseTrigger cannot be represented and raises ArgumentError.
    """
    ops = {0: ">", 1: "<", 2: ">=", 3: "<=", 4: "==", 5: 'always'}
    op_codes = {name: code for code, name in ops.items()}

    source_bit = 0

    if isinstance(trigger, TrueTrigger):
        op_code = op_codes['always']
    elif isinstance(trigger, FalseTrigger):
        raise ArgumentError("Cannot express a never trigger in binary descriptor", trigger=trigger)
    else:
        op_code = op_codes[trigger.comp_string]
        if trigger.use_count:
            source_bit = 1

    return (op_code << 1) | source_bit
Create an 8 - bit binary trigger from an InputTrigger TrueTrigger FalseTrigger .
22,031
def _try_assign_utc_time(self, raw_time, time_base):
    """Try to assign a UTC time to this reading.

    If bit 31 of raw_time is set, the remaining bits are seconds since
    the Y2K reference epoch; otherwise raw_time is treated as an offset
    added to time_base.  Returns None when no UTC time can be derived.
    """
    if raw_time != IOTileEvent.InvalidRawTime and (raw_time & (1 << 31)):
        # Bug fix: decode the raw_time argument that was just validated,
        # not self.raw_time, so this helper works for any passed-in value.
        y2k_offset = raw_time ^ (1 << 31)
        return self._Y2KReference + datetime.timedelta(seconds=y2k_offset)

    if time_base is not None:
        return time_base + datetime.timedelta(seconds=raw_time)

    return None
Try to assign a UTC time to this reading .
22,032
def asdict(self):
    """Encode the data in this reading into a dictionary of primitives."""
    reading_time = self.reading_time
    timestamp_str = reading_time.isoformat() if reading_time is not None else None

    return {
        'stream': self.stream,
        'device_timestamp': self.raw_time,
        'streamer_local_id': self.reading_id,
        'timestamp': timestamp_str,
        'value': self.value,
    }
Encode the data in this reading into a dictionary .
22,033
def asdict(self):
    """Encode the data in this event into a dictionary."""
    return {
        'stream': self.stream,
        'device_timestamp': self.raw_time,
        'streamer_local_id': self.reading_id,
        'timestamp': self.reading_time,
        'extra_data': self.summary_data,
        'data': self.raw_data,
    }
Encode the data in this event into a dictionary .
22,034
def save(self, path):
    """Save a binary copy of this report to path."""
    encoded = self.encode()

    with open(path, "wb") as outfile:
        outfile.write(encoded)
Save a binary copy of this report
22,035
def serialize(self):
    """Turn this report into a dict with all information, including the
    received timestamp.
    """
    encoded = bytes(self.encode())

    # On Python 2 indexing bytes yields a 1-char string; normalize to int.
    report_format = encoded[0]
    if not isinstance(report_format, int):
        report_format = ord(report_format)

    return {
        'received_time': self.received_time,
        'encoded_report': encoded,
        'report_format': report_format,
        'origin': self.origin,
    }
Turn this report into a dictionary that encodes all information including received timestamp
22,036
def get_contents(self):
    """The contents of an alias: the concatenated content signatures of
    all of its sources.
    """
    return ''.join(child.get_csig() for child in self.children())
The contents of an alias is the concatenation of the content signatures of all its sources .
22,037
def generate(env):
    """Add Builders and construction variables for C++ compilers to an
    Environment.
    """
    import SCons.Tool
    import SCons.Tool.cc
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)

    # Register compile actions/emitters for every recognized C++ suffix.
    for suffix in CXXSuffixes:
        static_obj.add_action(suffix, SCons.Defaults.CXXAction)
        shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction)
        static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
        shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)

    SCons.Tool.cc.add_common_cc_variables(env)

    if 'CXX' not in env:
        # Fall back to the first known compiler if detection fails.
        env['CXX'] = env.Detect(compilers) or compilers[0]
    env['CXXFLAGS'] = SCons.Util.CLVar('')
    env['CXXCOM'] = '$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'
    env['SHCXX'] = '$CXX'
    env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
    env['SHCXXCOM'] = '$SHCXX -o $TARGET -c $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'

    env['CPPDEFPREFIX'] = '-D'
    env['CPPDEFSUFFIX'] = ''
    env['INCPREFIX'] = '-I'
    env['INCSUFFIX'] = ''
    env['SHOBJSUFFIX'] = '.os'
    env['OBJSUFFIX'] = '.o'
    env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0

    env['CXXFILESUFFIX'] = '.cc'
Add Builders and construction variables for Visual Age C ++ compilers to an Environment .
22,038
def link_to_storage(self, sensor_log):
    """Attach this DataStreamer to an underlying SensorLog.

    Any walker created against a previously linked SensorLog is
    destroyed first.
    """
    old_walker = self.walker
    if old_walker is not None:
        # Destroy via the log that created it, then drop the reference.
        self._sensor_log.destroy_walker(old_walker)
        self.walker = None

    self.walker = sensor_log.create_walker(self.selector)
    self._sensor_log = sensor_log
Attach this DataStreamer to an underlying SensorLog .
22,039
def triggered(self, manual=False):
    """Check if this streamer should generate a report.

    Non-automatic streamers only trigger when manual is True; in every
    case data must be available.  Raises InternalError when no SensorLog
    walker was attached at creation time.
    """
    if self.walker is None:
        raise InternalError("You can only check if a streamer is triggered if you create it with a SensorLog")

    if self.automatic or manual:
        return self.has_data()

    return False
Check if this streamer should generate a report .
22,040
def build_report(self, device_id, max_size=None, device_uptime=0, report_id=None, auth_chain=None):
    """Build a report with all of the readings in this streamer.

    Supports 'individual' format (telegram or broadcast report types,
    one reading each) and 'hashedlist' format (a signed list report
    bounded by max_size).  Returns a StreamerReport.
    """
    if self.walker is None or self.index is None:
        raise InternalError("You can only build a report with a DataStreamer if you create it with a SensorLog and a streamer index")

    if self.requires_signing() and auth_chain is None:
        raise ArgumentError("You must pass an auth chain to sign this report.")

    if self.requires_id() and report_id is None:
        raise ArgumentError("You must pass a report_id to serialize this report")

    if self.format == 'individual':
        # Individual reports carry exactly one reading.
        reading = self.walker.pop()
        highest_id = reading.reading_id

        if self.report_type == 'telegram':
            return StreamerReport(IndividualReadingReport.FromReadings(device_id, [reading]),
                                  1, highest_id)
        elif self.report_type == 'broadcast':
            return StreamerReport(BroadcastReport.FromReadings(device_id, [reading], device_uptime),
                                  1, highest_id)
    elif self.format == 'hashedlist':
        # 20 bytes of header + 24 of footer; each reading takes 16 bytes.
        max_readings = (max_size - 20 - 24) // 16
        if max_readings <= 0:
            raise InternalError("max_size is too small to hold even a single reading", max_size=max_size)

        readings = []
        highest_id = 0

        try:
            while len(readings) < max_readings:
                reading = self.walker.pop()
                readings.append(reading)
                if reading.reading_id > highest_id:
                    highest_id = reading.reading_id
        except StreamEmptyError:
            # The walker drained before hitting the size limit; an empty
            # report is only an error if we collected nothing at all.
            if len(readings) == 0:
                raise

        return StreamerReport(SignedListReport.FromReadings(device_id, readings,
                                                            report_id=report_id,
                                                            selector=self.selector.encode(),
                                                            streamer=self.index,
                                                            sent_timestamp=device_uptime),
                              len(readings), highest_id)

    raise InternalError("Streamer report format or type is not supported currently",
                        report_format=self.format, report_type=self.report_type)
Build a report with all of the readings in this streamer .
22,041
def matches(self, address, name=None):
    """Check if this slot identifier matches the given tile address.

    Controller slots match only the fixed controller address 8; the name
    argument is accepted for interface compatibility but not used here.
    """
    if self.controller:
        return address == 8

    return self.address == address
Check if this slot identifier matches the given tile .
22,042
def FromString(cls, desc):
    """Create a slot identifier from a string like "controller" or "slot 2"."""
    desc = str(desc)

    if desc == u'controller':
        return SlotIdentifier(controller=True)

    words = desc.split()
    if len(words) != 2 or words[0] != u'slot':
        raise ArgumentError(u"Illegal slot identifier", descriptor=desc)

    try:
        # Base 0 accepts decimal, hex (0x..) and octal (0o..) forms.
        slot_id = int(words[1], 0)
    except ValueError:
        raise ArgumentError(u"Could not convert slot identifier to number",
                            descriptor=desc, number=words[1])

    return SlotIdentifier(slot=slot_id)
Create a slot identifier from a string description .
22,043
def FromEncoded(cls, bindata):
    """Create a slot identifier from an 8-byte encoded binary descriptor."""
    if len(bindata) != 8:
        raise ArgumentError("Invalid binary slot descriptor with invalid length",
                            length=len(bindata), expected=8, data=bindata)

    slot, match_op = struct.unpack("<B6xB", bindata)

    match_name = cls.KNOWN_MATCH_CODES.get(match_op)
    if match_name is None:
        raise ArgumentError("Unknown match operation specified in binary slot descriptor",
                            operation=match_op, known_match_ops=cls.KNOWN_MATCH_CODES)

    if match_name == 'match_controller':
        return SlotIdentifier(controller=True)

    if match_name == 'match_slot':
        return SlotIdentifier(slot=slot)

    raise ArgumentError("Unsupported match operation in binary slot descriptor",
                        match_op=match_name)
Create a slot identifier from an encoded binary descriptor .
22,044
def encode(self):
    """Encode this slot identifier into an 8-byte binary descriptor."""
    if self.controller:
        slot = 0
        match_op = self.KNOWN_MATCH_NAMES['match_controller']
    else:
        slot = self.slot
        match_op = self.KNOWN_MATCH_NAMES['match_slot']

    return struct.pack("<B6xB", slot, match_op)
Encode this slot identifier into a binary descriptor .
22,045
def _scons_syntax_error ( e ) : etype , value , tb = sys . exc_info ( ) lines = traceback . format_exception_only ( etype , value ) for line in lines : sys . stderr . write ( line + '\n' ) sys . exit ( 2 )
Handle syntax errors . Print out a message and show where the error occurred .
22,046
def find_deepest_user_frame(tb):
    """Find the deepest stack frame that is not part of SCons.

    tb is reversed in place; the first frame whose filename does not
    contain an SCons path component wins, falling back to the outermost
    frame when every frame is inside SCons.
    """
    tb.reverse()

    scons_fragment = os.sep + 'SCons' + os.sep
    for frame in tb:
        if scons_fragment not in frame[0]:
            return frame

    return tb[0]
Find the deepest stack frame that is not part of SCons .
22,047
def _scons_user_error(e):
    """Handle user errors.

    Prints the message plus the file, line and routine of the deepest
    stack frame that is not part of SCons itself, then exits with 2.
    """
    global print_stacktrace
    etype, value, tb = sys.exc_info()

    if print_stacktrace:
        traceback.print_exception(etype, value, tb)

    filename, lineno, routine, _ = find_deepest_user_frame(traceback.extract_tb(tb))
    sys.stderr.write("\nscons: *** %s\n" % value)
    sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine))
    sys.exit(2)
Handle user errors. Print out a message and a description of the error, along with the line number and routine where it occurred. The file and line number will be the deepest stack frame that is not part of SCons itself.
22,048
def _scons_user_warning(e):
    """Handle user warnings.

    Prints the warning plus the file, line and routine of the deepest
    stack frame that is not part of SCons itself.
    """
    exc_type, exc_value, exc_tb = sys.exc_info()
    filename, lineno, routine, _ = find_deepest_user_frame(traceback.extract_tb(exc_tb))
    sys.stderr.write("\nscons: warning: %s\n" % e)
    sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine))
Handle user warnings. Print out a message and a description of the warning, along with the line number and routine where it occurred. The file and line number will be the deepest stack frame that is not part of SCons itself.
22,049
def _SConstruct_exists ( dirname = '' , repositories = [ ] , filelist = None ) : if not filelist : filelist = [ 'SConstruct' , 'Sconstruct' , 'sconstruct' ] for file in filelist : sfile = os . path . join ( dirname , file ) if os . path . isfile ( sfile ) : return sfile if not os . path . isabs ( sfile ) : for rep in repositories : if os . path . isfile ( os . path . join ( rep , sfile ) ) : return sfile return None
This function checks that an SConstruct file exists in a directory . If so it returns the path of the file . By default it checks the current directory .
22,050
def make_ready(self):
    """Make a task ready for execution.

    When --debug=explain is active, also print why the first out-of-date
    target is being rebuilt.
    """
    SCons.Taskmaster.OutOfDateTask.make_ready(self)

    if self.out_of_date and self.options.debug_explain:
        explanation = self.out_of_date[0].explain()
        if explanation:
            sys.stdout.write("scons: " + explanation)
Make a task ready for execution
22,051
def _unpack_version ( tag_data ) : tag = tag_data & ( ( 1 << 20 ) - 1 ) version_data = tag_data >> 20 major = ( version_data >> 6 ) & ( ( 1 << 6 ) - 1 ) minor = ( version_data >> 0 ) & ( ( 1 << 6 ) - 1 ) return ( tag , "{}.{}" . format ( major , minor ) )
Parse a packed version info struct into tag and major . minor version .
22,052
def _handle_reset(self):
    """Reset this controller tile.

    Bumps the device's reset counter, performs the base tile reset and
    restores config variables to their defaults.
    """
    self._logger.info("Resetting controller")

    self._device.reset_count += 1
    super(ReferenceController, self)._handle_reset()
    self.reset_config_variables()
Reset this controller tile .
22,053
async def _reset_vector(self):
    """Initialize the controller's subsystems inside the emulation thread."""
    # Replay any stored config RPCs targeted at this controller (address 8).
    config_rpcs = self.config_database.stream_matching(8, self.name)
    for rpc in config_rpcs:
        await self._device.emulator.await_rpc(*rpc)

    config_assignments = self.latch_config_variables()
    self._logger.info("Latched config variables at reset for controller: %s", config_assignments)

    # Bring each dependent subsystem to a clean post-reset state; failures
    # are logged and then re-raised to abort the reset.
    for system in self._post_config_subsystems:
        try:
            system.clear_to_reset(config_assignments)
            await asyncio.wait_for(system.initialize(), timeout=2.0)
        except:
            self._logger.exception("Error initializing %s", system)
            raise

    self._logger.info("Finished clearing controller to reset condition")

    # Propagate the reset to every peripheral tile; a missing tile is not
    # an error, anything else is logged and re-raised.
    for address, _ in self._device.iter_tiles(include_controller=False):
        self._logger.info("Sending reset signal to tile at address %d", address)

        try:
            await self._device.emulator.await_rpc(address, rpcs.RESET)
        except TileNotFoundError:
            pass
        except:
            self._logger.exception("Error sending reset signal to tile at address %d", address)
            raise

    self.initialized.set()
Initialize the controller's subsystems inside the emulation thread.
22,054
def hardware_version(self):
    """Get a fixed 10-byte hardware identification string.

    The configured hardware string is UTF-8 encoded, truncated to 10
    bytes if longer (with a warning) and null-padded if shorter.
    """
    hardware_string = self.hardware_string
    if not isinstance(hardware_string, bytes):
        hardware_string = hardware_string.encode('utf-8')

    if len(hardware_string) > 10:
        # Bug fix: the message promised truncation but the original code
        # returned the overlong string unchanged.  Also use warning()
        # instead of the deprecated warn() alias.
        self._logger.warning("Truncating hardware string that was longer than 10 bytes: %s",
                             self.hardware_string)
        hardware_string = hardware_string[:10]

    if len(hardware_string) < 10:
        hardware_string += b'\0' * (10 - len(hardware_string))

    return [hardware_string]
Get a hardware identification string .
22,055
def controller_info(self):
    """Get the controller UUID plus packed OS and app tag/version words."""
    return [
        self._device.iotile_id,
        _pack_version(*self.os_info),
        _pack_version(*self.app_info),
    ]
Get the controller UUID app tag and os tag .
22,056
def load_sgf ( self , sgf_data ) : if '\n' not in sgf_data : with open ( sgf_data , "r" ) as infile : sgf_data = infile . read ( ) model = DeviceModel ( ) parser = SensorGraphFileParser ( ) parser . parse_file ( data = sgf_data ) parser . compile ( model ) opt = SensorGraphOptimizer ( ) opt . optimize ( parser . sensor_graph , model = model ) sensor_graph = parser . sensor_graph self . _logger . info ( "Loading sensor_graph with %d nodes, %d streamers and %d configs" , len ( sensor_graph . nodes ) , len ( sensor_graph . streamers ) , len ( sensor_graph . config_database ) ) self . sensor_graph . persisted_nodes = sensor_graph . dump_nodes ( ) self . sensor_graph . persisted_streamers = sensor_graph . dump_streamers ( ) self . sensor_graph . persisted_constants = [ ] for stream , value in sorted ( sensor_graph . constant_database . items ( ) , key = lambda x : x [ 0 ] . encode ( ) ) : reading = IOTileReading ( stream . encode ( ) , 0 , value ) self . sensor_graph . persisted_constants . append ( ( stream , reading ) ) self . sensor_graph . persisted_exists = True self . config_database . clear ( ) for slot in sorted ( sensor_graph . config_database , key = lambda x : x . encode ( ) ) : for conf_var , ( conf_type , conf_val ) in sorted ( sensor_graph . config_database [ slot ] . items ( ) ) : self . config_database . add_direct ( slot , conf_var , conf_type , conf_val ) app_tag = sensor_graph . metadata_database . get ( 'app_tag' ) app_version = sensor_graph . metadata_database . get ( 'app_version' ) if app_tag is not None : if app_version is None : app_version = "0.0" self . app_info = ( app_tag , app_version )
Load and persist a sensor_graph file .
22,057
def _parse_file ( self ) : args = utilities . build_includes ( self . arch . includes ( ) ) args . append ( '-E' ) args . append ( '-D__attribute__(x)=' ) args . append ( '-D__extension__=' ) self . ast = parse_file ( self . filepath , use_cpp = True , cpp_path = 'arm-none-eabi-gcc' , cpp_args = args )
Preprocess and parse C file into an AST
22,058
def _clear_queue ( to_clear ) : while not to_clear . empty ( ) : try : to_clear . get ( False ) to_clear . task_done ( ) except queue . Empty : continue
Clear all items from a queue safely .
22,059
def finish ( self , status , response ) : self . response = binascii . hexlify ( response ) . decode ( 'utf-8' ) self . status = status self . runtime = monotonic ( ) - self . _start_time
Mark the end of a recorded RPC .
22,060
def serialize ( self ) : return "{},{: <26},{:2d},{:#06x},{:#04x},{:5.0f},{: <40},{: <40},{}" . format ( self . connection , self . start_stamp . isoformat ( ) , self . address , self . rpc_id , self . status , self . runtime * 1000 , self . call , self . response , self . error )
Convert this recorded RPC into a string .
22,061
def scan ( self , wait = None ) : min_scan = self . adapter . get_config ( 'minimum_scan_time' , 0.0 ) probe_required = self . adapter . get_config ( 'probe_required' , False ) wait_time = None elapsed = monotonic ( ) - self . _start_time if elapsed < min_scan : wait_time = min_scan - elapsed if probe_required : self . _loop . run_coroutine ( self . adapter . probe ( ) ) wait_time = min_scan if wait is not None : wait_time = wait if wait_time is not None : sleep ( wait_time ) to_remove = set ( ) now = monotonic ( ) with self . _scan_lock : for name , value in self . _scanned_devices . items ( ) : if value [ 'expiration_time' ] < now : to_remove . add ( name ) for name in to_remove : del self . _scanned_devices [ name ] devices = sorted ( self . _scanned_devices . values ( ) , key = lambda x : x [ 'uuid' ] ) return devices
Return the devices that have been found for this device adapter .
22,062
def connect ( self , uuid_value , wait = None ) : if self . connected : raise HardwareError ( "Cannot connect when we are already connected" ) if uuid_value not in self . _scanned_devices : self . scan ( wait = wait ) with self . _scan_lock : if uuid_value not in self . _scanned_devices : raise HardwareError ( "Could not find device to connect to by UUID" , uuid = uuid_value ) connstring = self . _scanned_devices [ uuid_value ] [ 'connection_string' ] self . connect_direct ( connstring )
Connect to a specific device by its uuid
22,063
def connect_direct ( self , connection_string , no_rpc = False , force = False ) : if not force and self . connected : raise HardwareError ( "Cannot connect when we are already connected to '%s'" % self . connection_string ) self . _loop . run_coroutine ( self . adapter . connect ( 0 , connection_string ) ) try : if no_rpc : self . _logger . info ( "Not opening RPC interface on device %s" , self . connection_string ) else : self . _loop . run_coroutine ( self . adapter . open_interface ( 0 , 'rpc' ) ) except HardwareError as exc : self . _logger . exception ( "Error opening RPC interface on device %s" , connection_string ) self . _loop . run_coroutine ( self . adapter . disconnect ( 0 ) ) raise exc except Exception as exc : self . _logger . exception ( "Error opening RPC interface on device %s" , connection_string ) self . _loop . run_coroutine ( self . adapter . disconnect ( 0 ) ) raise HardwareError ( "Could not open RPC interface on device due to an exception: %s" % str ( exc ) ) from exc self . connected = True self . connection_string = connection_string self . connection_interrupted = False
Directly connect to a device using its stream specific connection string .
22,064
def disconnect ( self ) : if not self . connected : raise HardwareError ( "Cannot disconnect when we are not connected" ) self . _reports = None self . _traces = None self . _loop . run_coroutine ( self . adapter . disconnect ( 0 ) ) self . connected = False self . connection_interrupted = False self . connection_string = None
Disconnect from the device that we are currently connected to .
22,065
def _try_reconnect ( self ) : try : if self . connection_interrupted : self . connect_direct ( self . connection_string , force = True ) self . connection_interrupted = False self . connected = True if self . _reports is not None : self . _loop . run_coroutine ( self . adapter . open_interface ( 0 , 'streaming' ) ) if self . _traces is not None : self . _loop . run_coroutine ( self . adapter . open_interface ( 0 , 'tracing' ) ) except HardwareError as exc : self . _logger . exception ( "Error reconnecting to device after an unexpected disconnect" ) raise HardwareError ( "Device disconnected unexpectedly and we could not reconnect" , reconnect_error = exc ) from exc
Try to recover an interrupted connection .
22,066
def send_rpc ( self , address , rpc_id , call_payload , timeout = 3.0 ) : if not self . connected : raise HardwareError ( "Cannot send an RPC if we are not in a connected state" ) if timeout is None : timeout = 3.0 status = - 1 payload = b'' recording = None if self . connection_interrupted : self . _try_reconnect ( ) if self . _record is not None : recording = _RecordedRPC ( self . connection_string , address , rpc_id , call_payload ) recording . start ( ) try : payload = self . _loop . run_coroutine ( self . adapter . send_rpc ( 0 , address , rpc_id , call_payload , timeout ) ) status , payload = pack_rpc_response ( payload , None ) except VALID_RPC_EXCEPTIONS as exc : status , payload = pack_rpc_response ( payload , exc ) if self . _record is not None : recording . finish ( status , payload ) self . _recording . append ( recording ) if self . connection_interrupted : self . _try_reconnect ( ) return unpack_rpc_response ( status , payload , rpc_id , address )
Send an rpc to our connected device .
22,067
def send_highspeed ( self , data , progress_callback ) : if not self . connected : raise HardwareError ( "Cannot send a script if we are not in a connected state" ) if isinstance ( data , str ) and not isinstance ( data , bytes ) : raise ArgumentError ( "You must send bytes or bytearray to _send_highspeed" , type = type ( data ) ) if not isinstance ( data , bytes ) : data = bytes ( data ) try : self . _on_progress = progress_callback self . _loop . run_coroutine ( self . adapter . send_script ( 0 , data ) ) finally : self . _on_progress = None
Send a script to a device at highspeed reporting progress .
22,068
def enable_streaming ( self ) : if not self . connected : raise HardwareError ( "Cannot enable streaming if we are not in a connected state" ) if self . _reports is not None : _clear_queue ( self . _reports ) return self . _reports self . _reports = queue . Queue ( ) self . _loop . run_coroutine ( self . adapter . open_interface ( 0 , 'streaming' ) ) return self . _reports
Open the streaming interface and accumulate reports in a queue .
22,069
def enable_tracing ( self ) : if not self . connected : raise HardwareError ( "Cannot enable tracing if we are not in a connected state" ) if self . _traces is not None : _clear_queue ( self . _traces ) return self . _traces self . _traces = queue . Queue ( ) self . _loop . run_coroutine ( self . adapter . open_interface ( 0 , 'tracing' ) ) return self . _traces
Open the tracing interface and accumulate traces in a queue .
22,070
def enable_broadcasting ( self ) : if self . _broadcast_reports is not None : _clear_queue ( self . _broadcast_reports ) return self . _broadcast_reports self . _broadcast_reports = queue . Queue ( ) return self . _broadcast_reports
Begin accumulating broadcast reports received from all devices .
22,071
def enable_debug ( self ) : if not self . connected : raise HardwareError ( "Cannot enable debug if we are not in a connected state" ) self . _loop . run_coroutine ( self . adapter . open_interface ( 0 , 'debug' ) )
Open the debug interface on the connected device .
22,072
def debug_command ( self , cmd , args = None , progress_callback = None ) : if args is None : args = { } try : self . _on_progress = progress_callback return self . _loop . run_coroutine ( self . adapter . debug ( 0 , cmd , args ) ) finally : self . _on_progress = None
Send a debug command to the connected device .
22,073
def close ( self ) : try : self . _loop . run_coroutine ( self . adapter . stop ( ) ) finally : self . _save_recording ( )
Close this adapter stream .
22,074
def _on_scan ( self , info ) : device_id = info [ 'uuid' ] expiration_time = info . get ( 'validity_period' , 60 ) infocopy = deepcopy ( info ) infocopy [ 'expiration_time' ] = monotonic ( ) + expiration_time with self . _scan_lock : self . _scanned_devices [ device_id ] = infocopy
Callback called when a new device is discovered on this CMDStream
22,075
def _on_disconnect ( self ) : self . _logger . info ( "Connection to device %s was interrupted" , self . connection_string ) self . connection_interrupted = True
Callback when a device is disconnected unexpectedly .
22,076
def midl_emitter ( target , source , env ) : base , _ = SCons . Util . splitext ( str ( target [ 0 ] ) ) tlb = target [ 0 ] incl = base + '.h' interface = base + '_i.c' targets = [ tlb , incl , interface ] midlcom = env [ 'MIDLCOM' ] if midlcom . find ( '/proxy' ) != - 1 : proxy = base + '_p.c' targets . append ( proxy ) if midlcom . find ( '/dlldata' ) != - 1 : dlldata = base + '_data.c' targets . append ( dlldata ) return ( targets , source )
Produces a list of outputs from the MIDL compiler
22,077
def generate ( env ) : env [ 'MIDL' ] = 'MIDL.EXE' env [ 'MIDLFLAGS' ] = SCons . Util . CLVar ( '/nologo' ) env [ 'MIDLCOM' ] = '$MIDL $MIDLFLAGS /tlb ${TARGETS[0]} /h ${TARGETS[1]} /iid ${TARGETS[2]} /proxy ${TARGETS[3]} /dlldata ${TARGETS[4]} $SOURCE 2> NUL' env [ 'BUILDERS' ] [ 'TypeLibrary' ] = midl_builder
Add Builders and construction variables for midl to an Environment .
22,078
def set_entry ( self , filename , obj ) : self . entries [ filename ] = obj self . dirty = True
Set the entry .
22,079
def write ( self , sync = 1 ) : if not self . dirty : return self . merge ( ) temp = os . path . join ( self . dir . get_internal_path ( ) , '.scons%d' % os . getpid ( ) ) try : file = open ( temp , 'wb' ) fname = temp except IOError : try : file = open ( self . sconsign , 'wb' ) fname = self . sconsign except IOError : return for key , entry in self . entries . items ( ) : entry . convert_to_sconsign ( ) pickle . dump ( self . entries , file , PICKLE_PROTOCOL ) file . close ( ) if fname != self . sconsign : try : mode = os . stat ( self . sconsign ) [ 0 ] os . chmod ( self . sconsign , 0o666 ) os . unlink ( self . sconsign ) except ( IOError , OSError ) : pass try : os . rename ( fname , self . sconsign ) except OSError : open ( self . sconsign , 'wb' ) . write ( open ( fname , 'rb' ) . read ( ) ) os . chmod ( self . sconsign , mode ) try : os . unlink ( temp ) except ( IOError , OSError ) : pass
Write the . sconsign file to disk .
22,080
def generate ( env ) : link . generate ( env ) env [ 'LINK' ] = env . Detect ( linkers ) or 'cc' env [ 'SHLINKFLAGS' ] = SCons . Util . CLVar ( '$LINKFLAGS -shared' ) env [ 'RPATHPREFIX' ] = '-rpath ' env [ 'RPATHSUFFIX' ] = '' env [ '_RPATH' ] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'
Add Builders and construction variables for MIPSPro to an Environment .
22,081
async def start ( self ) : await self . server . start ( ) self . port = self . server . port
Start the supervisor server .
22,082
async def prepare_conn ( self , conn ) : client_id = str ( uuid . uuid4 ( ) ) monitor = functools . partial ( self . send_event , client_id ) self . _logger . info ( "New client connection: %s" , client_id ) self . service_manager . add_monitor ( monitor ) self . clients [ client_id ] = dict ( connection = conn , monitor = monitor ) return client_id
Set up a new connection from a client .
22,083
async def teardown_conn ( self , context ) : client_id = context . user_data self . _logger . info ( "Tearing down client connection: %s" , client_id ) if client_id not in self . clients : self . _logger . warning ( "client_id %s did not exist in teardown_conn" , client_id ) else : del self . clients [ client_id ]
Teardown a connection from a client .
22,084
async def send_event ( self , client_id , service_name , event_name , event_info , directed_client = None ) : if directed_client is not None and directed_client != client_id : return client_info = self . clients . get ( client_id ) if client_info is None : self . _logger . warning ( "Attempted to send event to invalid client id: %s" , client_id ) return conn = client_info [ 'connection' ] event = dict ( service = service_name ) if event_info is not None : event [ 'payload' ] = event_info self . _logger . debug ( "Sending event: %s" , event ) await self . server . send_event ( conn , event_name , event )
Send an event to a client .
22,085
async def send_rpc ( self , msg , _context ) : service = msg . get ( 'name' ) rpc_id = msg . get ( 'rpc_id' ) payload = msg . get ( 'payload' ) timeout = msg . get ( 'timeout' ) response_id = await self . service_manager . send_rpc_command ( service , rpc_id , payload , timeout ) try : result = await self . service_manager . rpc_results . get ( response_id , timeout = timeout ) except asyncio . TimeoutError : self . _logger . warning ( "RPC 0x%04X on service %s timed out after %f seconds" , rpc_id , service , timeout ) result = dict ( result = 'timeout' , response = b'' ) return result
Send an RPC to a service on behalf of a client .
22,086
async def respond_rpc ( self , msg , _context ) : rpc_id = msg . get ( 'response_uuid' ) result = msg . get ( 'result' ) payload = msg . get ( 'response' ) self . service_manager . send_rpc_response ( rpc_id , result , payload )
Respond to an RPC previously sent to a service .
22,087
async def set_agent ( self , msg , context ) : service = msg . get ( 'name' ) client = context . user_data self . service_manager . set_agent ( service , client )
Mark a client as the RPC agent for a service .
22,088
async def service_messages ( self , msg , _context ) : msgs = self . service_manager . service_messages ( msg . get ( 'name' ) ) return [ x . to_dict ( ) for x in msgs ]
Get all messages for a service .
22,089
async def service_headline ( self , msg , _context ) : headline = self . service_manager . service_headline ( msg . get ( 'name' ) ) if headline is not None : headline = headline . to_dict ( ) return headline
Get the headline for a service .
22,090
def generate ( env ) : static_obj , shared_obj = SCons . Tool . createObjBuilders ( env ) for suffix in ASSuffixes : static_obj . add_action ( suffix , SCons . Defaults . ASAction ) static_obj . add_emitter ( suffix , SCons . Defaults . StaticObjectEmitter ) for suffix in ASPPSuffixes : static_obj . add_action ( suffix , SCons . Defaults . ASPPAction ) static_obj . add_emitter ( suffix , SCons . Defaults . StaticObjectEmitter ) env [ 'AS' ] = 'nasm' env [ 'ASFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'ASPPFLAGS' ] = '$ASFLAGS' env [ 'ASCOM' ] = '$AS $ASFLAGS -o $TARGET $SOURCES' env [ 'ASPPCOM' ] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES'
Add Builders and construction variables for nasm to an Environment .
22,091
def generate ( env ) : link . generate ( env ) env [ 'SHLINKFLAGS' ] = SCons . Util . CLVar ( '$LINKFLAGS -G' ) env [ 'RPATHPREFIX' ] = '-R' env [ 'RPATHSUFFIX' ] = '' env [ '_RPATH' ] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}' link . _setup_versioned_lib_variables ( env , tool = 'sunlink' , use_soname = True ) env [ 'LINKCALLBACKS' ] = link . _versioned_lib_callbacks ( )
Add Builders and construction variables for Forte to an Environment .
22,092
def _get_short_description ( self ) : if self . description is None : return None lines = [ x for x in self . description . split ( '\n' ) ] if len ( lines ) == 1 : return lines [ 0 ] elif len ( lines ) >= 3 and lines [ 1 ] == '' : return lines [ 0 ] return None
Return the first line of a multiline description
22,093
def _get_long_description ( self ) : if self . description is None : return None lines = [ x for x in self . description . split ( '\n' ) ] if len ( lines ) == 1 : return None elif len ( lines ) >= 3 and lines [ 1 ] == '' : return '\n' . join ( lines [ 2 : ] ) return self . description
Return the subsequent lines of a multiline description
22,094
def wrap_lines ( self , text , indent_level , indent_size = 4 ) : indent = ' ' * indent_size * indent_level lines = text . split ( '\n' ) wrapped_lines = [ ] for line in lines : if line == '' : wrapped_lines . append ( line ) else : wrapped_lines . append ( indent + line ) return '\n' . join ( wrapped_lines )
Indent a multiline string
22,095
def format_name ( self , name , indent_size = 4 ) : name_block = '' if self . short_desc is None : name_block += name + '\n' else : name_block += name + ': ' + self . short_desc + '\n' if self . long_desc is not None : name_block += self . wrap_lines ( self . long_desc , 1 , indent_size = indent_size ) name_block += '\n' return name_block
Format the name of this verifier
22,096
def trim_whitespace ( self , text ) : lines = text . split ( '\n' ) new_lines = [ x . lstrip ( ) for x in lines ] return '\n' . join ( new_lines )
Remove leading whitespace from each line of a multiline string
22,097
def __extend_targets_sources ( target , source ) : if not SCons . Util . is_List ( target ) : target = [ target ] if not source : source = target [ : ] elif not SCons . Util . is_List ( source ) : source = [ source ] if len ( target ) < len ( source ) : target . extend ( source [ len ( target ) : ] ) return target , source
Prepare the lists of target and source files .
22,098
def __select_builder ( lxml_builder , libxml2_builder , cmdline_builder ) : if prefer_xsltproc : return cmdline_builder if not has_libxml2 : if has_lxml : return lxml_builder else : return cmdline_builder return libxml2_builder
Selects a builder based on which Python modules are present .
22,099
def __ensure_suffix ( t , suffix ) : tpath = str ( t ) if not tpath . endswith ( suffix ) : return tpath + suffix return t
Ensure that the target t has the given suffix .