idx (int64, 0–63k) | question (string, 53–5.28k chars) | target (string, 5–805 chars) |
|---|---|---|
21,900 | def probe_async ( self , callback ) : def _on_finished ( _name , control_info , exception ) : if exception is not None : callback ( self . id , False , str ( exception ) ) return self . _control_info = control_info try : info = { 'connection_string' : "direct" , 'uuid' : control_info . uuid , 'signal_strength' : 100 } self . _trigger_callback ( 'on_scan' , self . id , info , self . ExpirationTime ) finally : callback ( self . id , True , None ) self . _control_thread . command ( JLinkControlThread . FIND_CONTROL , _on_finished , self . _device_info . ram_start , self . _device_info . ram_size ) | Send advertisements for all connected devices . |
21,901 | def _open_debug_interface ( self , conn_id , callback , connection_string = None ) : self . _try_connect ( connection_string ) callback ( conn_id , self . id , True , None ) | Enable debug interface for this IOTile device |
21,902 | async def _reset_vector ( self ) : self . _logger . info ( "Tile %s at address %d is starting from reset" , self . name , self . address ) try : address , run_level , debug = await self . _device . emulator . await_rpc ( 8 , rpcs . REGISTER_TILE , * self . _registration_tuple ( ) ) except : self . _logger . exception ( "Error registering tile: address=%d, name=%s" , self . address , self . name ) raise self . debug_mode = bool ( debug ) self . run_level = run_level self . _logger . info ( "Tile at address %d registered itself, received address=%d, runlevel=%d and debug=%d" , self . address , address , run_level , debug ) self . _registered . set ( ) if run_level == RunLevel . SAFE_MODE : self . initialized . set ( ) return if run_level == RunLevel . START_ON_COMMAND : await self . _start_received . wait ( ) self . _hosted_app_running . set ( ) await self . _application_main ( ) | Main background task for the tile executive . |
21,903 | def _handle_reset ( self ) : self . _registered . clear ( ) self . _start_received . clear ( ) self . _hosted_app_running . clear ( ) super ( EmulatedPeripheralTile , self ) . _handle_reset ( ) | Reset this tile . |
21,904 | async def start ( self ) : self . _logger . info ( "Starting all device adapters" ) await self . device_manager . start ( ) self . _logger . info ( "Starting all servers" ) for server in self . servers : await server . start ( ) | Start the gateway . |
21,905 | async def stop ( self ) : self . _logger . info ( "Stopping all servers" ) for server in self . servers : await server . stop ( ) self . _logger . info ( "Stopping all device adapters" ) await self . device_manager . stop ( ) | Stop the gateway manager and synchronously wait for it to stop . |
21,906 | def main ( argv = None ) : if argv is None : argv = sys . argv [ 1 : ] parser = build_args ( ) args = parser . parse_args ( args = argv ) recipe_name , _ext = os . path . splitext ( os . path . basename ( args . recipe ) ) rm = RecipeManager ( ) rm . add_recipe_folder ( os . path . dirname ( args . recipe ) , whitelist = [ os . path . basename ( args . recipe ) ] ) recipe = rm . get_recipe ( recipe_name ) if args . archive is not None : print ( "Archiving recipe into %s" % args . archive ) recipe . archive ( args . archive ) return 0 if args . info : print ( recipe ) return 0 variables = load_variables ( args . define , args . config ) success = 0 start_time = time . time ( ) if args . loop is None : try : recipe . run ( variables ) success += 1 except IOTileException as exc : print ( "Error running recipe: %s" % str ( exc ) ) return 1 else : while True : value = input ( "Enter value for loop variable %s (return to stop): " % args . loop ) if value == '' : break local_vars = dict ( ** variables ) local_vars [ args . loop ] = value try : recipe . run ( local_vars ) success += 1 except IOTileException as exc : print ( "%s: %s" % ( value , str ( exc ) ) ) end_time = time . time ( ) total_time = end_time - start_time if success == 0 : per_time = 0.0 else : per_time = total_time / success print ( "Performed %d runs in %.1f seconds (%.1f seconds / run)" % ( success , total_time , per_time ) ) return 0 | Main entry point for iotile - ship recipe runner . |
21,907 | def subst_dict ( target , source ) : dict = { } if target : def get_tgt_subst_proxy ( thing ) : try : subst_proxy = thing . get_subst_proxy ( ) except AttributeError : subst_proxy = thing return subst_proxy tnl = NLWrapper ( target , get_tgt_subst_proxy ) dict [ 'TARGETS' ] = Targets_or_Sources ( tnl ) dict [ 'TARGET' ] = Target_or_Source ( tnl ) dict [ 'CHANGED_TARGETS' ] = '$TARGETS' dict [ 'UNCHANGED_TARGETS' ] = '$TARGETS' else : dict [ 'TARGETS' ] = NullNodesList dict [ 'TARGET' ] = NullNodesList if source : def get_src_subst_proxy ( node ) : try : rfile = node . rfile except AttributeError : pass else : node = rfile ( ) try : return node . get_subst_proxy ( ) except AttributeError : return node snl = NLWrapper ( source , get_src_subst_proxy ) dict [ 'SOURCES' ] = Targets_or_Sources ( snl ) dict [ 'SOURCE' ] = Target_or_Source ( snl ) dict [ 'CHANGED_SOURCES' ] = '$SOURCES' dict [ 'UNCHANGED_SOURCES' ] = '$SOURCES' else : dict [ 'SOURCES' ] = NullNodesList dict [ 'SOURCE' ] = NullNodesList return dict | Create a dictionary for substitution of special construction variables . |
21,908 | def escape ( self , escape_func , quote_func = quote_spaces ) : if self . is_literal ( ) : return escape_func ( self . data ) elif ' ' in self . data or '\t' in self . data : return quote_func ( self . data ) else : return self . data | Escape the string with the supplied function . The function is expected to take an arbitrary string then return it with all special characters escaped and ready for passing to the command interpreter . |
21,909 | def indent_list ( inlist , level ) : indent = ' ' * level joinstr = '\n' + indent retval = joinstr . join ( inlist ) return indent + retval | Join a list of strings one per line with level spaces before each one |
21,910 | def generate ( env ) : fortran . generate ( env ) for dialect in [ 'F77' , 'F90' , 'FORTRAN' , 'F95' , 'F03' , 'F08' ] : env [ '%s' % dialect ] = 'gfortran' env [ 'SH%s' % dialect ] = '$%s' % dialect if env [ 'PLATFORM' ] in [ 'cygwin' , 'win32' ] : env [ 'SH%sFLAGS' % dialect ] = SCons . Util . CLVar ( '$%sFLAGS' % dialect ) else : env [ 'SH%sFLAGS' % dialect ] = SCons . Util . CLVar ( '$%sFLAGS -fPIC' % dialect ) env [ 'INC%sPREFIX' % dialect ] = "-I" env [ 'INC%sSUFFIX' % dialect ] = "" | Add Builders and construction variables for gfortran to an Environment . |
21,911 | def _extract_device_uuid ( cls , slug ) : if len ( slug ) != 22 : raise ArgumentError ( "Invalid device slug" , slug = slug ) hexdigits = slug [ 3 : ] hexdigits = hexdigits . replace ( '-' , '' ) try : rawbytes = binascii . unhexlify ( hexdigits ) words = struct . unpack ( ">LL" , rawbytes ) return ( words [ 0 ] << 32 ) | ( words [ 1 ] ) except ValueError as exc : raise ArgumentError ( "Could not convert device slug to hex integer" , slug = slug , error = str ( exc ) ) | Turn a string slug into a UUID |
21,912 | def start ( self ) : self . _prepare ( ) self . _disconnector = tornado . ioloop . PeriodicCallback ( self . _disconnect_hanging_devices , 1000 , self . _loop ) self . _disconnector . start ( ) | Start this gateway agent . |
21,913 | def stop ( self ) : if self . _disconnector : self . _disconnector . stop ( ) self . client . disconnect ( ) | Stop this gateway agent . |
21,914 | def _validate_connection ( self , action , uuid , key ) : if uuid not in self . _connections : self . _logger . warn ( "Received message for device with no connection 0x%X" , uuid ) return None data = self . _connections [ uuid ] if key != data [ 'key' ] : self . _logger . warn ( "Received message for device with incorrect key, uuid=0x%X" , uuid ) return None return data [ 'connection_id' ] | Validate that a message received for a device has the right key |
21,915 | def _publish_status ( self , slug , data ) : status_topic = self . topics . prefix + 'devices/{}/data/status' . format ( slug ) self . _logger . debug ( "Publishing status message: (topic=%s) (message=%s)" , status_topic , str ( data ) ) self . client . publish ( status_topic , data ) | Publish a status message for a device |
21,916 | def _publish_response ( self , slug , message ) : resp_topic = self . topics . gateway_topic ( slug , 'data/response' ) self . _logger . debug ( "Publishing response message: (topic=%s) (message=%s)" , resp_topic , message ) self . client . publish ( resp_topic , message ) | Publish a response message for a device |
21,917 | def _on_action ( self , sequence , topic , message ) : try : slug = None parts = topic . split ( '/' ) slug = parts [ - 3 ] uuid = self . _extract_device_uuid ( slug ) except Exception as exc : self . _logger . warn ( "Error parsing slug in action handler (slug=%s, topic=%s)" , slug , topic ) return if messages . DisconnectCommand . matches ( message ) : self . _logger . debug ( "Received disconnect command for device 0x%X" , uuid ) key = message [ 'key' ] client = message [ 'client' ] self . _loop . add_callback ( self . _disconnect_from_device , uuid , key , client ) elif messages . OpenInterfaceCommand . matches ( message ) or messages . CloseInterfaceCommand . matches ( message ) : self . _logger . debug ( "Received %s command for device 0x%X" , message [ 'operation' ] , uuid ) key = message [ 'key' ] client = message [ 'client' ] oper = message [ 'operation' ] if oper == 'open_interface' : self . _loop . add_callback ( self . _open_interface , client , uuid , message [ 'interface' ] , key ) else : self . _loop . add_callback ( self . _close_interface , client , uuid , message [ 'interface' ] , key ) elif messages . RPCCommand . matches ( message ) : rpc_msg = messages . RPCCommand . verify ( message ) client = rpc_msg [ 'client' ] address = rpc_msg [ 'address' ] rpc = rpc_msg [ 'rpc_id' ] payload = rpc_msg [ 'payload' ] key = rpc_msg [ 'key' ] timeout = rpc_msg [ 'timeout' ] self . _loop . add_callback ( self . _send_rpc , client , uuid , address , rpc , payload , timeout , key ) elif messages . ScriptCommand . matches ( message ) : script_msg = messages . ScriptCommand . verify ( message ) key = script_msg [ 'key' ] client = script_msg [ 'client' ] script = script_msg [ 'script' ] self . _loop . add_callback ( self . _send_script , client , uuid , script , key , ( script_msg [ 'fragment_index' ] , script_msg [ 'fragment_count' ] ) ) else : self . _logger . error ( "Unsupported message received (topic=%s) (message=%s)" , topic , str ( message ) ) | Process a command action that we received on behalf of a device . |
21,918 | def _on_connect ( self , sequence , topic , message ) : try : slug = None parts = topic . split ( '/' ) slug = parts [ - 3 ] uuid = self . _extract_device_uuid ( slug ) except Exception : self . _logger . exception ( "Error parsing slug from connection request (slug=%s, topic=%s)" , slug , topic ) return if messages . ConnectCommand . matches ( message ) : key = message [ 'key' ] client = message [ 'client' ] self . _loop . add_callback ( self . _connect_to_device , uuid , key , client ) else : self . _logger . warn ( "Unknown message received on connect topic=%s, message=%s" , topic , message ) | Process a request to connect to an IOTile device |
21,919 | def _send_rpc ( self , client , uuid , address , rpc , payload , timeout , key ) : conn_id = self . _validate_connection ( 'send_rpc' , uuid , key ) if conn_id is None : return conn_data = self . _connections [ uuid ] conn_data [ 'last_touch' ] = monotonic ( ) slug = self . _build_device_slug ( uuid ) try : resp = yield self . _manager . send_rpc ( conn_id , address , rpc >> 8 , rpc & 0xFF , bytes ( payload ) , timeout ) except Exception as exc : self . _logger . error ( "Error in manager send rpc: %s" % str ( exc ) ) resp = { 'success' : False , 'reason' : "Internal error: %s" % str ( exc ) } payload = { 'client' : client , 'type' : 'response' , 'operation' : 'rpc' } payload [ 'success' ] = resp [ 'success' ] if resp [ 'success' ] is False : payload [ 'failure_reason' ] = resp [ 'reason' ] else : payload [ 'status' ] = resp [ 'status' ] payload [ 'payload' ] = binascii . hexlify ( resp [ 'payload' ] ) self . _publish_response ( slug , payload ) | Send an RPC to a connected device |
21,920 | def _send_script ( self , client , uuid , chunk , key , chunk_status ) : conn_id = self . _validate_connection ( 'send_script' , uuid , key ) if conn_id is None : return conn_data = self . _connections [ uuid ] conn_data [ 'last_touch' ] = monotonic ( ) slug = self . _build_device_slug ( uuid ) index , count = chunk_status if index == 0 : conn_data [ 'script' ] = bytes ( ) conn_data [ 'script' ] += chunk if index != count - 1 : return conn_data [ 'last_progress' ] = None try : resp = yield self . _manager . send_script ( conn_id , conn_data [ 'script' ] , lambda x , y : self . _notify_progress_async ( uuid , client , x , y ) ) yield None conn_data [ 'script' ] = bytes ( ) except Exception as exc : self . _logger . exception ( "Error in manager send_script" ) resp = { 'success' : False , 'reason' : "Internal error: %s" % str ( exc ) } payload = { 'client' : client , 'type' : 'response' , 'operation' : 'send_script' , 'success' : resp [ 'success' ] } if resp [ 'success' ] is False : payload [ 'failure_reason' ] = resp [ 'reason' ] self . _publish_response ( slug , payload ) | Send a script to the connected device . |
21,921 | def _open_interface ( self , client , uuid , iface , key ) : conn_id = self . _validate_connection ( 'open_interface' , uuid , key ) if conn_id is None : return conn_data = self . _connections [ uuid ] conn_data [ 'last_touch' ] = monotonic ( ) slug = self . _build_device_slug ( uuid ) try : resp = yield self . _manager . open_interface ( conn_id , iface ) except Exception as exc : self . _logger . exception ( "Error in manager open interface" ) resp = { 'success' : False , 'reason' : "Internal error: %s" % str ( exc ) } message = { 'type' : 'response' , 'operation' : 'open_interface' , 'client' : client } message [ 'success' ] = resp [ 'success' ] if not message [ 'success' ] : message [ 'failure_reason' ] = resp [ 'reason' ] self . _publish_response ( slug , message ) | Open an interface on a connected device . |
21,922 | def _disconnect_hanging_devices ( self ) : now = monotonic ( ) for uuid , data in self . _connections . items ( ) : if ( now - data [ 'last_touch' ] ) > self . client_timeout : self . _logger . info ( "Disconnect inactive client %s from device 0x%X" , data [ 'client' ] , uuid ) self . _loop . add_callback ( self . _disconnect_from_device , uuid , data [ 'key' ] , data [ 'client' ] , unsolicited = True ) | Periodic callback that checks for devices that haven't been used and disconnects them . |
21,923 | def _disconnect_from_device ( self , uuid , key , client , unsolicited = False ) : conn_id = self . _validate_connection ( 'disconnect' , uuid , key ) if conn_id is None : return conn_data = self . _connections [ uuid ] slug = self . _build_device_slug ( uuid ) message = { 'client' : client , 'type' : 'response' , 'operation' : 'disconnect' } self . client . reset_sequence ( self . topics . gateway_topic ( slug , 'control/connect' ) ) self . client . reset_sequence ( self . topics . gateway_topic ( slug , 'control/action' ) ) try : resp = yield self . _manager . disconnect ( conn_id ) except Exception as exc : self . _logger . exception ( "Error in manager disconnect" ) resp = { 'success' : False , 'reason' : "Internal error: %s" % str ( exc ) } self . _manager . remove_monitor ( conn_data [ 'report_monitor' ] ) self . _manager . remove_monitor ( conn_data [ 'trace_monitor' ] ) if resp [ 'success' ] : del self . _connections [ uuid ] message [ 'success' ] = True else : message [ 'success' ] = False message [ 'failure_reason' ] = resp [ 'reason' ] self . _logger . info ( "Client %s disconnected from device 0x%X" , client , uuid ) if unsolicited and resp [ 'success' ] : self . _publish_response ( slug , { 'client' : client , 'type' : 'notification' , 'operation' : 'disconnect' } ) elif not unsolicited : self . _publish_response ( slug , message ) | Disconnect from a device that we have previously connected to . |
21,924 | def _notify_report ( self , device_uuid , event_name , report ) : if device_uuid not in self . _connections : self . _logger . debug ( "Dropping report for device without an active connection, uuid=0x%X" , device_uuid ) return slug = self . _build_device_slug ( device_uuid ) streaming_topic = self . topics . prefix + 'devices/{}/data/streaming' . format ( slug ) data = { 'type' : 'notification' , 'operation' : 'report' } ser = report . serialize ( ) data [ 'received_time' ] = ser [ 'received_time' ] . strftime ( "%Y%m%dT%H:%M:%S.%fZ" ) . encode ( ) data [ 'report_origin' ] = ser [ 'origin' ] data [ 'report_format' ] = ser [ 'report_format' ] data [ 'report' ] = binascii . hexlify ( ser [ 'encoded_report' ] ) data [ 'fragment_count' ] = 1 data [ 'fragment_index' ] = 0 self . _logger . debug ( "Publishing report: (topic=%s)" , streaming_topic ) self . client . publish ( streaming_topic , data ) | Notify that a report has been received from a device . |
21,925 | def _notify_trace ( self , device_uuid , event_name , trace ) : if device_uuid not in self . _connections : self . _logger . debug ( "Dropping trace data for device without an active connection, uuid=0x%X" , device_uuid ) return conn_data = self . _connections [ device_uuid ] last_trace = conn_data [ 'last_trace' ] now = monotonic ( ) conn_data [ 'trace_accum' ] += bytes ( trace ) if last_trace is not None and ( now - last_trace ) < self . throttle_trace : if not conn_data [ 'trace_scheduled' ] : self . _loop . call_later ( self . throttle_trace - ( now - last_trace ) , self . _send_accum_trace , device_uuid ) conn_data [ 'trace_scheduled' ] = True self . _logger . debug ( "Deferring trace data due to throttling uuid=0x%X" , device_uuid ) else : self . _send_accum_trace ( device_uuid ) | Notify that we have received tracing data from a device . |
21,926 | def _send_accum_trace ( self , device_uuid ) : if device_uuid not in self . _connections : self . _logger . debug ( "Dropping trace data for device without an active connection, uuid=0x%X" , device_uuid ) return conn_data = self . _connections [ device_uuid ] trace = conn_data [ 'trace_accum' ] if len ( trace ) > 0 : slug = self . _build_device_slug ( device_uuid ) tracing_topic = self . topics . prefix + 'devices/{}/data/tracing' . format ( slug ) data = { 'type' : 'notification' , 'operation' : 'trace' } data [ 'trace' ] = binascii . hexlify ( trace ) data [ 'trace_origin' ] = device_uuid self . _logger . debug ( 'Publishing trace: (topic=%s)' , tracing_topic ) self . client . publish ( tracing_topic , data ) conn_data [ 'trace_scheduled' ] = False conn_data [ 'last_trace' ] = monotonic ( ) conn_data [ 'trace_accum' ] = bytes ( ) | Send whatever accumulated tracing data we have for the device . |
21,927 | def _on_scan_request ( self , sequence , topic , message ) : if messages . ProbeCommand . matches ( message ) : self . _logger . debug ( "Received probe message on topic %s, message=%s" , topic , message ) self . _loop . add_callback ( self . _publish_scan_response , message [ 'client' ] ) else : self . _logger . warn ( "Invalid message received on topic %s, message=%s" , topic , message ) | Process a request for scanning information |
21,928 | def _publish_scan_response ( self , client ) : devices = self . _manager . scanned_devices converted_devs = [ ] for uuid , info in devices . items ( ) : slug = self . _build_device_slug ( uuid ) message = { } message [ 'uuid' ] = uuid if uuid in self . _connections : message [ 'user_connected' ] = True elif 'user_connected' in info : message [ 'user_connected' ] = info [ 'user_connected' ] else : message [ 'user_connected' ] = False message [ 'connection_string' ] = slug message [ 'signal_strength' ] = info [ 'signal_strength' ] converted_devs . append ( { x : y for x , y in message . items ( ) } ) message [ 'type' ] = 'notification' message [ 'operation' ] = 'advertisement' self . client . publish ( self . topics . gateway_topic ( slug , 'data/advertisement' ) , message ) probe_message = { } probe_message [ 'type' ] = 'response' probe_message [ 'client' ] = client probe_message [ 'success' ] = True probe_message [ 'devices' ] = converted_devs self . client . publish ( self . topics . status , probe_message ) | Publish a scan response message |
21,929 | def _versioned_lib_suffix ( env , suffix , version ) : Verbose = False if Verbose : print ( "_versioned_lib_suffix: suffix={!r}" . format ( suffix ) ) print ( "_versioned_lib_suffix: version={!r}" . format ( version ) ) if not suffix . endswith ( version ) : suffix = suffix + '.' + version if Verbose : print ( "_versioned_lib_suffix: return suffix={!r}" . format ( suffix ) ) return suffix | For suffix = . so and version = 0 . 1 . 2 it returns . so . 0 . 1 . 2 |
21,930 | def _setup_versioned_lib_variables ( env , ** kw ) : tool = None try : tool = kw [ 'tool' ] except KeyError : pass use_soname = False try : use_soname = kw [ 'use_soname' ] except KeyError : pass if use_soname : if tool == 'sunlink' : env [ '_SHLIBVERSIONFLAGS' ] = '$SHLIBVERSIONFLAGS -h $_SHLIBSONAME' env [ '_LDMODULEVERSIONFLAGS' ] = '$LDMODULEVERSIONFLAGS -h $_LDMODULESONAME' else : env [ '_SHLIBVERSIONFLAGS' ] = '$SHLIBVERSIONFLAGS -Wl,-soname=$_SHLIBSONAME' env [ '_LDMODULEVERSIONFLAGS' ] = '$LDMODULEVERSIONFLAGS -Wl,-soname=$_LDMODULESONAME' env [ '_SHLIBSONAME' ] = '${ShLibSonameGenerator(__env__,TARGET)}' env [ '_LDMODULESONAME' ] = '${LdModSonameGenerator(__env__,TARGET)}' env [ 'ShLibSonameGenerator' ] = SCons . Tool . ShLibSonameGenerator env [ 'LdModSonameGenerator' ] = SCons . Tool . LdModSonameGenerator else : env [ '_SHLIBVERSIONFLAGS' ] = '$SHLIBVERSIONFLAGS' env [ '_LDMODULEVERSIONFLAGS' ] = '$LDMODULEVERSIONFLAGS' env [ 'LDMODULEVERSIONFLAGS' ] = '$SHLIBVERSIONFLAGS' | Setup all variables required by the versioning machinery |
21,931 | def main ( argv = None , loop = SharedLoop , max_time = None ) : should_raise = argv is not None if argv is None : argv = sys . argv [ 1 : ] parser = build_parser ( ) cmd_args = parser . parse_args ( argv ) configure_logging ( cmd_args . verbose ) logger = logging . getLogger ( __name__ ) try : args = { } if cmd_args . config is not None : try : with open ( cmd_args . config , "r" ) as conf : args = json . load ( conf ) except IOError as exc : raise ScriptError ( "Could not open config file %s due to %s" % ( cmd_args . config , str ( exc ) ) , 2 ) except ValueError as exc : raise ScriptError ( "Could not parse JSON from config file %s due to %s" % ( cmd_args . config , str ( exc ) ) , 3 ) except TypeError as exc : raise ScriptError ( "You must pass the path to a json config file" , 4 ) logger . critical ( "Starting gateway" ) gateway = IOTileGateway ( args , loop = loop ) loop . run_coroutine ( gateway . start ( ) ) logger . critical ( "Gateway running" ) loop . wait_for_interrupt ( max_time = max_time ) loop . run_coroutine ( gateway . stop ( ) ) except ScriptError as exc : if should_raise : raise exc logger . fatal ( "Quitting due to error: %s" , exc . msg ) return exc . code except Exception as exc : if should_raise : raise exc logger . exception ( "Fatal error running gateway" ) return 1 return 0 | Main entry point for iotile - gateway . |
21,932 | def copy ( self ) : return _TimeAnchor ( self . reading_id , self . uptime , self . utc , self . is_break , self . exact ) | Return a copy of this _TimeAnchor . |
21,933 | def anchor_stream ( self , stream_id , converter = "rtc" ) : if isinstance ( converter , str ) : converter = self . _known_converters . get ( converter ) if converter is None : raise ArgumentError ( "Unknown anchor converter string: %s" % converter , known_converters = list ( self . _known_converters ) ) self . _anchor_streams [ stream_id ] = converter | Mark a stream as containing anchor points . |
21,934 | def id_range ( self ) : if len ( self . _anchor_points ) == 0 : return ( 0 , 0 ) return ( self . _anchor_points [ 0 ] . reading_id , self . _anchor_points [ - 1 ] . reading_id ) | Get the range of anchor reading_ids . |
21,935 | def _convert_epoch_anchor ( cls , reading ) : delta = datetime . timedelta ( seconds = reading . value ) return cls . _EpochReference + delta | Convert a reading containing an epoch timestamp to datetime . |
21,936 | def add_point ( self , reading_id , uptime = None , utc = None , is_break = False ) : if reading_id == 0 : return if uptime is None and utc is None : return if uptime is not None and uptime & ( 1 << 31 ) : if utc is not None : return uptime &= ~ ( 1 << 31 ) utc = self . convert_rtc ( uptime ) uptime = None anchor = _TimeAnchor ( reading_id , uptime , utc , is_break , exact = utc is not None ) if anchor in self . _anchor_points : return self . _anchor_points . add ( anchor ) self . _prepared = False | Add a time point that could be used as a UTC reference . |
21,937 | def add_reading ( self , reading ) : is_break = False utc = None if reading . stream in self . _break_streams : is_break = True if reading . stream in self . _anchor_streams : utc = self . _anchor_streams [ reading . stream ] ( reading ) self . add_point ( reading . reading_id , reading . raw_time , utc , is_break = is_break ) | Add an IOTileReading . |
21,938 | def add_report ( self , report , ignore_errors = False ) : if not isinstance ( report , SignedListReport ) : if ignore_errors : return raise ArgumentError ( "You can only add SignedListReports to a UTCAssigner" , report = report ) for reading in report . visible_readings : self . add_reading ( reading ) self . add_point ( report . report_id , report . sent_timestamp , report . received_time ) | Add all anchors from a report . |
21,939 | def assign_utc ( self , reading_id , uptime = None , prefer = "before" ) : if prefer not in ( "before" , "after" ) : raise ArgumentError ( "Invalid prefer parameter: {}, must be 'before' or 'after'" . format ( prefer ) ) if len ( self . _anchor_points ) == 0 : return None if reading_id > self . _anchor_points [ - 1 ] . reading_id : return None i = self . _anchor_points . bisect_key_left ( reading_id ) found_id = False crossed_break = False exact = True last = self . _anchor_points [ i ] . copy ( ) if uptime is not None : last . uptime = uptime if last . reading_id == reading_id : found_id = True if last . utc is not None : return UTCAssignment ( reading_id , last . utc , found_id , exact , crossed_break ) left_assign = self . _fix_left ( reading_id , last , i , found_id ) if left_assign is not None and left_assign . exact : return left_assign right_assign = self . _fix_right ( reading_id , last , i , found_id ) if right_assign is not None and right_assign . exact : return right_assign return self . _pick_best_fix ( left_assign , right_assign , prefer ) | Assign a utc datetime to a reading id . |
21,940 | def ensure_prepared ( self ) : if self . _prepared : return exact_count = 0 fixed_count = 0 inexact_count = 0 self . _logger . debug ( "Preparing UTCAssigner (%d total anchors)" , len ( self . _anchor_points ) ) for curr in self . _anchor_points : if not curr . exact : assignment = self . assign_utc ( curr . reading_id , curr . uptime ) if assignment is not None and assignment . exact : curr . utc = assignment . utc curr . exact = True fixed_count += 1 else : inexact_count += 1 else : exact_count += 1 self . _logger . debug ( "Prepared UTCAssigner with %d reference points, " "%d exact anchors and %d inexact anchors" , exact_count , fixed_count , inexact_count ) self . _prepared = True | Calculate and cache UTC values for all exactly known anchor points . |
21,941 | def fix_report ( self , report , errors = "drop" , prefer = "before" ) : if not isinstance ( report , SignedListReport ) : raise ArgumentError ( "Report must be a SignedListReport" , report = report ) if errors not in ( 'drop' , ) : raise ArgumentError ( "Unknown errors handler: {}, supported=['drop']" . format ( errors ) ) self . ensure_prepared ( ) fixed_readings = [ ] dropped_readings = 0 for reading in report . visible_readings : assignment = self . assign_utc ( reading . reading_id , reading . raw_time , prefer = prefer ) if assignment is None : dropped_readings += 1 continue fixed_reading = IOTileReading ( assignment . rtc_value , reading . stream , reading . value , reading_time = assignment . utc , reading_id = reading . reading_id ) fixed_readings . append ( fixed_reading ) fixed_report = SignedListReport . FromReadings ( report . origin , fixed_readings , report_id = report . report_id , selector = report . streamer_selector , streamer = report . origin_streamer , sent_timestamp = report . sent_timestamp ) fixed_report . received_time = report . received_time if dropped_readings > 0 : self . _logger . warning ( "Dropped %d readings of %d when fixing UTC timestamps in report 0x%08X for device 0x%08X" , dropped_readings , len ( report . visible_readings ) , report . report_id , report . origin ) return fixed_report | Perform utc assignment on all readings in a report . |
21,942 | def _fix_left ( self , reading_id , last , start , found_id ) : accum_delta = 0 exact = True crossed_break = False if start == 0 : return None for curr in self . _anchor_points . islice ( None , start - 1 , reverse = True ) : if curr . uptime is None or last . uptime is None : exact = False elif curr . is_break or last . uptime < curr . uptime : exact = False crossed_break = True else : accum_delta += last . uptime - curr . uptime if curr . utc is not None : time_delta = datetime . timedelta ( seconds = accum_delta ) return UTCAssignment ( reading_id , curr . utc + time_delta , found_id , exact , crossed_break ) last = curr return None | Fix a reading by looking for the nearest anchor point before it . |
21,943 | def sconsign_dir ( node ) : if not node . _sconsign : import SCons . SConsign node . _sconsign = SCons . SConsign . ForDirectory ( node ) return node . _sconsign | Return the . sconsign file info for this directory creating it first if necessary . |
21,944 | def __get_base_path ( self ) : entry = self . get ( ) return SCons . Subst . SpecialAttrWrapper ( SCons . Util . splitext ( entry . get_path ( ) ) [ 0 ] , entry . name + "_base" ) | Return the file's directory and file name with the suffix stripped . |
21,945 | def __get_windows_path ( self ) : if OS_SEP == '\\' : return self else : entry = self . get ( ) r = entry . get_path ( ) . replace ( OS_SEP , '\\' ) return SCons . Subst . SpecialAttrWrapper ( r , entry . name + "_windows" ) | Return the path with \ as the path separator regardless of platform . |
21,946 | def must_be_same ( self , klass ) : if isinstance ( self , klass ) or klass is Entry : return raise TypeError ( "Tried to lookup %s '%s' as a %s." % ( self . __class__ . __name__ , self . get_internal_path ( ) , klass . __name__ ) ) | This node which already existed is being looked up as the specified klass . Raise an exception if it isn't . |
21,947 | def srcnode ( self ) : srcdir_list = self . dir . srcdir_list ( ) if srcdir_list : srcnode = srcdir_list [ 0 ] . Entry ( self . name ) srcnode . must_be_same ( self . __class__ ) return srcnode return self | If this node is in a build path return the node corresponding to its source file . Otherwise return ourself . |
21,948 | def get_path ( self , dir = None ) : if not dir : dir = self . fs . getcwd ( ) if self == dir : return '.' path_elems = self . get_path_elements ( ) pathname = '' try : i = path_elems . index ( dir ) except ValueError : for p in path_elems [ : - 1 ] : pathname += p . dirname else : for p in path_elems [ i + 1 : - 1 ] : pathname += p . dirname return pathname + path_elems [ - 1 ] . name | Return path relative to the current working directory of the Node . FS . Base object that owns us . |
21,949 | def set_src_builder ( self , builder ) : self . sbuilder = builder if not self . has_builder ( ) : self . builder_set ( builder ) | Set the source code builder for this node . |
21,950 | def src_builder ( self ) : try : scb = self . sbuilder except AttributeError : scb = self . dir . src_builder ( ) self . sbuilder = scb return scb | Fetch the source code builder for this node . |
21,951 | def Rfindalldirs ( self , pathlist ) : try : memo_dict = self . _memo [ 'Rfindalldirs' ] except KeyError : memo_dict = { } self . _memo [ 'Rfindalldirs' ] = memo_dict else : try : return memo_dict [ pathlist ] except KeyError : pass create_dir_relative_to_self = self . Dir result = [ ] for path in pathlist : if isinstance ( path , SCons . Node . Node ) : result . append ( path ) else : dir = create_dir_relative_to_self ( path ) result . extend ( dir . get_all_rdirs ( ) ) memo_dict [ pathlist ] = result return result | Return all of the directories for a given path list including corresponding backing directories in any repositories . |
21,952 | def RDirs ( self , pathlist ) : cwd = self . cwd or self . fs . _cwd return cwd . Rfindalldirs ( pathlist ) | Search for a list of directories in the Repository list . |
21,953 | def rfile ( self ) : self . __class__ = File self . _morph ( ) self . clear ( ) return File . rfile ( self ) | We're a generic Entry but the caller is actually looking for a File at this point so morph into one . |
21,954 | def get_text_contents ( self ) : try : self = self . disambiguate ( must_exist = 1 ) except SCons . Errors . UserError : return '' else : return self . get_text_contents ( ) | Fetch the decoded text contents of a Unicode encoded Entry . |
21,955 | def must_be_same ( self , klass ) : if self . __class__ is not klass : self . __class__ = klass self . _morph ( ) self . clear ( ) | Called to make sure a Node is a Dir . Since we're an Entry we can morph into one . |
21,956 | def chdir ( self , dir , change_os_dir = 0 ) : curr = self . _cwd try : if dir is not None : self . _cwd = dir if change_os_dir : os . chdir ( dir . get_abspath ( ) ) except OSError : self . _cwd = curr raise | Change the current working directory for lookups . If change_os_dir is true we will also change the real cwd to match . |
21,957 | def get_root ( self , drive ) : drive = _my_normcase ( drive ) try : return self . Root [ drive ] except KeyError : root = RootDir ( drive , self ) self . Root [ drive ] = root if not drive : self . Root [ self . defaultDrive ] = root elif drive == self . defaultDrive : self . Root [ '' ] = root return root | Returns the root directory for the specified drive creating it if necessary . |
21,958 | def _lookup ( self , p , directory , fsclass , create = 1 ) : if isinstance ( p , Base ) : p . must_be_same ( fsclass ) return p p = str ( p ) if not os_sep_is_slash : p = p . replace ( OS_SEP , '/' ) if p [ 0 : 1 ] == '#' : p = p [ 1 : ] directory = self . Top if do_splitdrive : drive , p = _my_splitdrive ( p ) if drive : root = self . get_root ( drive ) else : root = directory . root else : root = directory . root p = p . strip ( '/' ) needs_normpath = needs_normpath_match ( p ) if p in ( '' , '.' ) : p = directory . get_labspath ( ) else : p = directory . get_labspath ( ) + '/' + p else : if do_splitdrive : drive , p = _my_splitdrive ( p ) if drive and not p : p = '/' else : drive = '' if p != '/' : p = p . rstrip ( '/' ) needs_normpath = needs_normpath_match ( p ) if p [ 0 : 1 ] == '/' : root = self . get_root ( drive ) else : if directory : if not isinstance ( directory , Dir ) : directory = self . Dir ( directory ) else : directory = self . _cwd if p in ( '' , '.' ) : p = directory . get_labspath ( ) else : p = directory . get_labspath ( ) + '/' + p if drive : root = self . get_root ( drive ) else : root = directory . root if needs_normpath is not None : ins = p . split ( '/' ) [ 1 : ] outs = [ ] for d in ins : if d == '..' : try : outs . pop ( ) except IndexError : pass elif d not in ( '' , '.' ) : outs . append ( d ) p = '/' + '/' . join ( outs ) return root . _lookup_abs ( p , fsclass , create ) | The generic entry point for Node lookup with user - supplied data . |
21,959 | def VariantDir ( self , variant_dir , src_dir , duplicate = 1 ) : if not isinstance ( src_dir , SCons . Node . Node ) : src_dir = self . Dir ( src_dir ) if not isinstance ( variant_dir , SCons . Node . Node ) : variant_dir = self . Dir ( variant_dir ) if src_dir . is_under ( variant_dir ) : raise SCons . Errors . UserError ( "Source directory cannot be under variant directory." ) if variant_dir . srcdir : if variant_dir . srcdir == src_dir : return raise SCons . Errors . UserError ( "'%s' already has a source directory: '%s'." % ( variant_dir , variant_dir . srcdir ) ) variant_dir . link ( src_dir , duplicate ) | Link the supplied variant directory to the source directory for purposes of building files . |
21,960 | def Repository ( self , * dirs ) : for d in dirs : if not isinstance ( d , SCons . Node . Node ) : d = self . Dir ( d ) self . Top . addRepository ( d ) | Specify Repository directories to search . |
21,961 | def variant_dir_target_climb ( self , orig , dir , tail ) : targets = [ ] message = None fmt = "building associated VariantDir targets: %s" start_dir = dir while dir : for bd in dir . variant_dirs : if start_dir . is_under ( bd ) : return [ orig ] , fmt % str ( orig ) p = os . path . join ( bd . _path , * tail ) targets . append ( self . Entry ( p ) ) tail = [ dir . name ] + tail dir = dir . up ( ) if targets : message = fmt % ' ' . join ( map ( str , targets ) ) return targets , message | Create targets in corresponding variant directories |
21,962 | def Dir ( self , name , create = True ) : return self . fs . Dir ( name , self , create ) | Looks up or creates a directory node named name relative to this directory . |
21,963 | def link ( self , srcdir , duplicate ) : self . srcdir = srcdir self . duplicate = duplicate self . __clearRepositoryCache ( duplicate ) srcdir . variant_dirs . append ( self ) | Set this directory as the variant directory for the supplied source directory . |
21,964 | def getRepositories ( self ) : if self . srcdir and not self . duplicate : return self . srcdir . get_all_rdirs ( ) + self . repositories return self . repositories | Returns a list of repositories for this directory . |
21,965 | def rel_path ( self , other ) : try : memo_dict = self . _memo [ 'rel_path' ] except KeyError : memo_dict = { } self . _memo [ 'rel_path' ] = memo_dict else : try : return memo_dict [ other ] except KeyError : pass if self is other : result = '.' elif not other in self . _path_elements : try : other_dir = other . get_dir ( ) except AttributeError : result = str ( other ) else : if other_dir is None : result = other . name else : dir_rel_path = self . rel_path ( other_dir ) if dir_rel_path == '.' : result = other . name else : result = dir_rel_path + OS_SEP + other . name else : i = self . _path_elements . index ( other ) + 1 path_elems = [ '..' ] * ( len ( self . _path_elements ) - i ) + [ n . name for n in other . _path_elements [ i : ] ] result = OS_SEP . join ( path_elems ) memo_dict [ other ] = result return result | Return a path to other relative to this directory . |
21,966 | def get_found_includes ( self , env , scanner , path ) : if not scanner : return [ ] self . clear ( ) return scanner ( self , env , path ) | Return this directory's implicit dependencies . |
21,967 | def build ( self , ** kw ) : global MkdirBuilder if self . builder is not MkdirBuilder : SCons . Node . Node . build ( self , ** kw ) | A null builder for directories . |
21,968 | def _create ( self ) : listDirs = [ ] parent = self while parent : if parent . exists ( ) : break listDirs . append ( parent ) p = parent . up ( ) if p is None : raise SCons . Errors . StopError ( parent . _path ) parent = p listDirs . reverse ( ) for dirnode in listDirs : try : SCons . Node . Node . build ( dirnode ) dirnode . get_executor ( ) . nullify ( ) dirnode . clear ( ) except OSError : pass | Create this directory silently and without worrying about whether the builder is the default or not . |
21,969 | def is_up_to_date ( self ) : if self . builder is not MkdirBuilder and not self . exists ( ) : return 0 up_to_date = SCons . Node . up_to_date for kid in self . children ( ) : if kid . get_state ( ) > up_to_date : return 0 return 1 | If any child is not up - to - date then this directory isn't either . |
21,970 | def get_timestamp ( self ) : stamp = 0 for kid in self . children ( ) : if kid . get_timestamp ( ) > stamp : stamp = kid . get_timestamp ( ) return stamp | Return the latest timestamp from among our children |
21,971 | def walk ( self , func , arg ) : entries = self . entries names = list ( entries . keys ( ) ) names . remove ( '.' ) names . remove ( '..' ) func ( arg , self , names ) for dirname in [ n for n in names if isinstance ( entries [ n ] , Dir ) ] : entries [ dirname ] . walk ( func , arg ) | Walk this directory tree by calling the specified function for each directory in the tree . |
21,972 | def _glob1 ( self , pattern , ondisk = True , source = False , strings = False ) : search_dir_list = self . get_all_rdirs ( ) for srcdir in self . srcdir_list ( ) : search_dir_list . extend ( srcdir . get_all_rdirs ( ) ) selfEntry = self . Entry names = [ ] for dir in search_dir_list : node_names = [ v . name for k , v in dir . entries . items ( ) if k not in ( '.' , '..' ) ] names . extend ( node_names ) if not strings : for name in node_names : selfEntry ( name ) if ondisk : try : disk_names = os . listdir ( dir . _abspath ) except os . error : continue names . extend ( disk_names ) if not strings : if pattern [ 0 ] != '.' : disk_names = [ x for x in disk_names if x [ 0 ] != '.' ] disk_names = fnmatch . filter ( disk_names , pattern ) dirEntry = dir . Entry for name in disk_names : name = './' + name node = dirEntry ( name ) . disambiguate ( ) n = selfEntry ( name ) if n . __class__ != node . __class__ : n . __class__ = node . __class__ n . _morph ( ) names = set ( names ) if pattern [ 0 ] != '.' : names = [ x for x in names if x [ 0 ] != '.' ] names = fnmatch . filter ( names , pattern ) if strings : return names return [ self . entries [ _my_normcase ( n ) ] for n in names ] | Globs for and returns a list of entry names matching a single pattern in this directory . |
21,973 | def convert_to_sconsign ( self ) : if os_sep_is_slash : node_to_str = str else : def node_to_str ( n ) : try : s = n . get_internal_path ( ) except AttributeError : s = str ( n ) else : s = s . replace ( OS_SEP , '/' ) return s for attr in [ 'bsources' , 'bdepends' , 'bimplicit' ] : try : val = getattr ( self , attr ) except AttributeError : pass else : setattr ( self , attr , list ( map ( node_to_str , val ) ) ) | Converts this FileBuildInfo object for writing to a . sconsign file |
21,974 | def prepare_dependencies ( self ) : attrs = [ ( 'bsources' , 'bsourcesigs' ) , ( 'bdepends' , 'bdependsigs' ) , ( 'bimplicit' , 'bimplicitsigs' ) , ] for ( nattr , sattr ) in attrs : try : strings = getattr ( self , nattr ) nodeinfos = getattr ( self , sattr ) except AttributeError : continue if strings is None or nodeinfos is None : continue nodes = [ ] for s , ni in zip ( strings , nodeinfos ) : if not isinstance ( s , SCons . Node . Node ) : s = ni . str_to_node ( s ) nodes . append ( s ) setattr ( self , nattr , nodes ) | Prepares a FileBuildInfo object for explaining what changed |
21,975 | def Dir ( self , name , create = True ) : return self . dir . Dir ( name , create = create ) | Create a directory node named name relative to the directory of this file . |
21,976 | def _morph ( self ) : self . scanner_paths = { } if not hasattr ( self , '_local' ) : self . _local = 0 if not hasattr ( self , 'released_target_info' ) : self . released_target_info = False self . store_info = 1 self . _func_exists = 4 self . _func_get_contents = 3 self . changed_since_last_build = 4 if self . has_builder ( ) : self . changed_since_last_build = 5 | Turn a file system node into a File object . |
21,977 | def get_text_contents ( self ) : contents = self . get_contents ( ) if contents [ : len ( codecs . BOM_UTF8 ) ] == codecs . BOM_UTF8 : return contents [ len ( codecs . BOM_UTF8 ) : ] . decode ( 'utf-8' ) if contents [ : len ( codecs . BOM_UTF16_LE ) ] == codecs . BOM_UTF16_LE : return contents [ len ( codecs . BOM_UTF16_LE ) : ] . decode ( 'utf-16-le' ) if contents [ : len ( codecs . BOM_UTF16_BE ) ] == codecs . BOM_UTF16_BE : return contents [ len ( codecs . BOM_UTF16_BE ) : ] . decode ( 'utf-16-be' ) try : return contents . decode ( 'utf-8' ) except UnicodeDecodeError as e : try : return contents . decode ( 'latin-1' ) except UnicodeDecodeError as e : return contents . decode ( 'utf-8' , errors = 'backslashreplace' ) | This attempts to figure out what the encoding of the text is based upon the BOM bytes and then decodes the contents so that it's a valid python string . |
21,978 | def get_content_hash ( self ) : if not self . rexists ( ) : return SCons . Util . MD5signature ( '' ) fname = self . rfile ( ) . get_abspath ( ) try : cs = SCons . Util . MD5filesignature ( fname , chunksize = SCons . Node . FS . File . md5_chunksize * 1024 ) except EnvironmentError as e : if not e . filename : e . filename = fname raise return cs | Compute and return the MD5 hash for this file . |
21,979 | def get_found_includes ( self , env , scanner , path ) : memo_key = ( id ( env ) , id ( scanner ) , path ) try : memo_dict = self . _memo [ 'get_found_includes' ] except KeyError : memo_dict = { } self . _memo [ 'get_found_includes' ] = memo_dict else : try : return memo_dict [ memo_key ] except KeyError : pass if scanner : result = [ n . disambiguate ( ) for n in scanner ( self , env , path ) ] else : result = [ ] memo_dict [ memo_key ] = result return result | Return the included implicit dependencies in this file . Cache results so we only scan the file once per path regardless of how many times this information is requested . |
21,980 | def push_to_cache ( self ) : if self . nocache : return self . clear_memoized_values ( ) if self . exists ( ) : self . get_build_env ( ) . get_CacheDir ( ) . push ( self ) | Try to push the node into a cache |
21,981 | def retrieve_from_cache ( self ) : if self . nocache : return None if not self . is_derived ( ) : return None return self . get_build_env ( ) . get_CacheDir ( ) . retrieve ( self ) | Try to retrieve the node's content from a cache |
21,982 | def release_target_info ( self ) : if ( self . released_target_info or SCons . Node . interactive ) : return if not hasattr ( self . attributes , 'keep_targetinfo' ) : self . changed ( allowcache = True ) self . get_contents_sig ( ) self . get_build_env ( ) self . executor = None self . _memo . pop ( 'rfile' , None ) self . prerequisites = None if not len ( self . ignore_set ) : self . ignore_set = None if not len ( self . implicit_set ) : self . implicit_set = None if not len ( self . depends_set ) : self . depends_set = None if not len ( self . ignore ) : self . ignore = None if not len ( self . depends ) : self . depends = None self . released_target_info = True | Called just after this node has been marked up - to - date or was built completely . |
21,983 | def has_src_builder ( self ) : try : scb = self . sbuilder except AttributeError : scb = self . sbuilder = self . find_src_builder ( ) return scb is not None | Return whether this Node has a source builder or not . |
21,984 | def alter_targets ( self ) : if self . is_derived ( ) : return [ ] , None return self . fs . variant_dir_target_climb ( self , self . dir , [ self . name ] ) | Return any corresponding targets in a variant directory . |
21,985 | def prepare ( self ) : SCons . Node . Node . prepare ( self ) if self . get_state ( ) != SCons . Node . up_to_date : if self . exists ( ) : if self . is_derived ( ) and not self . precious : self . _rmv_existing ( ) else : try : self . _createDir ( ) except SCons . Errors . StopError as drive : raise SCons . Errors . StopError ( "No drive `{}' for target `{}'." . format ( drive , self ) ) | Prepare for this file to be created . |
21,986 | def remove ( self ) : if self . exists ( ) or self . islink ( ) : self . fs . unlink ( self . get_internal_path ( ) ) return 1 return None | Remove this file . |
21,987 | def get_max_drift_csig ( self ) : old = self . get_stored_info ( ) mtime = self . get_timestamp ( ) max_drift = self . fs . max_drift if max_drift > 0 : if ( time . time ( ) - mtime ) > max_drift : try : n = old . ninfo if n . timestamp and n . csig and n . timestamp == mtime : return n . csig except AttributeError : pass elif max_drift == 0 : try : return old . ninfo . csig except AttributeError : pass return None | Returns the content signature currently stored for this node if it's been unmodified longer than the max_drift value or the max_drift value is 0 . Returns None otherwise . |
21,988 | def built ( self ) : SCons . Node . Node . built ( self ) if ( not SCons . Node . interactive and not hasattr ( self . attributes , 'keep_targetinfo' ) ) : SCons . Node . store_info_map [ self . store_info ] ( self ) self . _specific_sources = False self . _labspath = None self . _save_str ( ) self . cwd = None self . scanner_paths = None | Called just after this File node is successfully built . |
21,989 | def changed ( self , node = None , allowcache = False ) : if node is None : try : return self . _memo [ 'changed' ] except KeyError : pass has_changed = SCons . Node . Node . changed ( self , node ) if allowcache : self . _memo [ 'changed' ] = has_changed return has_changed | Returns if the node is up - to - date with respect to the BuildInfo stored last time it was built . |
21,990 | def get_cachedir_csig ( self ) : try : return self . cachedir_csig except AttributeError : pass cachedir , cachefile = self . get_build_env ( ) . get_CacheDir ( ) . cachepath ( self ) if not self . exists ( ) and cachefile and os . path . exists ( cachefile ) : self . cachedir_csig = SCons . Util . MD5filesignature ( cachefile , SCons . Node . FS . File . md5_chunksize * 1024 ) else : self . cachedir_csig = self . get_csig ( ) return self . cachedir_csig | Fetch a Node's content signature for purposes of computing another Node's cachesig . |
21,991 | def get_contents_sig ( self ) : try : return self . contentsig except AttributeError : pass executor = self . get_executor ( ) result = self . contentsig = SCons . Util . MD5signature ( executor . get_contents ( ) ) return result | A helper method for get_cachedir_bsig . |
21,992 | def get_cachedir_bsig ( self ) : try : return self . cachesig except AttributeError : pass children = self . children ( ) sigs = [ n . get_cachedir_csig ( ) for n in children ] sigs . append ( self . get_contents_sig ( ) ) sigs . append ( self . get_internal_path ( ) ) result = self . cachesig = SCons . Util . MD5collect ( sigs ) return result | Return the signature for a cached file including its children . |
21,993 | def find_file ( self , filename , paths , verbose = None ) : memo_key = self . _find_file_key ( filename , paths ) try : memo_dict = self . _memo [ 'find_file' ] except KeyError : memo_dict = { } self . _memo [ 'find_file' ] = memo_dict else : try : return memo_dict [ memo_key ] except KeyError : pass if verbose and not callable ( verbose ) : if not SCons . Util . is_String ( verbose ) : verbose = "find_file" _verbose = u' %s: ' % verbose verbose = lambda s : sys . stdout . write ( _verbose + s ) filedir , filename = os . path . split ( filename ) if filedir : self . default_filedir = filedir paths = [ _f for _f in map ( self . filedir_lookup , paths ) if _f ] result = None for dir in paths : if verbose : verbose ( "looking for '%s' in '%s' ...\n" % ( filename , dir ) ) node , d = dir . srcdir_find_file ( filename ) if node : if verbose : verbose ( "... FOUND '%s' in '%s'\n" % ( filename , d ) ) result = node break memo_dict [ memo_key ] = result return result | Find a node corresponding to either a derived file or a file that exists already . |
21,994 | def run ( self , resources ) : hwman = resources [ 'connection' ] updater = hwman . hwman . app ( name = 'device_updater' ) updater . run_script ( self . _script , no_reboot = self . _no_reboot ) | Actually send the trub script . |
21,995 | def process_gatt_service ( services , event ) : length = len ( event . payload ) - 5 handle , start , end , uuid = unpack ( '<BHH%ds' % length , event . payload ) uuid = process_uuid ( uuid ) services [ uuid ] = { 'uuid_raw' : uuid , 'start_handle' : start , 'end_handle' : end } | Process a BGAPI event containing a GATT service description and add it to a dictionary |
21,996 | def handle_to_uuid ( handle , services ) : for service in services . values ( ) : for char_uuid , char_def in service [ 'characteristics' ] . items ( ) : if char_def [ 'handle' ] == handle : return char_uuid raise ValueError ( "Handle not found in GATT table" ) | Find the corresponding UUID for an attribute handle |
21,997 | def _validator ( key , val , env ) : if not env [ key ] in ( True , False ) : raise SCons . Errors . UserError ( 'Invalid value for boolean option %s: %s' % ( key , env [ key ] ) ) | Validates the given value to be either 0 or 1 . This is usable as validator for SCons Variables . |
21,998 | def FromDictionary ( cls , msg_dict ) : level = msg_dict . get ( 'level' ) msg = msg_dict . get ( 'message' ) now = msg_dict . get ( 'now_time' ) created = msg_dict . get ( 'created_time' ) count = msg_dict . get ( 'count' , 1 ) msg_id = msg_dict . get ( 'id' , 0 ) new_msg = ServiceMessage ( level , msg , msg_id , created , now ) if count > 1 : new_msg . count = count return new_msg | Create from a dictionary with kv pairs . |
21,999 | def to_dict ( self ) : msg_dict = { } msg_dict [ 'level' ] = self . level msg_dict [ 'message' ] = self . message msg_dict [ 'now_time' ] = monotonic ( ) msg_dict [ 'created_time' ] = self . created msg_dict [ 'id' ] = self . id msg_dict [ 'count' ] = self . count return msg_dict | Create a dictionary with the information in this message . |
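
Note: the `question` column stores each function as a single whitespace-tokenized line, with newlines and indentation stripped, so the rows above are not directly runnable source. Below is a minimal sketch of one way to recover compact, readable code from a row; the `detokenize` helper and its regex rules are assumptions about how the tokenization was produced, not part of this dataset or any library.

```python
import re

def detokenize(tokenized: str) -> str:
    """Heuristically undo whitespace tokenization of a Python snippet.

    The dataset does not preserve newlines or indentation, so the result
    is one logical line for quick reading, not the original source.
    """
    code = re.sub(r"\s+([)\]}.,:])", r"\1", tokenized)  # drop spaces before closers and punctuation
    code = re.sub(r"([(\[{.])\s+", r"\1", code)         # drop spaces after openers and dots
    code = re.sub(r"(\w)\s+\(", r"\1(", code)           # rejoin names with call/def parentheses
    return code

print(detokenize("def stop ( self ) : self . client . disconnect ( )"))
# -> def stop(self): self.client.disconnect()
```

Statement boundaries and indentation are not recoverable from the flattened form, so the output is suitable for reading but not for execution.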