idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
22,400 | def get_config_database_info ( self ) : max_size = self . config_database . data_size max_entries = self . config_database . max_entries ( ) used_size = self . config_database . data_index used_entries = len ( self . config_database . entries ) invalid_size = sum ( x . data_space ( ) for x in self . config_database . entries if not x . valid ) invalid_entries = sum ( 1 for x in self . config_database . entries if not x . valid ) return [ max_size , used_size , invalid_size , used_entries , invalid_entries , max_entries , 0 ] | Get memory usage and space statistics on the config database . |
22,401 | def FindByName ( cls , name ) : if name . endswith ( '.py' ) : return cls . LoadFromFile ( name ) reg = ComponentRegistry ( ) for _name , tile in reg . load_extensions ( 'iotile.virtual_tile' , name_filter = name , class_filter = VirtualTile ) : return tile raise ArgumentError ( "VirtualTile could not be found by name" , name = name ) | Find an installed VirtualTile by name . |
22,402 | def LoadFromFile ( cls , script_path ) : _name , dev = ComponentRegistry ( ) . load_extension ( script_path , class_filter = VirtualTile , unique = True ) return dev | Import a virtual tile from a file rather than an installed module |
22,403 | def stage ( self ) : if 'PYPI_USER' not in os . environ or 'PYPI_PASS' not in os . environ : raise BuildError ( "You must set the PYPI_USER and PYPI_PASS environment variables" ) try : import twine except ImportError : raise BuildError ( "You must install twine in order to release python packages" , suggestion = "pip install twine" ) if not self . component . has_wheel : raise BuildError ( "You can't release a component to a PYPI repository if it doesn't have python packages" ) wheel = self . component . support_wheel sdist = "%s-%s.tar.gz" % ( self . component . support_distribution , self . component . parsed_version . pep440_string ( ) ) wheel_path = os . path . realpath ( os . path . abspath ( os . path . join ( self . component . output_folder , 'python' , wheel ) ) ) sdist_path = os . path . realpath ( os . path . abspath ( os . path . join ( self . component . output_folder , 'python' , sdist ) ) ) if not os . path . isfile ( wheel_path ) or not os . path . isfile ( sdist_path ) : raise BuildError ( "Could not find built wheel or sdist matching current built version" , sdist_path = sdist_path , wheel_path = wheel_path ) self . dists = [ sdist_path , wheel_path ] | Stage python packages for release verifying everything we can about them . |
22,404 | def add_data ( self , data ) : if self . state == self . ErrorState : return self . raw_data += bytearray ( data ) still_processing = True while still_processing : still_processing = self . process_data ( ) | Add data to our stream emitting reports as each new one is seen |
22,405 | def process_data ( self ) : further_processing = False if self . state == self . WaitingForReportType and len ( self . raw_data ) > 0 : self . current_type = self . raw_data [ 0 ] try : self . current_header_size = self . calculate_header_size ( self . current_type ) self . state = self . WaitingForReportHeader further_processing = True except Exception as exc : self . state = self . ErrorState if self . error_callback : self . error_callback ( self . ErrorFindingReportType , str ( exc ) , self . context ) else : raise if self . state == self . WaitingForReportHeader and len ( self . raw_data ) >= self . current_header_size : try : self . current_report_size = self . calculate_report_size ( self . current_type , self . raw_data [ : self . current_header_size ] ) self . state = self . WaitingForCompleteReport further_processing = True except Exception as exc : self . state = self . ErrorState if self . error_callback : self . error_callback ( self . ErrorParsingReportHeader , str ( exc ) , self . context ) else : raise if self . state == self . WaitingForCompleteReport and len ( self . raw_data ) >= self . current_report_size : try : report_data = self . raw_data [ : self . current_report_size ] self . raw_data = self . raw_data [ self . current_report_size : ] report = self . parse_report ( self . current_type , report_data ) self . _handle_report ( report ) self . state = self . WaitingForReportType further_processing = True except Exception as exc : self . state = self . ErrorState if self . error_callback : self . error_callback ( self . ErrorParsingCompleteReport , str ( exc ) , self . context ) else : raise return further_processing | Attempt to extract a report from the current data stream contents |
22,406 | def calculate_report_size ( self , current_type , report_header ) : fmt = self . known_formats [ current_type ] return fmt . ReportLength ( report_header ) | Determine the size of a report given its type and header |
22,407 | def parse_report ( self , current_type , report_data ) : fmt = self . known_formats [ current_type ] return fmt ( report_data ) | Parse a report into an IOTileReport subclass |
22,408 | def _handle_report ( self , report ) : keep_report = True if self . report_callback is not None : keep_report = self . report_callback ( report , self . context ) if keep_report : self . reports . append ( report ) | Try to emit a report and possibly keep a copy of it |
22,409 | def _POInitBuilder ( env , ** kw ) : import SCons . Action from SCons . Tool . GettextCommon import _init_po_files , _POFileBuilder action = SCons . Action . Action ( _init_po_files , None ) return _POFileBuilder ( env , action = action , target_alias = '$POCREATE_ALIAS' ) | Create builder object for POInit builder . |
22,410 | def generate ( env , ** kw ) : import SCons . Util from SCons . Tool . GettextCommon import _detect_msginit try : env [ 'MSGINIT' ] = _detect_msginit ( env ) except : env [ 'MSGINIT' ] = 'msginit' msginitcom = '$MSGINIT ${_MSGNoTranslator(__env__)} -l ${_MSGINITLOCALE}' + ' $MSGINITFLAGS -i $SOURCE -o $TARGET' env . SetDefault ( POSUFFIX = [ '.po' ] , POTSUFFIX = [ '.pot' ] , _MSGINITLOCALE = '${TARGET.filebase}' , _MSGNoTranslator = _optional_no_translator_flag , MSGINITCOM = msginitcom , MSGINITCOMSTR = '' , MSGINITFLAGS = [ ] , POAUTOINIT = False , POCREATE_ALIAS = 'po-create' ) env . Append ( BUILDERS = { '_POInitBuilder' : _POInitBuilder ( env ) } ) env . AddMethod ( _POInitBuilderWrapper , 'POInit' ) env . AlwaysBuild ( env . Alias ( '$POCREATE_ALIAS' ) ) | Generate the msginit tool |
22,411 | def open_bled112 ( port , logger ) : if port is not None and port != '<auto>' : logger . info ( "Using BLED112 adapter at %s" , port ) return serial . Serial ( port , _BAUD_RATE , timeout = 0.01 , rtscts = True , exclusive = True ) return _find_available_bled112 ( logger ) | Open a BLED112 adapter either by name or the first available . |
22,412 | def _find_ble_controllers ( self ) : controllers = self . bable . list_controllers ( ) return [ ctrl for ctrl in controllers if ctrl . powered and ctrl . low_energy ] | Get a list of the available and powered BLE controllers |
22,413 | def stop_scan ( self ) : try : self . bable . stop_scan ( sync = True ) except bable_interface . BaBLEException : pass self . scanning = False | Stop to scan . |
22,414 | def _open_rpc_interface ( self , connection_id , callback ) : try : context = self . connections . get_context ( connection_id ) except ArgumentError : callback ( connection_id , self . id , False , "Could not find connection information" ) return self . connections . begin_operation ( connection_id , 'open_interface' , callback , self . get_config ( 'default_timeout' ) ) try : service = context [ 'services' ] [ TileBusService ] header_characteristic = service [ ReceiveHeaderChar ] payload_characteristic = service [ ReceivePayloadChar ] except KeyError : self . connections . finish_operation ( connection_id , False , "Can't find characteristics to open rpc interface" ) return self . bable . set_notification ( enabled = True , connection_handle = context [ 'connection_handle' ] , characteristic = header_characteristic , on_notification_set = [ self . _on_interface_opened , context , payload_characteristic ] , on_notification_received = self . _on_notification_received , sync = False ) | Enable RPC interface for this IOTile device |
22,415 | def _open_streaming_interface ( self , connection_id , callback ) : try : context = self . connections . get_context ( connection_id ) except ArgumentError : callback ( connection_id , self . id , False , "Could not find connection information" ) return self . _logger . info ( "Attempting to enable streaming" ) self . connections . begin_operation ( connection_id , 'open_interface' , callback , self . get_config ( 'default_timeout' ) ) try : characteristic = context [ 'services' ] [ TileBusService ] [ StreamingChar ] except KeyError : self . connections . finish_operation ( connection_id , False , "Can't find characteristic to open streaming interface" ) return context [ 'parser' ] = IOTileReportParser ( report_callback = self . _on_report , error_callback = self . _on_report_error ) context [ 'parser' ] . context = connection_id def on_report_chunk_received ( report_chunk ) : context [ 'parser' ] . add_data ( report_chunk ) self . _register_notification_callback ( context [ 'connection_handle' ] , characteristic . value_handle , on_report_chunk_received ) self . bable . set_notification ( enabled = True , connection_handle = context [ 'connection_handle' ] , characteristic = characteristic , on_notification_set = [ self . _on_interface_opened , context ] , on_notification_received = self . _on_notification_received , timeout = 1.0 , sync = False ) | Enable streaming interface for this IOTile device |
22,416 | def _open_tracing_interface ( self , connection_id , callback ) : try : context = self . connections . get_context ( connection_id ) except ArgumentError : callback ( connection_id , self . id , False , "Could not find connection information" ) return self . _logger . info ( "Attempting to enable tracing" ) self . connections . begin_operation ( connection_id , 'open_interface' , callback , self . get_config ( 'default_timeout' ) ) try : characteristic = context [ 'services' ] [ TileBusService ] [ TracingChar ] except KeyError : self . connections . finish_operation ( connection_id , False , "Can't find characteristic to open tracing interface" ) return self . _register_notification_callback ( context [ 'connection_handle' ] , characteristic . value_handle , lambda trace_chunk : self . _trigger_callback ( 'on_trace' , connection_id , bytearray ( trace_chunk ) ) ) self . bable . set_notification ( enabled = True , connection_handle = context [ 'connection_handle' ] , characteristic = characteristic , on_notification_set = [ self . _on_interface_opened , context ] , on_notification_received = self . _on_notification_received , timeout = 1.0 , sync = False ) | Enable the tracing interface for this IOTile device |
22,417 | def _close_rpc_interface ( self , connection_id , callback ) : try : context = self . connections . get_context ( connection_id ) except ArgumentError : callback ( connection_id , self . id , False , "Could not find connection information" ) return self . connections . begin_operation ( connection_id , 'close_interface' , callback , self . get_config ( 'default_timeout' ) ) try : service = context [ 'services' ] [ TileBusService ] header_characteristic = service [ ReceiveHeaderChar ] payload_characteristic = service [ ReceivePayloadChar ] except KeyError : self . connections . finish_operation ( connection_id , False , "Can't find characteristics to open rpc interface" ) return self . bable . set_notification ( enabled = False , connection_handle = context [ 'connection_handle' ] , characteristic = header_characteristic , on_notification_set = [ self . _on_interface_closed , context , payload_characteristic ] , timeout = 1.0 ) | Disable RPC interface for this IOTile device |
22,418 | def _on_report ( self , report , connection_id ) : self . _logger . info ( 'Received report: %s' , str ( report ) ) self . _trigger_callback ( 'on_report' , connection_id , report ) return False | Callback function called when a report has been processed . |
22,419 | def _on_report_error ( self , code , message , connection_id ) : self . _logger . critical ( "Error receiving reports, no more reports will be processed on this adapter, code=%d, msg=%s" , code , message ) | Callback function called if an error occured while parsing a report |
22,420 | def _register_notification_callback ( self , connection_handle , attribute_handle , callback , once = False ) : notification_id = ( connection_handle , attribute_handle ) with self . notification_callbacks_lock : self . notification_callbacks [ notification_id ] = ( callback , once ) | Register a callback as a notification callback . It will be called if a notification with the matching connection_handle and attribute_handle is received . |
22,421 | def periodic_callback ( self ) : if self . stopped : return if not self . scanning and len ( self . connections . get_connections ( ) ) == 0 : self . _logger . info ( "Restarting scan for devices" ) self . start_scan ( self . _active_scan ) self . _logger . info ( "Finished restarting scan for devices" ) | Periodic cleanup tasks to maintain this adapter should be called every second . |
22,422 | def format_snippet ( sensor_graph ) : output = [ ] output . append ( "disable" ) output . append ( "clear" ) output . append ( "reset" ) for node in sensor_graph . dump_nodes ( ) : output . append ( 'add_node "{}"' . format ( node ) ) for streamer in sensor_graph . streamers : line = "add_streamer '{}' '{}' {} {} {}" . format ( streamer . selector , streamer . dest , streamer . automatic , streamer . format , streamer . report_type ) if streamer . with_other is not None : line += ' --withother {}' . format ( streamer . with_other ) output . append ( line ) for stream , value in sorted ( sensor_graph . constant_database . items ( ) , key = lambda x : x [ 0 ] . encode ( ) ) : output . append ( "set_constant '{}' {}" . format ( stream , value ) ) output . append ( "persist" ) output . append ( "back" ) app_tag = sensor_graph . metadata_database . get ( 'app_tag' ) app_version = sensor_graph . metadata_database . get ( 'app_version' ) if app_tag is not None : if app_version is None : app_version = "0.0" output . append ( "test_interface" ) output . append ( "set_version app %d --version '%s'" % ( app_tag , app_version ) ) output . append ( "back" ) output . append ( "config_database" ) output . append ( "clear_variables" ) for slot , conf_vars in sensor_graph . config_database . items ( ) : for conf_var , conf_def in conf_vars . items ( ) : conf_type , conf_val = conf_def if conf_type == 'binary' : conf_val = 'hex:' + hexlify ( conf_val ) elif isinstance ( conf_val , str ) : conf_val = '"%s"' % conf_val output . append ( "set_variable '{}' {} {} {}" . format ( slot , conf_var , conf_type , conf_val ) ) output . append ( "back" ) output . append ( "reset" ) return "\n" . join ( output ) + '\n' | Format this sensor graph as iotile command snippets . |
22,423 | def find_bled112_devices ( cls ) : found_devs = [ ] ports = serial . tools . list_ports . comports ( ) for port in ports : if not hasattr ( port , 'pid' ) or not hasattr ( port , 'vid' ) : continue if port . pid == 1 and port . vid == 9304 : found_devs . append ( port . device ) return found_devs | Look for BLED112 dongles on this computer and start an instance on each one |
22,424 | def get_scan_stats ( self ) : time_spent = time . time ( ) return self . _scan_event_count , self . _v1_scan_count , self . _v1_scan_response_count , self . _v2_scan_count , self . _device_scan_counts . copy ( ) , ( time_spent - self . _last_reset_time ) | Return the scan event statistics for this adapter |
22,425 | def reset_scan_stats ( self ) : self . _scan_event_count = 0 self . _v1_scan_count = 0 self . _v1_scan_response_count = 0 self . _v2_scan_count = 0 self . _device_scan_counts = { } self . _last_reset_time = time . time ( ) | Clears the scan event statistics and updates the last reset time |
22,426 | def start_scan ( self , active ) : self . _command_task . sync_command ( [ '_start_scan' , active ] ) self . scanning = True | Start the scanning task |
22,427 | def _open_tracing_interface ( self , conn_id , callback ) : try : handle = self . _find_handle ( conn_id ) services = self . _connections [ handle ] [ 'services' ] except ( ValueError , KeyError ) : callback ( conn_id , self . id , False , 'Connection closed unexpectedly before we could open the streaming interface' ) return self . _command_task . async_command ( [ '_enable_tracing' , handle , services ] , self . _on_interface_finished , { 'connection_id' : conn_id , 'callback' : callback } ) | Enable the debug tracing interface for this IOTile device |
22,428 | def _process_scan_event ( self , response ) : payload = response . payload length = len ( payload ) - 10 if length < 0 : return rssi , packet_type , sender , _addr_type , _bond , data = unpack ( "<bB6sBB%ds" % length , payload ) string_address = ':' . join ( [ format ( x , "02X" ) for x in bytearray ( sender [ : : - 1 ] ) ] ) if len ( data ) > 0 : data = bytearray ( data [ 1 : ] ) else : data = bytearray ( [ ] ) self . _scan_event_count += 1 if packet_type in ( 0 , 2 , 6 ) : if len ( data ) != 31 : return if data [ 22 ] == 0xFF and data [ 23 ] == 0xC0 and data [ 24 ] == 0x3 : self . _v1_scan_count += 1 self . _parse_v1_advertisement ( rssi , string_address , data ) elif data [ 3 ] == 27 and data [ 4 ] == 0x16 and data [ 5 ] == 0xdd and data [ 6 ] == 0xfd : self . _v2_scan_count += 1 self . _parse_v2_advertisement ( rssi , string_address , data ) else : pass elif packet_type == 4 : self . _v1_scan_response_count += 1 self . _parse_v1_scan_response ( string_address , data ) | Parse the BLE advertisement packet . |
22,429 | def _parse_v2_advertisement ( self , rssi , sender , data ) : if len ( data ) != 31 : return device_id , reboot_low , reboot_high_packed , flags , timestamp , battery , counter_packed , broadcast_stream_packed , broadcast_value , _mac = unpack ( "<LHBBLBBHLL" , data [ 7 : ] ) reboots = ( reboot_high_packed & 0xF ) << 16 | reboot_low counter = counter_packed & ( ( 1 << 5 ) - 1 ) broadcast_multiplex = counter_packed >> 5 broadcast_toggle = broadcast_stream_packed >> 15 broadcast_stream = broadcast_stream_packed & ( ( 1 << 15 ) - 1 ) self . _device_scan_counts . setdefault ( device_id , { 'v1' : 0 , 'v2' : 0 } ) [ 'v2' ] += 1 info = { 'connection_string' : sender , 'uuid' : device_id , 'pending_data' : bool ( flags & ( 1 << 0 ) ) , 'low_voltage' : bool ( flags & ( 1 << 1 ) ) , 'user_connected' : bool ( flags & ( 1 << 2 ) ) , 'signal_strength' : rssi , 'reboot_counter' : reboots , 'sequence' : counter , 'broadcast_toggle' : broadcast_toggle , 'timestamp' : timestamp , 'battery' : battery / 32.0 , 'advertising_version' : 2 } self . _trigger_callback ( 'on_scan' , self . id , info , self . ExpirationTime ) if broadcast_stream != 0xFFFF & ( ( 1 << 15 ) - 1 ) : if self . _throttle_broadcast and self . _check_update_seen_broadcast ( sender , timestamp , broadcast_stream , broadcast_value , broadcast_toggle , counter = counter , channel = broadcast_multiplex ) : return reading = IOTileReading ( timestamp , broadcast_stream , broadcast_value , reading_time = datetime . datetime . utcnow ( ) ) report = BroadcastReport . FromReadings ( info [ 'uuid' ] , [ reading ] , timestamp ) self . _trigger_callback ( 'on_report' , None , report ) | Parse the IOTile Specific advertisement packet |
22,430 | def probe_services ( self , handle , conn_id , callback ) : self . _command_task . async_command ( [ '_probe_services' , handle ] , callback , { 'connection_id' : conn_id , 'handle' : handle } ) | Given a connected device probe for its GATT services and characteristics |
22,431 | def probe_characteristics ( self , conn_id , handle , services ) : self . _command_task . async_command ( [ '_probe_characteristics' , handle , services ] , self . _probe_characteristics_finished , { 'connection_id' : conn_id , 'handle' : handle , 'services' : services } ) | Probe a device for all characteristics defined in its GATT table |
22,432 | def _on_disconnect ( self , result ) : success , _ , context = self . _parse_return ( result ) callback = context [ 'callback' ] connection_id = context [ 'connection_id' ] handle = context [ 'handle' ] callback ( connection_id , self . id , success , "No reason given" ) self . _remove_connection ( handle ) | Callback called when disconnection command finishes |
22,433 | def _parse_return ( cls , result ) : return_value = None success = result [ 'result' ] context = result [ 'context' ] if 'return_value' in result : return_value = result [ 'return_value' ] return success , return_value , context | Extract the result return value and context from a result object |
22,434 | def _get_connection ( self , handle , expect_state = None ) : conndata = self . _connections . get ( handle ) if conndata and expect_state is not None and conndata [ 'state' ] != expect_state : self . _logger . error ( "Connection in unexpected state, wanted=%s, got=%s" , expect_state , conndata [ 'state' ] ) return conndata | Get a connection object logging an error if its in an unexpected state |
22,435 | def _on_connection_finished ( self , result ) : success , retval , context = self . _parse_return ( result ) conn_id = context [ 'connection_id' ] callback = context [ 'callback' ] if success is False : callback ( conn_id , self . id , False , 'Timeout opening connection' ) with self . count_lock : self . connecting_count -= 1 return handle = retval [ 'handle' ] context [ 'disconnect_handler' ] = self . _on_connection_failed context [ 'connect_time' ] = time . time ( ) context [ 'state' ] = 'preparing' self . _connections [ handle ] = context self . probe_services ( handle , conn_id , self . _probe_services_finished ) | Callback when the connection attempt to a BLE device has finished |
22,436 | def _on_connection_failed ( self , conn_id , handle , clean , reason ) : with self . count_lock : self . connecting_count -= 1 self . _logger . info ( "_on_connection_failed conn_id=%d, reason=%s" , conn_id , str ( reason ) ) conndata = self . _get_connection ( handle ) if conndata is None : self . _logger . info ( "Unable to obtain connection data on unknown connection %d" , conn_id ) return callback = conndata [ 'callback' ] conn_id = conndata [ 'connection_id' ] failure_reason = conndata [ 'failure_reason' ] if 'error_code' in conndata and conndata [ 'error_code' ] == 0x23e and conndata [ 'retries' ] > 0 : self . _remove_connection ( handle ) self . connect_async ( conn_id , conndata [ 'connection_string' ] , callback , conndata [ 'retries' ] - 1 ) else : callback ( conn_id , self . id , False , failure_reason ) self . _remove_connection ( handle ) | Callback called from another thread when a connection attempt has failed . |
22,437 | def _probe_services_finished ( self , result ) : handle = result [ 'context' ] [ 'handle' ] conn_id = result [ 'context' ] [ 'connection_id' ] conndata = self . _get_connection ( handle , 'preparing' ) if conndata is None : self . _logger . info ( 'Connection disconnected before prob_services_finished, conn_id=%d' , conn_id ) return if result [ 'result' ] is False : conndata [ 'failed' ] = True conndata [ 'failure_reason' ] = 'Could not probe GATT services' self . disconnect_async ( conn_id , self . _on_connection_failed ) else : conndata [ 'services_done_time' ] = time . time ( ) self . probe_characteristics ( result [ 'context' ] [ 'connection_id' ] , result [ 'context' ] [ 'handle' ] , result [ 'return_value' ] [ 'services' ] ) | Callback called after a BLE device has had its GATT table completely probed |
22,438 | def _probe_characteristics_finished ( self , result ) : handle = result [ 'context' ] [ 'handle' ] conn_id = result [ 'context' ] [ 'connection_id' ] conndata = self . _get_connection ( handle , 'preparing' ) if conndata is None : self . _logger . info ( 'Connection disconnected before probe_char... finished, conn_id=%d' , conn_id ) return callback = conndata [ 'callback' ] if result [ 'result' ] is False : conndata [ 'failed' ] = True conndata [ 'failure_reason' ] = 'Could not probe GATT characteristics' self . disconnect_async ( conn_id , self . _on_connection_failed ) return services = result [ 'return_value' ] [ 'services' ] if TileBusService not in services : conndata [ 'failed' ] = True conndata [ 'failure_reason' ] = 'TileBus service not present in GATT services' self . disconnect_async ( conn_id , self . _on_connection_failed ) return conndata [ 'chars_done_time' ] = time . time ( ) service_time = conndata [ 'services_done_time' ] - conndata [ 'connect_time' ] char_time = conndata [ 'chars_done_time' ] - conndata [ 'services_done_time' ] total_time = service_time + char_time conndata [ 'state' ] = 'connected' conndata [ 'services' ] = services conndata [ 'parser' ] = IOTileReportParser ( report_callback = self . _on_report , error_callback = self . _on_report_error ) conndata [ 'parser' ] . context = conn_id del conndata [ 'disconnect_handler' ] with self . count_lock : self . connecting_count -= 1 self . _logger . info ( "Total time to connect to device: %.3f (%.3f enumerating services, %.3f enumerating chars)" , total_time , service_time , char_time ) callback ( conndata [ 'connection_id' ] , self . id , True , None ) | Callback when BLE adapter has finished probing services and characteristics for a device |
22,439 | def periodic_callback ( self ) : if self . stopped : return if not self . scanning and len ( self . _connections ) == 0 and self . connecting_count == 0 : self . _logger . info ( "Restarting scan for devices" ) self . start_scan ( self . _active_scan ) self . _logger . info ( "Finished restarting scan for devices" ) | Periodic cleanup tasks to maintain this adapter should be called every second |
22,440 | def convert_to_BuildError ( status , exc_info = None ) : if not exc_info and isinstance ( status , Exception ) : exc_info = ( status . __class__ , status , None ) if isinstance ( status , BuildError ) : buildError = status buildError . exitstatus = 2 elif isinstance ( status , ExplicitExit ) : status = status . status errstr = 'Explicit exit, status %s' % status buildError = BuildError ( errstr = errstr , status = status , exitstatus = status , exc_info = exc_info ) elif isinstance ( status , ( StopError , UserError ) ) : buildError = BuildError ( errstr = str ( status ) , status = 2 , exitstatus = 2 , exc_info = exc_info ) elif isinstance ( status , shutil . SameFileError ) : try : filename = status . filename except AttributeError : filename = None buildError = BuildError ( errstr = status . args [ 0 ] , status = status . errno , exitstatus = 2 , filename = filename , exc_info = exc_info ) elif isinstance ( status , ( EnvironmentError , OSError , IOError ) ) : try : filename = status . filename except AttributeError : filename = None buildError = BuildError ( errstr = status . strerror , status = status . errno , exitstatus = 2 , filename = filename , exc_info = exc_info ) elif isinstance ( status , Exception ) : buildError = BuildError ( errstr = '%s : %s' % ( status . __class__ . __name__ , status ) , status = 2 , exitstatus = 2 , exc_info = exc_info ) elif SCons . Util . is_String ( status ) : buildError = BuildError ( errstr = status , status = 2 , exitstatus = 2 ) else : buildError = BuildError ( errstr = "Error %s" % status , status = status , exitstatus = 2 ) return buildError | Convert any return code a BuildError Exception . |
22,441 | def format_config ( sensor_graph ) : cmdfile = CommandFile ( "Config Variables" , "1.0" ) for slot in sorted ( sensor_graph . config_database , key = lambda x : x . encode ( ) ) : for conf_var , conf_def in sorted ( sensor_graph . config_database [ slot ] . items ( ) ) : conf_type , conf_val = conf_def if conf_type == 'binary' : conf_val = 'hex:' + hexlify ( conf_val ) cmdfile . add ( "set_variable" , slot , conf_var , conf_type , conf_val ) return cmdfile . dump ( ) | Extract the config variables from this sensor graph in ASCII format . |
22,442 | def generate ( env ) : path , _f77 , _shf77 , version = get_xlf77 ( env ) if path : _f77 = os . path . join ( path , _f77 ) _shf77 = os . path . join ( path , _shf77 ) f77 . generate ( env ) env [ 'F77' ] = _f77 env [ 'SHF77' ] = _shf77 | Add Builders and construction variables for the Visual Age FORTRAN compiler to an Environment . |
22,443 | def DirScanner ( ** kw ) : kw [ 'node_factory' ] = SCons . Node . FS . Entry kw [ 'recursive' ] = only_dirs return SCons . Scanner . Base ( scan_on_disk , "DirScanner" , ** kw ) | Return a prototype Scanner instance for scanning directories for on - disk files |
22,444 | def DirEntryScanner ( ** kw ) : kw [ 'node_factory' ] = SCons . Node . FS . Entry kw [ 'recursive' ] = None return SCons . Scanner . Base ( scan_in_memory , "DirEntryScanner" , ** kw ) | Return a prototype Scanner instance for scanning directory Nodes for their in - memory entries |
22,445 | def scan_on_disk ( node , env , path = ( ) ) : try : flist = node . fs . listdir ( node . get_abspath ( ) ) except ( IOError , OSError ) : return [ ] e = node . Entry for f in filter ( do_not_scan , flist ) : e ( './' + f ) return scan_in_memory ( node , env , path ) | Scans a directory for on - disk files and directories therein . |
22,446 | def scan_in_memory ( node , env , path = ( ) ) : try : entries = node . entries except AttributeError : return [ ] entry_list = sorted ( filter ( do_not_scan , list ( entries . keys ( ) ) ) ) return [ entries [ n ] for n in entry_list ] | Scans a Node . FS . Dir for its in - memory entries . |
22,447 | def set_result ( self , result ) : if self . is_finished ( ) : raise InternalError ( "set_result called on finished AsynchronousResponse" , result = self . _result , exception = self . _exception ) self . _result = result self . finish ( ) | Finish this response and set the result . |
22,448 | def set_exception ( self , exc_class , exc_info , exc_stack ) : if self . is_finished ( ) : raise InternalError ( "set_exception called on finished AsynchronousResponse" , result = self . _result , exception = self . _exception ) self . _exception = ( exc_class , exc_info , exc_stack ) self . finish ( ) | Set an exception as the result of this operation . |
22,449 | def get_released_versions ( component ) : releases = get_tags ( ) releases = sorted ( [ ( x [ 0 ] , [ int ( y ) for y in x [ 1 ] . split ( '.' ) ] ) for x in releases ] , key = lambda x : x [ 1 ] ) [ : : - 1 ] return [ ( x [ 0 ] , "." . join ( map ( str , x [ 1 ] ) ) ) for x in releases if x [ 0 ] == component ] | Get all released versions of the given component ordered newest to oldest |
22,450 | def load_dependencies ( orig_tile , build_env ) : if 'DEPENDENCIES' not in build_env : build_env [ 'DEPENDENCIES' ] = [ ] dep_targets = [ ] chip = build_env [ 'ARCH' ] raw_arch_deps = chip . property ( 'depends' ) arch_deps = { } for key , value in raw_arch_deps . items ( ) : name , _ , _ = key . partition ( ',' ) arch_deps [ name ] = value for dep in orig_tile . dependencies : try : tile = IOTile ( os . path . join ( 'build' , 'deps' , dep [ 'unique_id' ] ) ) if dep [ 'name' ] not in arch_deps : tile . filter_products ( [ ] ) else : tile . filter_products ( arch_deps [ dep [ 'name' ] ] ) except ( ArgumentError , EnvironmentError ) : raise BuildError ( "Could not find required dependency" , name = dep [ 'name' ] ) build_env [ 'DEPENDENCIES' ] . append ( tile ) target = os . path . join ( tile . folder , 'module_settings.json' ) dep_targets . append ( target ) return dep_targets | Load all tile dependencies and filter only the products from each that we use |
22,451 | def find_dependency_wheels ( tile ) : return [ os . path . join ( x . folder , 'python' , x . support_wheel ) for x in _iter_dependencies ( tile ) if x . has_wheel ] | Return a list of all python wheel objects created by dependencies of this tile |
22,452 | def _check_ver_range ( self , version , ver_range ) : lower , upper , lower_inc , upper_inc = ver_range if lower is None and upper is None : return True if lower is not None : if lower_inc and version < lower : return False elif not lower_inc and version <= lower : return False if upper is not None : if upper_inc and version > upper : return False elif not upper_inc and version >= upper : return False if version . is_prerelease : if ( lower is None or not lower . is_prerelease ) and ( upper is None or not upper . is_prerelease ) : return False if ( lower is not None and version . release_tuple != lower . release_tuple ) and ( upper is not None and version . release_tuple != upper . release_tuple ) : return False return True | Check if version is included in ver_range |
22,453 | def _check_insersection ( self , version , ranges ) : for ver_range in ranges : if not self . _check_ver_range ( version , ver_range ) : return False return True | Check that a version is inside all of a list of ranges |
22,454 | def check ( self , version ) : for disjunct in self . _disjuncts : if self . _check_insersection ( version , disjunct ) : return True return False | Check that a version is inside this SemanticVersionRange |
22,455 | def filter ( self , versions , key = lambda x : x ) : return [ x for x in versions if self . check ( key ( x ) ) ] | Filter all of the versions in an iterable that match this version range |
22,456 | def FromString ( cls , range_string ) : disjuncts = None range_string = range_string . strip ( ) if len ( range_string ) == 0 : raise ArgumentError ( "You must pass a finite string to SemanticVersionRange.FromString" , range_string = range_string ) if len ( range_string ) == 1 and range_string [ 0 ] == '*' : conj = ( None , None , True , True ) disjuncts = [ [ conj ] ] elif range_string [ 0 ] == '^' : ver = range_string [ 1 : ] try : ver = SemanticVersion . FromString ( ver ) except DataError as err : raise ArgumentError ( "Could not parse ^X.Y.Z version" , parse_error = str ( err ) , range_string = range_string ) lower = ver upper = ver . inc_first_nonzero ( ) conj = ( lower , upper , True , False ) disjuncts = [ [ conj ] ] elif range_string [ 0 ] == '=' : ver = range_string [ 1 : ] try : ver = SemanticVersion . FromString ( ver ) except DataError as err : raise ArgumentError ( "Could not parse =X.Y.Z version" , parse_error = str ( err ) , range_string = range_string ) conj = ( ver , ver , True , True ) disjuncts = [ [ conj ] ] if disjuncts is None : raise ArgumentError ( "Invalid range specification that could not be parsed" , range_string = range_string ) return SemanticVersionRange ( disjuncts ) | Parse a version range string into a SemanticVersionRange |
22,457 | def _call_rpc ( self , address , rpc_id , payload ) : status , response = self . hw . stream . send_rpc ( address , rpc_id , payload , timeout = 1.1 ) return response | Call an RPC with the given information and return its response . |
22,458 | def FromReadings ( cls , uuid , readings ) : if len ( readings ) != 1 : raise ArgumentError ( "IndividualReading reports must be created with exactly one reading" , num_readings = len ( readings ) ) reading = readings [ 0 ] data = struct . pack ( "<BBHLLLL" , 0 , 0 , reading . stream , uuid , 0 , reading . raw_time , reading . value ) return IndividualReadingReport ( data ) | Generate an instance of the report format from a list of readings and a uuid |
22,459 | def decode ( self ) : fmt , _ , stream , uuid , sent_timestamp , reading_timestamp , reading_value = unpack ( "<BBHLLLL" , self . raw_report ) assert fmt == 0 time_base = self . received_time - datetime . timedelta ( seconds = sent_timestamp ) reading = IOTileReading ( reading_timestamp , stream , reading_value , time_base = time_base ) self . origin = uuid self . sent_timestamp = sent_timestamp return [ reading ] , [ ] | Decode this report into a single reading |
22,460 | def encode ( self ) : reading = self . visible_readings [ 0 ] data = struct . pack ( "<BBHLLLL" , 0 , 0 , reading . stream , self . origin , self . sent_timestamp , reading . raw_time , reading . value ) return bytearray ( data ) | Turn this report into a serialized bytearray that could be decoded with a call to decode |
22,461 | def is_LaTeX ( flist , env , abspath ) : savedpath = modify_env_var ( env , 'TEXINPUTS' , abspath ) paths = env [ 'ENV' ] [ 'TEXINPUTS' ] if SCons . Util . is_List ( paths ) : pass else : paths = paths . split ( os . pathsep ) if savedpath is _null : try : del env [ 'ENV' ] [ 'TEXINPUTS' ] except KeyError : pass else : env [ 'ENV' ] [ 'TEXINPUTS' ] = savedpath if Verbose : print ( "is_LaTeX search path " , paths ) print ( "files to search :" , flist ) for f in flist : if Verbose : print ( " checking for Latex source " , str ( f ) ) content = f . get_text_contents ( ) if LaTeX_re . search ( content ) : if Verbose : print ( "file %s is a LaTeX file" % str ( f ) ) return 1 if Verbose : print ( "file %s is not a LaTeX file" % str ( f ) ) inc_files = [ ] inc_files . extend ( include_re . findall ( content ) ) if Verbose : print ( "files included by '%s': " % str ( f ) , inc_files ) for src in inc_files : srcNode = FindFile ( src , [ '.tex' , '.ltx' , '.latex' ] , paths , env , requireExt = False ) fileList = [ srcNode , ] if Verbose : print ( "FindFile found " , srcNode ) if srcNode is not None : file_test = is_LaTeX ( fileList , env , abspath ) if file_test : return file_test if Verbose : print ( " done scanning " , str ( f ) ) return 0 | Scan a file list to decide if it s TeX - or LaTeX - flavored . |
22,462 | def TeXLaTeXStrFunction ( target = None , source = None , env = None ) : if env . GetOption ( "no_exec" ) : basedir = os . path . split ( str ( source [ 0 ] ) ) [ 0 ] abspath = os . path . abspath ( basedir ) if is_LaTeX ( source , env , abspath ) : result = env . subst ( '$LATEXCOM' , 0 , target , source ) + " ..." else : result = env . subst ( "$TEXCOM" , 0 , target , source ) + " ..." else : result = '' return result | A strfunction for TeX and LaTeX that scans the source file to decide the flavor of the source and then returns the appropriate command string . |
22,463 | def tex_eps_emitter ( target , source , env ) : ( target , source ) = tex_emitter_core ( target , source , env , TexGraphics ) return ( target , source ) | An emitter for TeX and LaTeX sources when executing tex or latex . It will accept . ps and . eps graphics files |
22,464 | def tex_pdf_emitter ( target , source , env ) : ( target , source ) = tex_emitter_core ( target , source , env , LatexGraphics ) return ( target , source ) | An emitter for TeX and LaTeX sources when executing pdftex or pdflatex . It will accept graphics files of types . pdf . jpg . png . gif and . tif |
22,465 | def generate ( env ) : global TeXLaTeXAction if TeXLaTeXAction is None : TeXLaTeXAction = SCons . Action . Action ( TeXLaTeXFunction , strfunction = TeXLaTeXStrFunction ) env . AppendUnique ( LATEXSUFFIXES = SCons . Tool . LaTeXSuffixes ) generate_common ( env ) from . import dvi dvi . generate ( env ) bld = env [ 'BUILDERS' ] [ 'DVI' ] bld . add_action ( '.tex' , TeXLaTeXAction ) bld . add_emitter ( '.tex' , tex_eps_emitter ) | Add Builders and construction variables for TeX to an Environment . |
22,466 | def is_win64 ( ) : global _is_win64 if _is_win64 is None : _is_win64 = False if os . environ . get ( 'PROCESSOR_ARCHITECTURE' , 'x86' ) != 'x86' : _is_win64 = True if os . environ . get ( 'PROCESSOR_ARCHITEW6432' ) : _is_win64 = True if os . environ . get ( 'ProgramW6432' ) : _is_win64 = True return _is_win64 | Return true if running on windows 64 bits . |
22,467 | def has_reg ( value ) : try : SCons . Util . RegOpenKeyEx ( SCons . Util . HKEY_LOCAL_MACHINE , value ) ret = True except SCons . Util . WinError : ret = False return ret | Return True if the given key exists in HKEY_LOCAL_MACHINE False otherwise . |
22,468 | def normalize_env ( env , keys , force = False ) : normenv = { } if env : for k in list ( env . keys ( ) ) : normenv [ k ] = copy . deepcopy ( env [ k ] ) for k in keys : if k in os . environ and ( force or not k in normenv ) : normenv [ k ] = os . environ [ k ] sys32_dir = os . path . join ( os . environ . get ( "SystemRoot" , os . environ . get ( "windir" , r"C:\Windows\system32" ) ) , "System32" ) if sys32_dir not in normenv [ 'PATH' ] : normenv [ 'PATH' ] = normenv [ 'PATH' ] + os . pathsep + sys32_dir sys32_wbem_dir = os . path . join ( sys32_dir , 'Wbem' ) if sys32_wbem_dir not in normenv [ 'PATH' ] : normenv [ 'PATH' ] = normenv [ 'PATH' ] + os . pathsep + sys32_wbem_dir debug ( "PATH: %s" % normenv [ 'PATH' ] ) return normenv | Given a dictionary representing a shell environment add the variables from os . environ needed for the processing of . bat files ; the keys are controlled by the keys argument . |
22,469 | def get_output ( vcbat , args = None , env = None ) : if env is None : env = SCons . Environment . Environment ( tools = [ ] ) vs_vc_vars = [ 'COMSPEC' , 'VS140COMNTOOLS' , 'VS120COMNTOOLS' , 'VS110COMNTOOLS' , 'VS100COMNTOOLS' , 'VS90COMNTOOLS' , 'VS80COMNTOOLS' , 'VS71COMNTOOLS' , 'VS70COMNTOOLS' , 'VS60COMNTOOLS' , ] env [ 'ENV' ] = normalize_env ( env [ 'ENV' ] , vs_vc_vars , force = False ) if args : debug ( "Calling '%s %s'" % ( vcbat , args ) ) popen = SCons . Action . _subproc ( env , '"%s" %s & set' % ( vcbat , args ) , stdin = 'devnull' , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) else : debug ( "Calling '%s'" % vcbat ) popen = SCons . Action . _subproc ( env , '"%s" & set' % vcbat , stdin = 'devnull' , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) stdout = popen . stdout . read ( ) stderr = popen . stderr . read ( ) if stderr : import sys sys . stderr . write ( stderr ) if popen . wait ( ) != 0 : raise IOError ( stderr . decode ( "mbcs" ) ) output = stdout . decode ( "mbcs" ) return output | Parse the output of given bat file with given args . |
22,470 | def generate ( env ) : for t in SCons . Tool . tool_list ( env [ 'PLATFORM' ] , env ) : SCons . Tool . Tool ( t ) ( env ) | Add default tools . |
22,471 | def subst_path ( self , env , target , source ) : result = [ ] for type , value in self . pathlist : if type == TYPE_STRING_SUBST : value = env . subst ( value , target = target , source = source , conv = node_conv ) if SCons . Util . is_Sequence ( value ) : result . extend ( SCons . Util . flatten ( value ) ) elif value : result . append ( value ) elif type == TYPE_OBJECT : value = node_conv ( value ) if value : result . append ( value ) elif value : result . append ( value ) return tuple ( result ) | Performs construction variable substitution on a pre - digested PathList for a specific target and source . |
22,472 | def _PathList_key ( self , pathlist ) : if SCons . Util . is_Sequence ( pathlist ) : pathlist = tuple ( SCons . Util . flatten ( pathlist ) ) return pathlist | Returns the key for memoization of PathLists . |
22,473 | def PathList ( self , pathlist ) : pathlist = self . _PathList_key ( pathlist ) try : memo_dict = self . _memo [ 'PathList' ] except KeyError : memo_dict = { } self . _memo [ 'PathList' ] = memo_dict else : try : return memo_dict [ pathlist ] except KeyError : pass result = _PathList ( pathlist ) memo_dict [ pathlist ] = result return result | Returns the cached _PathList object for the specified pathlist creating and caching a new object as necessary . |
22,474 | def DefaultEnvironment ( * args , ** kw ) : global _default_env if not _default_env : import SCons . Util _default_env = SCons . Environment . Environment ( * args , ** kw ) if SCons . Util . md5 : _default_env . Decider ( 'MD5' ) else : _default_env . Decider ( 'timestamp-match' ) global DefaultEnvironment DefaultEnvironment = _fetch_DefaultEnvironment _default_env . _CacheDir_path = None return _default_env | Initial public entry point for creating the default construction Environment . |
22,475 | def _concat ( prefix , list , suffix , env , f = lambda x : x , target = None , source = None ) : if not list : return list l = f ( SCons . PathList . PathList ( list ) . subst_path ( env , target , source ) ) if l is not None : list = l return _concat_ixes ( prefix , list , suffix , env ) | Creates a new list from list by first interpolating each element in the list using the env dictionary and then calling f on the list and finally calling _concat_ixes to concatenate prefix and suffix onto each element of the list . |
22,476 | def _concat_ixes ( prefix , list , suffix , env ) : result = [ ] prefix = str ( env . subst ( prefix , SCons . Subst . SUBST_RAW ) ) suffix = str ( env . subst ( suffix , SCons . Subst . SUBST_RAW ) ) for x in list : if isinstance ( x , SCons . Node . FS . File ) : result . append ( x ) continue x = str ( x ) if x : if prefix : if prefix [ - 1 ] == ' ' : result . append ( prefix [ : - 1 ] ) elif x [ : len ( prefix ) ] != prefix : x = prefix + x result . append ( x ) if suffix : if suffix [ 0 ] == ' ' : result . append ( suffix [ 1 : ] ) elif x [ - len ( suffix ) : ] != suffix : result [ - 1 ] = result [ - 1 ] + suffix return result | Creates a new list from list by concatenating the prefix and suffix arguments onto each element of the list . A trailing space on prefix or leading space on suffix will cause them to be put into separate list elements rather than being concatenated . |
22,477 | def processDefines ( defs ) : if SCons . Util . is_List ( defs ) : l = [ ] for d in defs : if d is None : continue elif SCons . Util . is_List ( d ) or isinstance ( d , tuple ) : if len ( d ) >= 2 : l . append ( str ( d [ 0 ] ) + '=' + str ( d [ 1 ] ) ) else : l . append ( str ( d [ 0 ] ) ) elif SCons . Util . is_Dict ( d ) : for macro , value in d . items ( ) : if value is not None : l . append ( str ( macro ) + '=' + str ( value ) ) else : l . append ( str ( macro ) ) elif SCons . Util . is_String ( d ) : l . append ( str ( d ) ) else : raise SCons . Errors . UserError ( "DEFINE %s is not a list, dict, string or None." % repr ( d ) ) elif SCons . Util . is_Dict ( defs ) : l = [ ] for k , v in sorted ( defs . items ( ) ) : if v is None : l . append ( str ( k ) ) else : l . append ( str ( k ) + '=' + str ( v ) ) else : l = [ str ( defs ) ] return l | process defines resolving strings lists dictionaries into a list of strings |
22,478 | def _defines ( prefix , defs , suffix , env , c = _concat_ixes ) : return c ( prefix , env . subst_path ( processDefines ( defs ) ) , suffix , env ) | A wrapper around _concat_ixes that turns a list or string into a list of C preprocessor command - line definitions . |
22,479 | def Scanner ( function , * args , ** kw ) : if SCons . Util . is_Dict ( function ) : return Selector ( function , * args , ** kw ) else : return Base ( function , * args , ** kw ) | Public interface factory function for creating different types of Scanners based on the different types of functions that may be supplied . |
22,480 | def ReportLength ( cls , header ) : parsed_header = cls . _parse_header ( header ) auth_size = cls . _AUTH_BLOCK_LENGTHS . get ( parsed_header . auth_type ) if auth_size is None : raise DataError ( "Unknown auth block size in BroadcastReport" ) return cls . _HEADER_LENGTH + parsed_header . reading_length + auth_size | Given a header of HeaderLength bytes calculate the size of this report . |
22,481 | def FromReadings ( cls , uuid , readings , sent_timestamp = 0 ) : header = struct . pack ( "<BBHLLL" , cls . ReportType , 0 , len ( readings ) * 16 , uuid , sent_timestamp , 0 ) packed_readings = bytearray ( ) for reading in readings : packed_reading = struct . pack ( "<HHLLL" , reading . stream , 0 , reading . reading_id , reading . raw_time , reading . value ) packed_readings += bytearray ( packed_reading ) return BroadcastReport ( bytearray ( header ) + packed_readings ) | Generate a broadcast report from a list of readings and a uuid . |
22,482 | def decode ( self ) : parsed_header = self . _parse_header ( self . raw_report [ : self . _HEADER_LENGTH ] ) auth_size = self . _AUTH_BLOCK_LENGTHS . get ( parsed_header . auth_type ) assert auth_size is not None assert parsed_header . reading_length % 16 == 0 time_base = self . received_time - datetime . timedelta ( seconds = parsed_header . sent_timestamp ) readings = self . raw_report [ self . _HEADER_LENGTH : self . _HEADER_LENGTH + parsed_header . reading_length ] parsed_readings = [ ] for i in range ( 0 , len ( readings ) , 16 ) : reading = readings [ i : i + 16 ] stream , _ , reading_id , timestamp , value = struct . unpack ( "<HHLLL" , reading ) parsed = IOTileReading ( timestamp , stream , value , time_base = time_base , reading_id = reading_id ) parsed_readings . append ( parsed ) self . sent_timestamp = parsed_header . sent_timestamp self . origin = parsed_header . uuid return parsed_readings , [ ] | Decode this report into a list of visible readings . |
22,483 | def start ( self , device ) : super ( NativeBLEVirtualInterface , self ) . start ( device ) self . set_advertising ( True ) | Start serving access to this VirtualIOTileDevice |
22,484 | def register_gatt_table ( self ) : services = [ BLEService , TileBusService ] characteristics = [ NameChar , AppearanceChar , ReceiveHeaderChar , ReceivePayloadChar , SendHeaderChar , SendPayloadChar , StreamingChar , HighSpeedChar , TracingChar ] self . bable . set_gatt_table ( services , characteristics ) | Register the GATT table into baBLE . |
22,485 | def set_advertising ( self , enabled ) : if enabled : self . bable . set_advertising ( enabled = True , uuids = [ TileBusService . uuid ] , name = "V_IOTile " , company_id = ArchManuID , advertising_data = self . _advertisement ( ) , scan_response = self . _scan_response ( ) , sync = True ) else : try : self . bable . set_advertising ( enabled = False , sync = True ) except bable_interface . BaBLEException : pass | Toggle advertising . |
22,486 | def _advertisement ( self ) : flags = int ( self . device . pending_data ) | ( 0 << 1 ) | ( 0 << 2 ) | ( 1 << 3 ) | ( 1 << 4 ) return struct . pack ( "<LH" , self . device . iotile_id , flags ) | Create advertisement data . |
22,487 | def _scan_response ( self ) : voltage = struct . pack ( "<H" , int ( self . voltage * 256 ) ) reading = struct . pack ( "<HLLL" , 0xFFFF , 0 , 0 , 0 ) response = voltage + reading return response | Create scan response data . |
22,488 | def stop_sync ( self ) : if self . connected : self . disconnect_sync ( self . _connection_handle ) self . set_advertising ( False ) self . bable . stop ( ) self . actions . queue . clear ( ) | Safely stop this BLED112 instance without leaving it in a weird state . |
22,489 | def disconnect_sync ( self , connection_handle ) : self . bable . disconnect ( connection_handle = connection_handle , sync = True ) | Synchronously disconnect from whoever has connected to us |
22,490 | def _stream_data ( self , chunk = None ) : self . _stream_sm_running = True if chunk is None : chunk = self . _next_streaming_chunk ( 20 ) if chunk is None or len ( chunk ) == 0 : self . _stream_sm_running = False return try : self . _send_notification ( StreamingChar . value_handle , chunk ) self . _defer ( self . _stream_data ) except bable_interface . BaBLEException as err : if err . packet . status == 'Rejected' : time . sleep ( 0.05 ) self . _defer ( self . _stream_data , [ chunk ] ) else : self . _audit ( 'ErrorStreamingReport' ) self . _logger . exception ( "Error while streaming data" ) | Stream reports to the ble client in 20 byte chunks |
22,491 | def _send_trace ( self , chunk = None ) : self . _trace_sm_running = True if chunk is None : chunk = self . _next_tracing_chunk ( 20 ) if chunk is None or len ( chunk ) == 0 : self . _trace_sm_running = False return try : self . _send_notification ( TracingChar . value_handle , chunk ) self . _defer ( self . _send_trace ) except bable_interface . BaBLEException as err : if err . packet . status == 'Rejected' : time . sleep ( 0.05 ) self . _defer ( self . _send_trace , [ chunk ] ) else : self . _audit ( 'ErrorStreamingTrace' ) self . _logger . exception ( "Error while tracing data" ) | Stream tracing data to the ble client in 20 byte chunks |
22,492 | def process ( self ) : super ( NativeBLEVirtualInterface , self ) . process ( ) if ( not self . _stream_sm_running ) and ( not self . reports . empty ( ) ) : self . _stream_data ( ) if ( not self . _trace_sm_running ) and ( not self . traces . empty ( ) ) : self . _send_trace ( ) | Periodic nonblocking processes |
22,493 | async def _populate_name_map ( self ) : services = await self . sync_services ( ) with self . _state_lock : self . services = services for i , name in enumerate ( self . services . keys ( ) ) : self . _name_map [ i ] = name | Populate the name map of services as reported by the supervisor |
22,494 | def local_service ( self , name_or_id ) : if not self . _loop . inside_loop ( ) : self . _state_lock . acquire ( ) try : if isinstance ( name_or_id , int ) : if name_or_id not in self . _name_map : raise ArgumentError ( "Unknown ID used to look up service" , id = name_or_id ) name = self . _name_map [ name_or_id ] else : name = name_or_id if name not in self . services : raise ArgumentError ( "Unknown service name" , name = name ) return copy ( self . services [ name ] ) finally : if not self . _loop . inside_loop ( ) : self . _state_lock . release ( ) | Get the locally synced information for a service . |
22,495 | def local_services ( self ) : if not self . _loop . inside_loop ( ) : self . _state_lock . acquire ( ) try : return sorted ( [ ( index , name ) for index , name in self . _name_map . items ( ) ] , key = lambda element : element [ 0 ] ) finally : if not self . _loop . inside_loop ( ) : self . _state_lock . release ( ) | Get a list of id name pairs for all of the known synced services . |
22,496 | async def sync_services ( self ) : services = { } servs = await self . list_services ( ) for i , serv in enumerate ( servs ) : info = await self . service_info ( serv ) status = await self . service_status ( serv ) messages = await self . get_messages ( serv ) headline = await self . get_headline ( serv ) services [ serv ] = states . ServiceState ( info [ 'short_name' ] , info [ 'long_name' ] , info [ 'preregistered' ] , i ) services [ serv ] . state = status [ 'numeric_status' ] for message in messages : services [ serv ] . post_message ( message . level , message . message , message . count , message . created ) if headline is not None : services [ serv ] . set_headline ( headline . level , headline . message , headline . created ) return services | Poll the current state of all services . |
22,497 | def post_state ( self , name , state ) : self . post_command ( OPERATIONS . CMD_UPDATE_STATE , { 'name' : name , 'new_status' : state } ) | Asynchronously try to update the state for a service . |
22,498 | def post_error ( self , name , message ) : self . post_command ( OPERATIONS . CMD_POST_MESSAGE , _create_message ( name , states . ERROR_LEVEL , message ) ) | Asynchronously post a user facing error message about a service . |
22,499 | def post_warning ( self , name , message ) : self . post_command ( OPERATIONS . CMD_POST_MESSAGE , _create_message ( name , states . WARNING_LEVEL , message ) ) | Asynchronously post a user facing warning message about a service . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.