idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
21,500 | def _tobinarray_really ( self , start , end , pad , size ) : if pad is None : pad = self . padding bin = array ( 'B' ) if self . _buf == { } and None in ( start , end ) : return bin if size is not None and size <= 0 : raise ValueError ( "tobinarray: wrong value for size" ) start , end = self . _get_start_end ( start , end , size ) for i in range_g ( start , end + 1 ) : bin . append ( self . _buf . get ( i , pad ) ) return bin | Return binary array . |
21,501 | def tobinstr ( self , start = None , end = None , pad = _DEPRECATED , size = None ) : if not isinstance ( pad , _DeprecatedParam ) : print ( "IntelHex.tobinstr: 'pad' parameter is deprecated." ) if pad is not None : print ( "Please, use IntelHex.padding attribute instead." ) else : print ( "Please, don't pass it explicitly." ) print ( "Use syntax like this: ih.tobinstr(start=xxx, end=yyy, size=zzz)" ) else : pad = None return self . _tobinstr_really ( start , end , pad , size ) | Convert to binary form and return as binary string . |
21,502 | def tobinfile ( self , fobj , start = None , end = None , pad = _DEPRECATED , size = None ) : if not isinstance ( pad , _DeprecatedParam ) : print ( "IntelHex.tobinfile: 'pad' parameter is deprecated." ) if pad is not None : print ( "Please, use IntelHex.padding attribute instead." ) else : print ( "Please, don't pass it explicitly." ) print ( "Use syntax like this: ih.tobinfile(start=xxx, end=yyy, size=zzz)" ) else : pad = None if getattr ( fobj , "write" , None ) is None : fobj = open ( fobj , "wb" ) close_fd = True else : close_fd = False fobj . write ( self . _tobinstr_really ( start , end , pad , size ) ) if close_fd : fobj . close ( ) | Convert to binary and write to file . |
21,503 | def todict ( self ) : r = { } r . update ( self . _buf ) if self . start_addr : r [ 'start_addr' ] = self . start_addr return r | Convert to python dictionary . |
21,504 | def tofile ( self , fobj , format ) : if format == 'hex' : self . write_hex_file ( fobj ) elif format == 'bin' : self . tobinfile ( fobj ) else : raise ValueError ( 'format should be either "hex" or "bin";' ' got %r instead' % format ) | Write data to hex or bin file . Preferred method over tobin or tohex . |
21,505 | def gets ( self , addr , length ) : a = array ( 'B' , asbytes ( '\0' * length ) ) try : for i in range_g ( length ) : a [ i ] = self . _buf [ addr + i ] except KeyError : raise NotEnoughDataError ( address = addr , length = length ) return array_tobytes ( a ) | Get string of bytes from given address . If any entries are blank from addr through addr + length a NotEnoughDataError exception will be raised . Padding is not used . |
21,506 | def puts ( self , addr , s ) : a = array ( 'B' , asbytes ( s ) ) for i in range_g ( len ( a ) ) : self . _buf [ addr + i ] = a [ i ] | Put string of bytes at given address . Will overwrite any previous entries . |
21,507 | def getsz ( self , addr ) : i = 0 try : while True : if self . _buf [ addr + i ] == 0 : break i += 1 except KeyError : raise NotEnoughDataError ( msg = ( 'Bad access at 0x%X: ' 'not enough data to read zero-terminated string' ) % addr ) return self . gets ( addr , i ) | Get zero - terminated bytes string from given address . Will raise NotEnoughDataError exception if a hole is encountered before a 0 . |
21,508 | def putsz ( self , addr , s ) : self . puts ( addr , s ) self . _buf [ addr + len ( s ) ] = 0 | Put bytes string in object at addr and append terminating zero at end . |
21,509 | def dump ( self , tofile = None , width = 16 , withpadding = False ) : if not isinstance ( width , int ) or width < 1 : raise ValueError ( 'width must be a positive integer.' ) width = int ( width ) if tofile is None : tofile = sys . stdout if self . start_addr is not None : cs = self . start_addr . get ( 'CS' ) ip = self . start_addr . get ( 'IP' ) eip = self . start_addr . get ( 'EIP' ) if eip is not None and cs is None and ip is None : tofile . write ( 'EIP = 0x%08X\n' % eip ) elif eip is None and cs is not None and ip is not None : tofile . write ( 'CS = 0x%04X, IP = 0x%04X\n' % ( cs , ip ) ) else : tofile . write ( 'start_addr = %r\n' % start_addr ) addresses = dict_keys ( self . _buf ) if addresses : addresses . sort ( ) minaddr = addresses [ 0 ] maxaddr = addresses [ - 1 ] startaddr = ( minaddr // width ) * width endaddr = ( ( maxaddr // width ) + 1 ) * width maxdigits = max ( len ( hex ( endaddr ) ) - 2 , 4 ) templa = '%%0%dX' % maxdigits rangewidth = range_l ( width ) if withpadding : pad = self . padding else : pad = None for i in range_g ( startaddr , endaddr , width ) : tofile . write ( templa % i ) tofile . write ( ' ' ) s = [ ] for j in rangewidth : x = self . _buf . get ( i + j , pad ) if x is not None : tofile . write ( ' %02X' % x ) if 32 <= x < 127 : s . append ( chr ( x ) ) else : s . append ( '.' ) else : tofile . write ( ' --' ) s . append ( ' ' ) tofile . write ( ' |' + '' . join ( s ) + '|\n' ) | Dump object content to specified file object or to stdout if None . Format is a hexdump with some header information at the beginning addresses on the left and data on right . |
21,510 | def segments ( self ) : addresses = self . addresses ( ) if not addresses : return [ ] elif len ( addresses ) == 1 : return ( [ ( addresses [ 0 ] , addresses [ 0 ] + 1 ) ] ) adjacent_differences = [ ( b - a ) for ( a , b ) in zip ( addresses [ : - 1 ] , addresses [ 1 : ] ) ] breaks = [ i for ( i , x ) in enumerate ( adjacent_differences ) if x > 1 ] endings = [ addresses [ b ] for b in breaks ] endings . append ( addresses [ - 1 ] ) beginings = [ addresses [ b + 1 ] for b in breaks ] beginings . insert ( 0 , addresses [ 0 ] ) return [ ( a , b + 1 ) for ( a , b ) in zip ( beginings , endings ) ] | Return a list of ordered tuple objects representing contiguous occupied data addresses . Each tuple has a length of two and follows the semantics of the range and xrange objects . The second entry of the tuple is always an integer greater than the first entry . |
21,511 | def get_memory_size ( self ) : n = sys . getsizeof ( self ) n += sys . getsizeof ( self . padding ) n += total_size ( self . start_addr ) n += total_size ( self . _buf ) n += sys . getsizeof ( self . _offset ) return n | Returns the approximate memory footprint for data . |
21,512 | def _from_bytes ( bytes ) : assert len ( bytes ) >= 4 s = ( - sum ( bytes ) ) & 0x0FF bin = array ( 'B' , bytes + [ s ] ) return ':' + asstr ( hexlify ( array_tobytes ( bin ) ) ) . upper ( ) | Takes a list of bytes computes the checksum and outputs the entire record as a string . bytes should be the hex record without the colon or final checksum . |
21,513 | def create_release_settings_action ( target , source , env ) : with open ( str ( source [ 0 ] ) , "r" ) as fileobj : settings = json . load ( fileobj ) settings [ 'release' ] = True settings [ 'release_date' ] = datetime . datetime . utcnow ( ) . isoformat ( ) settings [ 'dependency_versions' ] = { } for dep in env [ 'TILE' ] . dependencies : tile = IOTile ( os . path . join ( 'build' , 'deps' , dep [ 'unique_id' ] ) ) settings [ 'dependency_versions' ] [ dep [ 'unique_id' ] ] = str ( tile . parsed_version ) with open ( str ( target [ 0 ] ) , "w" ) as fileobj : json . dump ( settings , fileobj , indent = 4 ) | Copy module_settings . json and add release and build information |
21,514 | def copy_extra_files ( tile ) : env = Environment ( tools = [ ] ) outputbase = os . path . join ( 'build' , 'output' ) for src , dest in tile . settings . get ( 'copy_files' , { } ) . items ( ) : outputfile = os . path . join ( outputbase , dest ) env . Command ( [ outputfile ] , [ src ] , Copy ( "$TARGET" , "$SOURCE" ) ) resolver = ProductResolver . Create ( ) for src , dest in tile . settings . get ( 'copy_products' , { } ) . items ( ) : prod = resolver . find_unique ( None , src ) outputfile = os . path . join ( outputbase , dest ) env . Command ( [ outputfile ] , [ prod . full_path ] , Copy ( "$TARGET" , "$SOURCE" ) ) | Copy all files listed in a copy_files and copy_products section . |
21,515 | def generate ( env ) : static_obj , shared_obj = SCons . Tool . createObjBuilders ( env ) for suffix in ASSuffixes : static_obj . add_action ( suffix , SCons . Defaults . ASAction ) shared_obj . add_action ( suffix , SCons . Defaults . ASAction ) static_obj . add_emitter ( suffix , SCons . Defaults . StaticObjectEmitter ) shared_obj . add_emitter ( suffix , SCons . Defaults . SharedObjectEmitter ) for suffix in ASPPSuffixes : static_obj . add_action ( suffix , SCons . Defaults . ASPPAction ) shared_obj . add_action ( suffix , SCons . Defaults . ASPPAction ) static_obj . add_emitter ( suffix , SCons . Defaults . StaticObjectEmitter ) shared_obj . add_emitter ( suffix , SCons . Defaults . SharedObjectEmitter ) env [ 'AS' ] = 'ml' env [ 'ASFLAGS' ] = SCons . Util . CLVar ( '/nologo' ) env [ 'ASPPFLAGS' ] = '$ASFLAGS' env [ 'ASCOM' ] = '$AS $ASFLAGS /c /Fo$TARGET $SOURCES' env [ 'ASPPCOM' ] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c /Fo$TARGET $SOURCES' env [ 'STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME' ] = 1 | Add Builders and construction variables for masm to an Environment . |
21,516 | def median ( values ) : values . sort ( ) n = int ( len ( values ) / 2 ) return values [ n ] | Return median value for the list of values . |
21,517 | def time_coef ( tc , nc , tb , nb ) : tc = float ( tc ) nc = float ( nc ) tb = float ( tb ) nb = float ( nb ) q = ( tc * nb ) / ( tb * nc ) return q | Return time coefficient relative to base numbers . |
21,518 | def main ( argv = None ) : import getopt test_read = None test_write = None n = 3 if argv is None : argv = sys . argv [ 1 : ] try : opts , args = getopt . getopt ( argv , 'hn:rw' , [ ] ) for o , a in opts : if o == '-h' : print ( HELP ) return 0 elif o == '-n' : n = int ( a ) elif o == '-r' : test_read = True elif o == '-w' : test_write = True if args : raise getopt . GetoptError ( 'Arguments are not used.' ) except getopt . GetoptError : msg = sys . exc_info ( ) [ 1 ] txt = str ( msg ) print ( txt ) return 1 if ( test_read , test_write ) == ( None , None ) : test_read = test_write = True m = Measure ( n , test_read , test_write ) m . measure_all ( ) m . print_report ( ) return 0 | Main function to run benchmarks . |
21,519 | def measure_one ( self , data ) : _unused , hexstr , ih = data tread , twrite = 0.0 , 0.0 if self . read : tread = run_readtest_N_times ( intelhex . IntelHex , hexstr , self . n ) [ 0 ] if self . write : twrite = run_writetest_N_times ( ih . write_hex_file , self . n ) [ 0 ] return tread , twrite | Do measuring of read and write operations . |
21,520 | def _get_key ( cls , device_id ) : var_name = "USER_KEY_{0:08X}" . format ( device_id ) if var_name not in os . environ : raise NotFoundError ( "No user key could be found for devices" , device_id = device_id , expected_variable_name = var_name ) key_var = os . environ [ var_name ] if len ( key_var ) != 64 : raise NotFoundError ( "User key in variable is not the correct length, should be 64 hex characters" , device_id = device_id , key_value = key_var ) try : key = binascii . unhexlify ( key_var ) except ValueError : raise NotFoundError ( "User key in variable could not be decoded from hex" , device_id = device_id , key_value = key_var ) if len ( key ) != 32 : raise NotFoundError ( "User key in variable is not the correct length, should be 64 hex characters" , device_id = device_id , key_value = key_var ) return key | Attempt to get a user key from an environment variable |
21,521 | def decrypt_report ( self , device_id , root , data , ** kwargs ) : report_key = self . _verify_derive_key ( device_id , root , ** kwargs ) try : from Crypto . Cipher import AES import Crypto . Util . Counter except ImportError : raise NotFoundError ctr = Crypto . Util . Counter . new ( 128 ) encryptor = AES . new ( bytes ( report_key [ : 16 ] ) , AES . MODE_CTR , counter = ctr ) decrypted = encryptor . decrypt ( bytes ( data ) ) return { 'data' : decrypted } | Decrypt a buffer of report data on behalf of a device . |
21,522 | def join_path ( path ) : if isinstance ( path , str ) : return path return os . path . join ( * path ) | If given a string return it otherwise combine a list into a string using os . path . join |
21,523 | def build_defines ( defines ) : return [ '-D"%s=%s"' % ( x , str ( y ) ) for x , y in defines . items ( ) if y is not None ] | Build a list of - D directives to pass to the compiler . |
21,524 | def _open_interface ( self , conn_id , iface , callback ) : try : context = self . conns . get_context ( conn_id ) except ArgumentError : callback ( conn_id , self . id , False , "Could not find connection information" ) return self . conns . begin_operation ( conn_id , 'open_interface' , callback , self . get_config ( 'default_timeout' ) ) topics = context [ 'topics' ] open_iface_message = { 'key' : context [ 'key' ] , 'type' : 'command' , 'operation' : 'open_interface' , 'client' : self . name , 'interface' : iface } self . client . publish ( topics . action , open_iface_message ) | Open an interface on this device |
21,525 | def stop_sync ( self ) : conn_ids = self . conns . get_connections ( ) for conn in list ( conn_ids ) : try : self . disconnect_sync ( conn ) except HardwareError : pass self . client . disconnect ( ) self . conns . stop ( ) | Synchronously stop this adapter |
21,526 | def probe_async ( self , callback ) : topics = MQTTTopicValidator ( self . prefix ) self . client . publish ( topics . probe , { 'type' : 'command' , 'operation' : 'probe' , 'client' : self . name } ) callback ( self . id , True , None ) | Probe for visible devices connected to this DeviceAdapter . |
21,527 | def periodic_callback ( self ) : while True : try : action = self . _deferred . get ( False ) action ( ) except queue . Empty : break except Exception : self . _logger . exception ( 'Exception in periodic callback' ) | Periodically help maintain adapter internal state |
21,528 | def _bind_topics ( self , topics ) : self . client . subscribe ( topics . status , self . _on_status_message ) self . client . subscribe ( topics . tracing , self . _on_trace ) self . client . subscribe ( topics . streaming , self . _on_report ) self . client . subscribe ( topics . response , self . _on_response_message ) | Subscribe to all the topics we need to communication with this device |
21,529 | def _unbind_topics ( self , topics ) : self . client . unsubscribe ( topics . status ) self . client . unsubscribe ( topics . tracing ) self . client . unsubscribe ( topics . streaming ) self . client . unsubscribe ( topics . response ) | Unsubscribe to all of the topics we needed for communication with device |
21,530 | def _find_connection ( self , topic ) : parts = topic . split ( '/' ) if len ( parts ) < 3 : return None slug = parts [ - 3 ] return slug | Attempt to find a connection id corresponding with a topic |
21,531 | def _on_report ( self , sequence , topic , message ) : try : conn_key = self . _find_connection ( topic ) conn_id = self . conns . get_connection_id ( conn_key ) except ArgumentError : self . _logger . warn ( "Dropping report message that does not correspond with a known connection, topic=%s" , topic ) return try : rep_msg = messages . ReportNotification . verify ( message ) serialized_report = { } serialized_report [ 'report_format' ] = rep_msg [ 'report_format' ] serialized_report [ 'encoded_report' ] = rep_msg [ 'report' ] serialized_report [ 'received_time' ] = datetime . datetime . strptime ( rep_msg [ 'received_time' ] . encode ( ) . decode ( ) , "%Y%m%dT%H:%M:%S.%fZ" ) report = self . report_parser . deserialize_report ( serialized_report ) self . _trigger_callback ( 'on_report' , conn_id , report ) except Exception : self . _logger . exception ( "Error processing report conn_id=%d" , conn_id ) | Process a report received from a device . |
21,532 | def _on_trace ( self , sequence , topic , message ) : try : conn_key = self . _find_connection ( topic ) conn_id = self . conns . get_connection_id ( conn_key ) except ArgumentError : self . _logger . warn ( "Dropping trace message that does not correspond with a known connection, topic=%s" , topic ) return try : tracing = messages . TracingNotification . verify ( message ) self . _trigger_callback ( 'on_trace' , conn_id , tracing [ 'trace' ] ) except Exception : self . _logger . exception ( "Error processing trace conn_id=%d" , conn_id ) | Process a trace received from a device . |
21,533 | def _on_status_message ( self , sequence , topic , message ) : self . _logger . debug ( "Received message on (topic=%s): %s" % ( topic , message ) ) try : conn_key = self . _find_connection ( topic ) except ArgumentError : self . _logger . warn ( "Dropping message that does not correspond with a known connection, message=%s" , message ) return if messages . ConnectionResponse . matches ( message ) : if self . name != message [ 'client' ] : self . _logger . debug ( "Connection response received for a different client, client=%s, name=%s" , message [ 'client' ] , self . name ) return self . conns . finish_connection ( conn_key , message [ 'success' ] , message . get ( 'failure_reason' , None ) ) else : self . _logger . warn ( "Dropping message that did not correspond with a known schema, message=%s" , message ) | Process a status message received |
21,534 | def _on_response_message ( self , sequence , topic , message ) : try : conn_key = self . _find_connection ( topic ) context = self . conns . get_context ( conn_key ) except ArgumentError : self . _logger . warn ( "Dropping message that does not correspond with a known connection, message=%s" , message ) return if 'client' in message and message [ 'client' ] != self . name : self . _logger . debug ( "Dropping message that is for another client %s, we are %s" , message [ 'client' ] , self . name ) if messages . DisconnectionResponse . matches ( message ) : self . conns . finish_disconnection ( conn_key , message [ 'success' ] , message . get ( 'failure_reason' , None ) ) elif messages . OpenInterfaceResponse . matches ( message ) : self . conns . finish_operation ( conn_key , message [ 'success' ] , message . get ( 'failure_reason' , None ) ) elif messages . RPCResponse . matches ( message ) : rpc_message = messages . RPCResponse . verify ( message ) self . conns . finish_operation ( conn_key , rpc_message [ 'success' ] , rpc_message . get ( 'failure_reason' , None ) , rpc_message . get ( 'status' , None ) , rpc_message . get ( 'payload' , None ) ) elif messages . ProgressNotification . matches ( message ) : progress_callback = context . get ( 'progress_callback' , None ) if progress_callback is not None : progress_callback ( message [ 'done_count' ] , message [ 'total_count' ] ) elif messages . ScriptResponse . matches ( message ) : if 'progress_callback' in context : del context [ 'progress_callback' ] self . conns . finish_operation ( conn_key , message [ 'success' ] , message . get ( 'failure_reason' , None ) ) elif messages . DisconnectionNotification . matches ( message ) : try : conn_key = self . _find_connection ( topic ) conn_id = self . conns . get_connection_id ( conn_key ) except ArgumentError : self . _logger . warn ( "Dropping disconnect notification that does not correspond with a known connection, topic=%s" , topic ) return self . conns . 
unexpected_disconnect ( conn_key ) self . _trigger_callback ( 'on_disconnect' , self . id , conn_id ) else : self . _logger . warn ( "Invalid response message received, message=%s" , message ) | Process a response message received |
21,535 | def write_output ( output , text = True , output_path = None ) : if output_path is None and text is False : print ( "ERROR: You must specify an output file using -o/--output for binary output formats" ) sys . exit ( 1 ) if output_path is not None : if text : outfile = open ( output_path , "w" , encoding = "utf-8" ) else : outfile = open ( output_path , "wb" ) else : outfile = sys . stdout try : if text and isinstance ( output , bytes ) : output = output . decode ( 'utf-8' ) outfile . write ( output ) finally : if outfile is not sys . stdout : outfile . close ( ) | Write binary or text output to a file or stdout . |
21,536 | def main ( ) : arg_parser = build_args ( ) args = arg_parser . parse_args ( ) model = DeviceModel ( ) parser = SensorGraphFileParser ( ) parser . parse_file ( args . sensor_graph ) if args . format == u'ast' : write_output ( parser . dump_tree ( ) , True , args . output ) sys . exit ( 0 ) parser . compile ( model ) if not args . disable_optimizer : opt = SensorGraphOptimizer ( ) opt . optimize ( parser . sensor_graph , model = model ) if args . format == u'nodes' : output = u'\n' . join ( parser . sensor_graph . dump_nodes ( ) ) + u'\n' write_output ( output , True , args . output ) else : if args . format not in KNOWN_FORMATS : print ( "Unknown output format: {}" . format ( args . format ) ) sys . exit ( 1 ) output_format = KNOWN_FORMATS [ args . format ] output = output_format . format ( parser . sensor_graph ) write_output ( output , output_format . text , args . output ) | Main entry point for iotile - sgcompile . |
21,537 | def load_external_components ( typesys ) : from iotile . core . dev . registry import ComponentRegistry reg = ComponentRegistry ( ) modules = reg . list_components ( ) typelibs = reduce ( lambda x , y : x + y , [ reg . find_component ( x ) . find_products ( 'type_package' ) for x in modules ] , [ ] ) for lib in typelibs : if lib . endswith ( '.py' ) : lib = lib [ : - 3 ] typesys . load_external_types ( lib ) | Load all external types defined by iotile plugins . |
21,538 | def add_recipe_folder ( self , recipe_folder , whitelist = None ) : if whitelist is not None : whitelist = set ( whitelist ) if recipe_folder == '' : recipe_folder = '.' for yaml_file in [ x for x in os . listdir ( recipe_folder ) if x . endswith ( '.yaml' ) ] : if whitelist is not None and yaml_file not in whitelist : continue recipe = RecipeObject . FromFile ( os . path . join ( recipe_folder , yaml_file ) , self . _recipe_actions , self . _recipe_resources ) self . _recipes [ recipe . name ] = recipe for ship_file in [ x for x in os . listdir ( recipe_folder ) if x . endswith ( '.ship' ) ] : if whitelist is not None and ship_file not in whitelist : continue recipe = RecipeObject . FromArchive ( os . path . join ( recipe_folder , ship_file ) , self . _recipe_actions , self . _recipe_resources ) self . _recipes [ recipe . name ] = recipe | Add all recipes inside a folder to this RecipeManager with an optional whitelist . |
21,539 | def add_recipe_actions ( self , recipe_actions ) : for action_name , action in recipe_actions : self . _recipe_actions [ action_name ] = action | Add additional valid recipe actions to RecipeManager |
21,540 | def get_recipe ( self , recipe_name ) : if recipe_name . endswith ( '.yaml' ) : recipe = self . _recipes . get ( RecipeObject . FromFile ( recipe_name , self . _recipe_actions , self . _recipe_resources ) . name ) else : recipe = self . _recipes . get ( recipe_name ) if recipe is None : raise RecipeNotFoundError ( "Could not find recipe" , recipe_name = recipe_name , known_recipes = [ x for x in self . _recipes . keys ( ) ] ) return recipe | Get a recipe by name . |
21,541 | def _check_time_backwards ( self ) : now = time . time ( ) if now < self . start : self . start = now self . end = self . start + self . length | Make sure a clock reset didn t cause time to go backwards |
21,542 | def expired ( self ) : if self . _expired_latch : return True self . _check_time_backwards ( ) if time . time ( ) > self . end : self . _expired_latch = True return True return False | Boolean property if this timeout has expired |
21,543 | def command ( self , cmd_name , callback , * args ) : cmd = JLinkCommand ( cmd_name , args , callback ) self . _commands . put ( cmd ) | Run an asynchronous command . |
21,544 | def _send_rpc ( self , device_info , control_info , address , rpc_id , payload , poll_interval , timeout ) : write_address , write_data = control_info . format_rpc ( address , rpc_id , payload ) self . _jlink . memory_write32 ( write_address , write_data ) self . _trigger_rpc ( device_info ) start = monotonic ( ) now = start poll_address , poll_mask = control_info . poll_info ( ) while ( now - start ) < timeout : time . sleep ( poll_interval ) value , = self . _jlink . memory_read8 ( poll_address , 1 ) if value & poll_mask : break now = monotonic ( ) if ( now - start ) >= timeout : raise HardwareError ( "Timeout waiting for RPC response" , timeout = timeout , poll_interval = poll_interval ) read_address , read_length = control_info . response_info ( ) read_data = self . _read_memory ( read_address , read_length , join = True ) return control_info . format_response ( read_data ) | Write and trigger an RPC . |
21,545 | def _send_script ( self , device_info , control_info , script , progress_callback ) : for i in range ( 0 , len ( script ) , 20 ) : chunk = script [ i : i + 20 ] self . _send_rpc ( device_info , control_info , 8 , 0x2101 , chunk , 0.001 , 1.0 ) if progress_callback is not None : progress_callback ( i + len ( chunk ) , len ( script ) ) | Send a script by repeatedly sending it as a bunch of RPCs . |
21,546 | def _trigger_rpc ( self , device_info ) : method = device_info . rpc_trigger if isinstance ( method , devices . RPCTriggerViaSWI ) : self . _jlink . memory_write32 ( method . register , [ 1 << method . bit ] ) else : raise HardwareError ( "Unknown RPC trigger method" , method = method ) | Trigger an RPC in a device specific way . |
21,547 | def _find_control_structure ( self , start_address , search_length ) : words = self . _read_memory ( start_address , search_length , chunk_size = 4 , join = False ) found_offset = None for i , word in enumerate ( words ) : if word == ControlStructure . CONTROL_MAGIC_1 : if ( len ( words ) - i ) < 4 : continue if words [ i + 1 ] == ControlStructure . CONTROL_MAGIC_2 and words [ i + 2 ] == ControlStructure . CONTROL_MAGIC_3 and words [ i + 3 ] == ControlStructure . CONTROL_MAGIC_4 : found_offset = i break if found_offset is None : raise HardwareError ( "Could not find control structure magic value in search area" ) struct_info = words [ found_offset + 4 ] _version , _flags , length = struct . unpack ( "<BBH" , struct . pack ( "<L" , struct_info ) ) if length % 4 != 0 : raise HardwareError ( "Invalid control structure length that was not a multiple of 4" , length = length ) word_length = length // 4 control_data = struct . pack ( "<%dL" % word_length , * words [ found_offset : found_offset + word_length ] ) logger . info ( "Found control stucture at address 0x%08X, word_length=%d" , start_address + 4 * found_offset , word_length ) return ControlStructure ( start_address + 4 * found_offset , control_data ) | Find the control structure in RAM for this device . |
21,548 | def _verify_control_structure ( self , device_info , control_info = None ) : if control_info is None : control_info = self . _find_control_structure ( device_info . ram_start , device_info . ram_size ) return control_info | Verify that a control structure is still valid or find one . |
21,549 | def save ( self , out_path ) : out = { 'selectors' : [ str ( x ) for x in self . selectors ] , 'trace' : [ { 'stream' : str ( DataStream . FromEncoded ( x . stream ) ) , 'time' : x . raw_time , 'value' : x . value , 'reading_id' : x . reading_id } for x in self ] } with open ( out_path , "wb" ) as outfile : json . dump ( out , outfile , indent = 4 ) | Save an ascii representation of this simulation trace . |
21,550 | def FromFile ( cls , in_path ) : with open ( in_path , "rb" ) as infile : in_data = json . load ( infile ) if not ( 'trace' , 'selectors' ) in in_data : raise ArgumentError ( "Invalid trace file format" , keys = in_data . keys ( ) , expected = ( 'trace' , 'selectors' ) ) selectors = [ DataStreamSelector . FromString ( x ) for x in in_data [ 'selectors' ] ] readings = [ IOTileReading ( x [ 'time' ] , DataStream . FromString ( x [ 'stream' ] ) . encode ( ) , x [ 'value' ] , reading_id = x [ 'reading_id' ] ) for x in in_data [ 'trace' ] ] return SimulationTrace ( readings , selectors = selectors ) | Load a previously saved ascii representation of this simulation trace . |
21,551 | def _on_scan ( _loop , adapter , _adapter_id , info , expiration_time ) : info [ 'validity_period' ] = expiration_time adapter . notify_event_nowait ( info . get ( 'connection_string' ) , 'device_seen' , info ) | Callback when a new device is seen . |
21,552 | def _on_report ( _loop , adapter , conn_id , report ) : conn_string = None if conn_id is not None : conn_string = adapter . _get_property ( conn_id , 'connection_string' ) if isinstance ( report , BroadcastReport ) : adapter . notify_event_nowait ( conn_string , 'broadcast' , report ) elif conn_string is not None : adapter . notify_event_nowait ( conn_string , 'report' , report ) else : adapter . _logger . debug ( "Dropping report with unknown conn_id=%s" , conn_id ) | Callback when a report is received . |
21,553 | def _on_trace ( _loop , adapter , conn_id , trace ) : conn_string = adapter . _get_property ( conn_id , 'connection_string' ) if conn_string is None : adapter . _logger . debug ( "Dropping trace data with unknown conn_id=%s" , conn_id ) return adapter . notify_event_nowait ( conn_string , 'trace' , trace ) | Callback when tracing data is received . |
21,554 | def _on_disconnect ( _loop , adapter , _adapter_id , conn_id ) : conn_string = adapter . _get_property ( conn_id , 'connection_string' ) if conn_string is None : adapter . _logger . debug ( "Dropping disconnect notification with unknown conn_id=%s" , conn_id ) return adapter . _teardown_connection ( conn_id , force = True ) event = dict ( reason = 'no reason passed from legacy adapter' , expected = False ) adapter . notify_event_nowait ( conn_string , 'disconnection' , event ) | Callback when a device disconnects unexpectedly . |
21,555 | def _on_progress ( adapter , operation , conn_id , done , total ) : conn_string = adapter . _get_property ( conn_id , 'connection_string' ) if conn_string is None : return adapter . notify_progress ( conn_string , operation , done , total ) | Callback when progress is reported . |
21,556 | async def start ( self ) : self . _loop . add_task ( self . _periodic_loop , name = "periodic task for %s" % self . _adapter . __class__ . __name__ , parent = self . _task ) self . _adapter . add_callback ( 'on_scan' , functools . partial ( _on_scan , self . _loop , self ) ) self . _adapter . add_callback ( 'on_report' , functools . partial ( _on_report , self . _loop , self ) ) self . _adapter . add_callback ( 'on_trace' , functools . partial ( _on_trace , self . _loop , self ) ) self . _adapter . add_callback ( 'on_disconnect' , functools . partial ( _on_disconnect , self . _loop , self ) ) | Start the device adapter . |
21,557 | async def stop ( self , _task = None ) : self . _logger . info ( "Stopping adapter wrapper" ) if self . _task . stopped : return for task in self . _task . subtasks : await task . stop ( ) self . _logger . debug ( "Stopping underlying adapter %s" , self . _adapter . __class__ . __name__ ) await self . _execute ( self . _adapter . stop_sync ) | Stop the device adapter . |
21,558 | async def probe ( self ) : resp = await self . _execute ( self . _adapter . probe_sync ) _raise_error ( None , 'probe' , resp ) | Probe for devices connected to this adapter . |
21,559 | async def send_script ( self , conn_id , data ) : progress_callback = functools . partial ( _on_progress , self , 'script' , conn_id ) resp = await self . _execute ( self . _adapter . send_script_sync , conn_id , data , progress_callback ) _raise_error ( conn_id , 'send_rpc' , resp ) | Send a a script to a device . |
def autobuild_shiparchive(src_file):
    """Create a ship file archive containing a yaml_file and its dependencies.

    Args:
        src_file (str): Path to a .tpl recipe template; the rendered .yaml and
            final .ship archive names are derived from it.

    Raises:
        BuildError: If src_file does not end in .tpl.
    """

    if not src_file.endswith('.tpl'):
        raise BuildError("You must pass a .tpl file to autobuild_shiparchive", src_file=src_file)

    env = Environment(tools=[])

    family = ArchitectureGroup('module_settings.json')
    target = family.platform_independent_target()
    resolver = ProductResolver.Create()

    # Load any custom build-step classes declared by the tile.  Each product
    # entry is "path/to/module.py:ClassName"; the module is imported from its
    # folder and the named class is collected for the recipe manager.
    custom_steps = []
    for build_step in family.tile.find_products('build_step'):
        full_file_name = build_step.split(":")[0]
        basename = os.path.splitext(os.path.basename(full_file_name))[0]
        folder = os.path.dirname(full_file_name)

        # NOTE(review): uses the deprecated `imp` module — presumably kept for
        # python2 compatibility; confirm before migrating to importlib.
        fileobj, pathname, description = imp.find_module(basename, [folder])
        mod = imp.load_module(basename, fileobj, pathname, description)

        full_file_name, class_name = build_step.split(":")
        custom_steps.append((class_name, getattr(mod, class_name)))

    env['CUSTOM_STEPS'] = custom_steps
    env["RESOLVER"] = resolver

    # foo.tpl -> foo.yaml (rendered) -> foo.ship (archive).
    base_name, tpl_name = _find_basename(src_file)
    yaml_name = tpl_name[:-4]
    ship_name = yaml_name[:-5] + ".ship"

    output_dir = target.build_dirs()['output']
    build_dir = os.path.join(target.build_dirs()['build'], base_name)

    tpl_path = os.path.join(build_dir, tpl_name)
    yaml_path = os.path.join(build_dir, yaml_name)
    ship_path = os.path.join(build_dir, ship_name)
    output_path = os.path.join(output_dir, ship_name)

    # The rendered yaml is always a dependency of the final archive.
    ship_deps = [yaml_path]

    # Copy the template into the build dir, then render it to yaml.
    env.Command([tpl_path], [src_file], Copy("$TARGET", "$SOURCE"))

    prod_deps = _find_product_dependencies(src_file, resolver)

    env.Command([yaml_path], [tpl_path], action=Action(template_shipfile_action, "Rendering $TARGET"))

    # Copy every referenced product next to the yaml so the archive step can
    # pick them up from one folder.
    for prod in prod_deps:
        dest_file = os.path.join(build_dir, prod.short_name)
        ship_deps.append(dest_file)
        env.Command([dest_file], [prod.full_path], Copy("$TARGET", "$SOURCE"))

    # Archive everything, then copy the finished .ship to the output dir.
    env.Command([ship_path], [ship_deps], action=Action(create_shipfile, "Archiving Ship Recipe $TARGET"))
    env.Command([output_path], [ship_path], Copy("$TARGET", "$SOURCE"))
def create_shipfile(target, source, env):
    """Create a .ship file with all dependencies.

    SCons builder action: archives the recipe named by source[0] (a .yaml
    recipe file) into target[0], including any custom steps carried in
    env['CUSTOM_STEPS'].
    """

    source_path = str(source[0])
    source_dir = os.path.dirname(source_path)
    source_name = os.path.basename(source_path)
    # Strip the ".yaml" extension to get the recipe name.
    recipe_name = source_name[:-5]

    resman = RecipeManager()
    resman.add_recipe_actions(env['CUSTOM_STEPS'])
    resman.add_recipe_folder(source_dir, whitelist=[source_name])

    resman.get_recipe(recipe_name).archive(str(target[0]))
def record_trace(self, selectors=None):
    """Record a trace of readings produced by this simulator.

    Args:
        selectors (list): Optional stream selectors to watch; defaults to the
            selectors of every streamer in the sensor graph.
    """

    if selectors is None:
        selectors = [streamer.selector for streamer in self.sensor_graph.streamers]

    self.trace = SimulationTrace(selectors=selectors)

    sensor_log = self.sensor_graph.sensor_log
    for selector in selectors:
        sensor_log.watch(selector, self._on_trace_callback)
def step(self, input_stream, value):
    """Step the sensor graph through a single input.

    Args:
        input_stream: The stream the value is pushed into.
        value (int): The reading value to push.
    """

    # NOTE(review): the reading is constructed here as
    # (stream, tick_count, value), while run() constructs readings as
    # (tick_count, stream, value) — confirm which argument order
    # IOTileReading actually expects; one of the two call sites looks wrong.
    reading = IOTileReading(input_stream.encode(), self.tick_count, value)

    self.sensor_graph.process_input(input_stream, reading, self.rpc_executor)
def run(self, include_reset=True, accelerated=True):
    """Run this sensor graph until a stop condition is hit.

    Args:
        include_reset (bool): Reserved flag; currently has no effect (the
            branch body is `pass`).
        accelerated (bool): When False, each simulated tick is padded with
            real sleep so ticks take ~1 second of wall time.
    """

    self._start_tick = self.tick_count

    if self._check_stop_conditions(self.sensor_graph):
        return

    # TODO(review): this branch is a no-op; presumably a reset input was
    # meant to be injected here — confirm intent.
    if include_reset:
        pass

    # Deliver all stimuli scheduled for time 0 before the main loop.
    i = None
    for i, stim in enumerate(self.stimuli):
        if stim.time != 0:
            break

        reading = IOTileReading(self.tick_count, stim.stream.encode(), stim.value)
        self.sensor_graph.process_input(stim.stream, reading, self.rpc_executor)

    # Drop the stimuli we just consumed.
    # NOTE(review): if every stimulus has time == 0, the loop ends without
    # breaking and stimuli[i:] keeps the last (already processed) stimulus —
    # it would then be delivered again if its time ever matches; verify.
    if i is not None and i > 0:
        self.stimuli = self.stimuli[i:]

    while not self._check_stop_conditions(self.sensor_graph):
        now = monotonic()
        next_tick = now + 1.0

        self.tick_count += 1

        # Deliver stimuli scheduled exactly for this tick (same pattern and
        # same edge case as the time-0 loop above).
        i = None
        for i, stim in enumerate(self.stimuli):
            if stim.time != self.tick_count:
                break

            reading = IOTileReading(self.tick_count, stim.stream.encode(), stim.value)
            self.sensor_graph.process_input(stim.stream, reading, self.rpc_executor)

        if i is not None and i > 0:
            self.stimuli = self.stimuli[i:]

        self._check_additional_ticks(self.tick_count)

        # Every 10 ticks, push the system tick and a battery voltage reading.
        # NOTE(review): nesting reconstructed from a flattened source — confirm
        # the battery_voltage push belongs inside this 10-tick block.
        if (self.tick_count % 10) == 0:
            reading = IOTileReading(self.tick_count, system_tick.encode(), self.tick_count)
            self.sensor_graph.process_input(system_tick, reading, self.rpc_executor)

            # Voltage is reported in 16.16 fixed point.
            reading = IOTileReading(self.tick_count, battery_voltage.encode(), int(self.voltage * 65536))
            self.sensor_graph.process_input(battery_voltage, reading, self.rpc_executor)

        now = monotonic()
        if (not accelerated) and (now < next_tick):
            time.sleep(next_tick - now)
21,565 | def _check_stop_conditions ( self , sensor_graph ) : for stop in self . stop_conditions : if stop . should_stop ( self . tick_count , self . tick_count - self . _start_tick , sensor_graph ) : return True return False | Check if any of our stop conditions are met . |
def stimulus(self, stimulus):
    """Add a simulation stimulus at a given time.

    Strings are parsed into SimulationStimulus objects; the stimulus list
    is kept sorted by trigger time.
    """

    if isinstance(stimulus, SimulationStimulus):
        parsed = stimulus
    else:
        parsed = SimulationStimulus.FromString(stimulus)

    self.stimuli.append(parsed)
    self.stimuli.sort(key=lambda entry: entry.time)
def stop_condition(self, condition):
    """Add a stop condition to this simulation.

    Each known StopCondition type is tried in order; the first one that
    parses the string wins.

    Raises:
        ArgumentError: If no known condition type can parse the string.
    """

    for cond_format in self._known_conditions:
        try:
            parsed = cond_format.FromString(condition)
        except ArgumentError:
            continue

        self.stop_conditions.append(parsed)
        return

    raise ArgumentError("Stop condition could not be processed by any known StopCondition type", condition=condition, suggestion="It may be mistyped or otherwise invalid.")
def dump(self):
    """Serialize the state of this subsystem into a dict.

    Returns:
        dict: storage dump, dump_walker dump (or None) and next_id.
    """

    walker_state = None
    if self.dump_walker is not None:
        walker_state = self.dump_walker.dump()

    return {
        'storage': self.storage.dump(),
        'dump_walker': walker_state,
        'next_id': self.next_id
    }
def clear(self, timestamp):
    """Clear all data from the RSL.

    Wipes storage, then pushes a DATA_CLEARED marker reading so consumers
    can see that a clear happened at the given timestamp.
    """

    self.storage.clear()

    self.push(streams.DATA_CLEARED, timestamp, 1)
def push(self, stream_id, timestamp, value):
    """Push a value to a stream.

    Returns:
        int: Error.NO_ERROR on success or a packed RING_BUFFER_FULL error
        when storage is exhausted.
    """

    stream = DataStream.FromEncoded(stream_id)
    reading = IOTileReading(stream_id, timestamp, value)

    try:
        self.storage.push(stream, reading)
    except StorageFullError:
        return pack_error(ControllerSubsystem.SENSOR_LOG, SensorLogError.RING_BUFFER_FULL)

    return Error.NO_ERROR
def inspect_virtual(self, stream_id):
    """Inspect the last value written into a virtual stream.

    Returns:
        list: [error_code, value].  Buffered (non-virtual) streams report
        VIRTUAL_STREAM_NOT_FOUND; an empty stream reports value 0.
    """

    stream = DataStream.FromEncoded(stream_id)

    if stream.buffered:
        return [pack_error(ControllerSubsystem.SENSOR_LOG, SensorLogError.VIRTUAL_STREAM_NOT_FOUND), 0]

    try:
        last_reading = self.storage.inspect_last(stream, only_allocated=True)
    except StreamEmptyError:
        return [Error.NO_ERROR, 0]
    except UnresolvedIdentifierError:
        return [pack_error(ControllerSubsystem.SENSOR_LOG, SensorLogError.VIRTUAL_STREAM_NOT_FOUND), 0]

    return [Error.NO_ERROR, last_reading.value]
def dump_begin(self, selector_id):
    """Start dumping a stream.

    Any previous dump walker is destroyed before a new one is created for
    the given selector.

    Returns:
        tuple: (Error.NO_ERROR, Error.NO_ERROR, number of readings).
    """

    if self.dump_walker is not None:
        self.storage.destroy_walker(self.dump_walker)

    selector = DataStreamSelector.FromEncoded(selector_id)
    self.dump_walker = self.storage.create_walker(selector, skip_all=False)

    return (Error.NO_ERROR, Error.NO_ERROR, self.dump_walker.count())
def dump_seek(self, reading_id):
    """Seek the dump streamer to a given reading id.

    Returns:
        tuple: (error, Error.NO_ERROR, remaining reading count).  If the id
        exists but belongs to another stream, error is a packed
        ID_FOUND_FOR_ANOTHER_STREAM code.
    """

    if self.dump_walker is None:
        return (pack_error(ControllerSubsystem.SENSOR_LOG, SensorLogError.STREAM_WALKER_NOT_INITIALIZED),
                Error.NO_ERROR, 0)

    try:
        exact = self.dump_walker.seek(reading_id, target='id')
    except UnresolvedIdentifierError:
        return (pack_error(ControllerSubsystem.SENSOR_LOG, SensorLogError.NO_MORE_READINGS),
                Error.NO_ERROR, 0)

    error = Error.NO_ERROR
    if not exact:
        error = pack_error(ControllerSubsystem.SENSOR_LOG, SensorLogError.ID_FOUND_FOR_ANOTHER_STREAM)

    # Bug fix: `error` is a plain integer code here, so the original
    # `error.NO_ERROR` raised AttributeError on the inexact-seek path.
    return (error, Error.NO_ERROR, self.dump_walker.count())
def dump_next(self):
    """Dump the next reading from the stream.

    Returns:
        The next reading, None when the walker is exhausted, or a packed
        STREAM_WALKER_NOT_INITIALIZED error code if no dump is in progress.
    """

    if self.dump_walker is None:
        return pack_error(ControllerSubsystem.SENSOR_LOG, SensorLogError.STREAM_WALKER_NOT_INITIALIZED)

    try:
        return self.dump_walker.pop()
    except StreamEmptyError:
        return None
def highest_stored_id(self):
    """Scan through the stored readings and report the highest stored id.

    Both the 'storage' and 'streaming' areas are scanned.

    Returns:
        int: The highest reading id seen (0 if there are no readings).
    """

    highest = [0]

    def _track(_index, reading):
        # List cell used as a mutable closure variable.
        highest[0] = max(highest[0], reading.reading_id)

    for area in ('storage', 'streaming'):
        self.engine.scan_storage(area, _track)

    return highest[0]
def rsl_push_reading(self, value, stream_id):
    """Push a reading to the RSL directly.

    Returns:
        list: [error code from the sensor log push].
    """

    status = self.sensor_log.push(stream_id, 0, value)
    return [status]
def rsl_push_many_readings(self, value, count, stream_id):
    """Push many copies of a reading to the RSL.

    Stops at the first failure and reports how many pushes were attempted.

    Returns:
        list: [error code, number of readings pushed (1-based)].
    """

    for attempt in range(1, count + 1):
        status = self.sensor_log.push(stream_id, 0, value)
        if status != Error.NO_ERROR:
            return [status, attempt]

    return [Error.NO_ERROR, count]
def rsl_count_readings(self):
    """Count how many readings are stored in the RSL.

    Returns:
        list: [Error.NO_ERROR, storage count, output (streaming) count].
    """

    storage_count, output_count = self.sensor_log.count()
    return [Error.NO_ERROR, storage_count, output_count]
def rsl_dump_stream_begin(self, stream_id):
    """Begin dumping the contents of a stream.

    Returns:
        list: [error, error, available reading count, reserved 0].
    """

    status, status2, available = self.sensor_log.dump_begin(stream_id)
    return [status, status2, available, 0]
def rsl_dump_stream_next(self, output_format):
    """Dump the next reading from the output stream.

    Args:
        output_format (int): 0 for the short 12-byte record
            (error, timestamp, value); 1 for the extended record that also
            carries the reading id and stream id.

    Returns:
        list of bytes: A single packed response record.

    Raises:
        ValueError: If output_format is not 0 or 1.
    """

    timestamp = 0
    stream_id = 0
    value = 0
    reading_id = 0
    error = Error.NO_ERROR

    reading = self.sensor_log.dump_next()
    if reading is not None:
        timestamp = reading.raw_time
        stream_id = reading.stream
        value = reading.value
        reading_id = reading.reading_id
    else:
        error = pack_error(ControllerSubsystem.SENSOR_LOG, SensorLogError.NO_MORE_READINGS)

    if output_format == 0:
        return [struct.pack("<LLL", error, timestamp, value)]
    elif output_format != 1:
        # Bug fix: the old message claimed only format 1 existed, but both
        # 0 and 1 are handled above.
        raise ValueError("Output formats other than 0 and 1 are not yet supported")

    return [struct.pack("<LLLLH2x", error, timestamp, value, reading_id, stream_id)]
def parse_size_name(type_name):
    """Calculate size and encoding from a type name.

    Array specifiers look like "uint32_t[4]"; the count may be in any base
    accepted by int(x, 0).

    Returns:
        tuple: (total_size, base_size, struct type code, is_variable).

    Raises:
        ArgumentError: On malformed specifiers or unknown base types.
    """

    if ' ' in type_name:
        raise ArgumentError("There should not be a space in config variable type specifier", specifier=type_name)

    is_array = False
    element_count = 1
    base_type = type_name

    if type_name[-1] == ']':
        is_array = True
        bracket = type_name.find('[')
        if bracket == -1:
            raise ArgumentError("Could not find matching [ for ] character", specifier=type_name)

        element_count = int(type_name[bracket + 1:-1], 0)
        base_type = type_name[:bracket]

    type_code = TYPE_CODES.get(base_type)
    if type_code is None:
        raise ArgumentError("Could not find base type name", base_type=base_type, type_string=type_name)

    element_size = struct.calcsize("<%s" % type_code)
    return element_size * element_count, element_size, type_code, is_array
21,582 | def _validate_python_type ( self , python_type ) : if python_type == 'bool' : if self . variable : raise ArgumentError ( "You can only specify a bool python type on a scalar (non-array) type_name" , type_name = self . type_name ) return if python_type == 'string' : if not ( self . variable and self . unit_size == 1 ) : raise ArgumentError ( "You can only pass a string python type on an array of 1-byte objects" , type_name = self . type_name ) return if python_type is not None : raise ArgumentError ( "You can only declare a bool or string python type. Otherwise it must be passed as None" , python_type = python_type ) | Validate the possible combinations of python_type and type_name . |
21,583 | def _convert_default_value ( self , default ) : if default is None : return None if isinstance ( default , str ) : if self . special_type == 'string' : return default . encode ( 'utf-8' ) + b'\0' raise DataError ( "You can only pass a unicode string if you are declaring a string type config variable" , default = default ) if isinstance ( default , ( bytes , bytearray ) ) : if self . special_type == 'string' and isinstance ( default , bytes ) : default += b'\0' return default if isinstance ( default , int ) : default = [ default ] format_string = "<" + ( self . base_type * len ( default ) ) return struct . pack ( format_string , * default ) | Convert the passed default value to binary . |
def clear(self):
    """Clear this config variable to its reset value.

    The current value becomes a copy of the default, or an empty buffer if
    no default was declared.
    """

    source = self.default_value
    self.current_value = bytearray() if source is None else bytearray(source)
def update_value(self, offset, value):
    """Update the binary value currently stored for this config value.

    Zero-pads up to offset if the current value is shorter, truncates
    anything at or past offset, then appends the new bytes.

    Returns:
        int: 0 on success, Error.INPUT_BUFFER_TOO_LONG if the write would
        exceed total_size.
    """

    if offset + len(value) > self.total_size:
        return Error.INPUT_BUFFER_TOO_LONG

    current = self.current_value
    if len(current) < offset:
        current = current + bytearray(offset - len(current))
    elif len(current) > offset:
        current = current[:offset]

    self.current_value = current + bytearray(value)
    return 0
def latch(self):
    """Convert the current value inside this config descriptor to a python object.

    The binary current_value is decoded according to the declared type:
    strings are utf-8 decoded (dropping the null terminator), array types
    become lists, scalars are unpacked directly and bools are coerced.

    Returns:
        The latched python value.

    Raises:
        DataError: If there is no data, or a string-typed value is not
            null terminated.
    """

    if len(self.current_value) == 0:
        raise DataError("There was no data in a config variable during latching", name=self.name)

    remaining = len(self.current_value) % self.unit_size
    if remaining > 0:
        # Bug fix: pad up to the next multiple of unit_size.  The previous
        # code appended `remaining` bytes, which only aligns correctly when
        # unit_size == 2 and made struct.unpack fail for larger units.
        self.current_value += bytearray(self.unit_size - remaining)

    if self.special_type == 'string':
        if self.current_value[-1] != 0:
            raise DataError("String type was specified but data did not end with a null byte", data=self.current_value, name=self.name)

        return bytes(self.current_value[:-1]).decode('utf-8')

    fmt_code = "<" + (self.base_type * (len(self.current_value) // self.unit_size))
    data = struct.unpack(fmt_code, self.current_value)

    if self.variable:
        data = list(data)
    else:
        data = data[0]

    if self.special_type == 'bool':
        data = bool(data)

    return data
def declare_config_variable(self, name, config_id, type_name, default=None, convert=None):
    """Declare a config variable that this emulated tile accepts.

    Args:
        name (str): Human readable variable name.
        config_id (int): The 16-bit config variable id.
        type_name (str): The declared binary type (e.g. "uint32_t[4]").
        default: Optional default value.
        convert: Optional python type coercion ('bool' or 'string').
    """

    descriptor = ConfigDescriptor(config_id, type_name, default, name=name, python_type=convert)
    self._config_variables[config_id] = descriptor
def latch_config_variables(self):
    """Latch the current value of all config variables as python objects.

    Returns:
        dict: Mapping from variable name to its latched python value.
    """

    latched = {}
    for descriptor in self._config_variables.values():
        latched[descriptor.name] = descriptor.latch()

    return latched
async def reset(self):
    """Asynchronously reset a tile.

    Stops all background tasks registered for this tile's address, runs the
    tile's reset handler, then relaunches the tile's main task from its
    reset vector.
    """

    # Stop everything running for this tile before touching its state.
    await self._device.emulator.stop_tasks(self.address)

    self._handle_reset()

    self._logger.info("Tile at address %d has reset itself.", self.address)

    self._logger.info("Starting main task for tile at address %d", self.address)
    # Relaunch the tile's main coroutine as a fresh task.
    self._device.emulator.add_task(self.address, self._reset_vector())
def list_config_variables(self, offset):
    """List defined config variables up to 9 at a time.

    Args:
        offset (int): Index into the sorted id list to start from.

    Returns:
        list: [count, id1, ..., id9] with unused slots zero-filled.
    """

    ids = sorted(self._config_variables)[offset:offset + 9]
    count = len(ids)
    padded = ids + [0] * (9 - count)

    return [count] + padded
def describe_config_variable(self, config_id):
    """Describe the config variable by its id.

    Returns:
        list: [error, 0, 0, config_id, packed_size] where packed_size
        carries the variable (array) flag in bit 15.
    """

    descriptor = self._config_variables.get(config_id)
    if descriptor is None:
        return [Error.INVALID_ARRAY_KEY, 0, 0, 0, 0]

    packed_size = descriptor.total_size | (int(descriptor.variable) << 15)
    return [0, 0, 0, config_id, packed_size]
def set_config_variable(self, config_id, offset, value):
    """Set a chunk of the current config variable's value.

    Writes are rejected once the tile has finished initialization.

    Returns:
        list: [error code].
    """

    if self.initialized.is_set():
        return [Error.STATE_CHANGE_AT_INVALID_TIME]

    descriptor = self._config_variables.get(config_id)
    if descriptor is None:
        return [Error.INVALID_ARRAY_KEY]

    return [descriptor.update_value(offset, value)]
def get_config_variable(self, config_id, offset):
    """Get a chunk of a config variable's value.

    Returns:
        list: [up to 20 bytes starting at offset], or [b""] for unknown ids.
    """

    descriptor = self._config_variables.get(config_id)
    if descriptor is None:
        return [b""]

    chunk = descriptor.current_value[offset:offset + 20]
    return [bytes(chunk)]
def add_callback(self, name, func):
    """Add a callback when Device events happen.

    Args:
        name (str): One of the known callback event names.
        func (callable): The callback to register.

    Raises:
        ValueError: If the callback name is not known.
    """

    if name not in self.callbacks:
        raise ValueError("Unknown callback name: %s" % name)

    self.callbacks[name].add(func)
def connect_sync(self, connection_id, connection_string):
    """Synchronously connect to a device.

    Wraps connect_async, blocking on an event until the adapter's callback
    fires.

    Returns:
        dict: {'success': bool, 'failure_reason': str or None}.
    """

    finished = threading.Event()
    results = {}

    def _on_connect_done(_callback_connid, _callback_adapterid, success, failure_reason):
        results['success'] = success
        results['failure_reason'] = failure_reason
        finished.set()

    self.connect_async(connection_id, connection_string, _on_connect_done)
    finished.wait()

    return results
def disconnect_sync(self, conn_id):
    """Synchronously disconnect from a connected device.

    Wraps disconnect_async, blocking on an event until the adapter's
    callback fires.

    Returns:
        dict: {'success': bool, 'failure_reason': str or None}.
    """

    finished = threading.Event()
    result = {}

    # Inner callback parameters renamed so they no longer shadow conn_id.
    def _on_disconnect_done(_conn_id, _adapter_id, status, reason):
        result['success'] = status
        result['failure_reason'] = reason
        finished.set()

    self.disconnect_async(conn_id, _on_disconnect_done)
    finished.wait()

    return result
def probe_sync(self):
    """Synchronously probe for devices on this adapter.

    Wraps probe_async, blocking on an event until the adapter's callback
    fires.

    Returns:
        dict: {'success': bool, 'failure_reason': str or None}.
    """

    finished = threading.Event()
    result = {}

    def _on_probe_done(_adapter_id, status, reason):
        result['success'] = status
        result['failure_reason'] = reason
        finished.set()

    self.probe_async(_on_probe_done)
    finished.wait()

    return result
def send_rpc_sync(self, conn_id, address, rpc_id, payload, timeout):
    """Synchronously send an RPC to this IOTile device.

    Wraps send_rpc_async, blocking on an event until the adapter's callback
    fires.

    Returns:
        dict: {'success', 'failure_reason', 'status', 'payload'}.
    """

    finished = threading.Event()
    result = {}

    def _on_rpc_done(_conn_id, _adapter_id, status, reason, rpc_status, resp_payload):
        result['success'] = status
        result['failure_reason'] = reason
        result['status'] = rpc_status
        result['payload'] = resp_payload
        finished.set()

    self.send_rpc_async(conn_id, address, rpc_id, payload, timeout, _on_rpc_done)
    finished.wait()

    return result
def FindByName(cls, name):
    """Find a specific installed auth provider by name.

    Returns:
        The first matching auth provider extension, or None if no
        extension matches.
    """

    registry = ComponentRegistry()
    matches = registry.load_extensions('iotile.auth_provider', name_filter=name)

    for _unused, provider in matches:
        return provider

    return None
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.