idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
21,400
def todict(self):
    """Convert this object to a plain Python dictionary.

    The result maps addresses to byte values, plus an optional
    'start_addr' entry when a start address is set.
    """
    result = dict(self._buf)
    if self.start_addr:
        result['start_addr'] = self.start_addr
    return result
Convert to a Python dictionary.
43
6
21,401
def tofile(self, fobj, format):
    """Write data to a hex or bin file.  Preferred over tobin/tohex.

    @param fobj    file-like object to write to
    @param format  either 'hex' or 'bin'; anything else raises ValueError
    """
    if format == 'hex':
        self.write_hex_file(fobj)
        return
    if format == 'bin':
        self.tobinfile(fobj)
        return
    raise ValueError('format should be either "hex" or "bin";'
                     ' got %r instead' % format)
Write data to hex or bin file . Preferred method over tobin or tohex .
75
17
21,402
def gets(self, addr, length):
    """Return a byte string of *length* bytes starting at *addr*.

    Raises NotEnoughDataError if any location in [addr, addr + length)
    is empty; padding is never substituted.
    """
    buf = array('B', asbytes('\0' * length))
    try:
        for offset in range_g(length):
            buf[offset] = self._buf[addr + offset]
    except KeyError:
        raise NotEnoughDataError(address=addr, length=length)
    return array_tobytes(buf)
Get string of bytes from given address . If any entries are blank from addr through addr + length a NotEnoughDataError exception will be raised . Padding is not used .
82
35
21,403
def puts(self, addr, s):
    """Store byte string *s* starting at *addr*, overwriting any
    previously stored entries."""
    data = array('B', asbytes(s))
    for offset, value in enumerate(data):
        self._buf[addr + offset] = value
Put string of bytes at given address . Will overwrite any previous entries .
51
14
21,404
def getsz(self, addr):
    """Return the zero-terminated byte string starting at *addr*.

    Raises NotEnoughDataError if a hole is encountered before a 0 byte.
    """
    length = 0
    try:
        # Scan forward until the terminating zero byte.
        while self._buf[addr + length] != 0:
            length += 1
    except KeyError:
        raise NotEnoughDataError(msg=('Bad access at 0x%X: '
            'not enough data to read zero-terminated string') % addr)
    return self.gets(addr, length)
Get zero - terminated bytes string from given address . Will raise NotEnoughDataError exception if a hole is encountered before a 0 .
82
26
21,405
def putsz(self, addr, s):
    """Store byte string *s* at *addr* and append a terminating zero."""
    terminator_addr = addr + len(s)
    self.puts(addr, s)
    self._buf[terminator_addr] = 0
Put bytes string in object at addr and append terminating zero at end .
33
14
21,406
def dump(self, tofile=None, width=16, withpadding=False):
    """Dump object content as a hexdump to *tofile* (stdout if None).

    Output starts with start-address information (if any), followed by
    address-prefixed rows of *width* bytes with an ASCII column.

    @param tofile       file-like object to write to; None means sys.stdout
    @param width        number of bytes per row (positive integer)
    @param withpadding  show missing bytes as self.padding instead of '--'
    """
    if not isinstance(width, int) or width < 1:
        raise ValueError('width must be a positive integer.')
    # The integer can be of float type - does not work with bit operations
    width = int(width)
    if tofile is None:
        tofile = sys.stdout

    # start addr possibly
    if self.start_addr is not None:
        cs = self.start_addr.get('CS')
        ip = self.start_addr.get('IP')
        eip = self.start_addr.get('EIP')
        if eip is not None and cs is None and ip is None:
            tofile.write('EIP = 0x%08X\n' % eip)
        elif eip is None and cs is not None and ip is not None:
            tofile.write('CS = 0x%04X, IP = 0x%04X\n' % (cs, ip))
        else:
            # BUGFIX: previously referenced the undefined name 'start_addr',
            # raising NameError for malformed start addresses.
            tofile.write('start_addr = %r\n' % self.start_addr)

    # actual data
    addresses = dict_keys(self._buf)
    if addresses:
        addresses.sort()
        minaddr = addresses[0]
        maxaddr = addresses[-1]
        startaddr = (minaddr // width) * width
        endaddr = ((maxaddr // width) + 1) * width
        maxdigits = max(len(hex(endaddr)) - 2, 4)  # Less 2 to exclude '0x'
        templa = '%%0%dX' % maxdigits
        rangewidth = range_l(width)
        if withpadding:
            pad = self.padding
        else:
            pad = None
        for i in range_g(startaddr, endaddr, width):
            tofile.write(templa % i)
            tofile.write(' ')
            s = []
            for j in rangewidth:
                x = self._buf.get(i + j, pad)
                if x is not None:
                    tofile.write(' %02X' % x)
                    if 32 <= x < 127:
                        # GNU less does not like 0x7F (128 decimal) so we'd better show it as dot
                        s.append(chr(x))
                    else:
                        s.append('.')
                else:
                    tofile.write(' --')
                    s.append(' ')
            tofile.write(' |' + ''.join(s) + '|\n')
Dump object content to specified file object or to stdout if None . Format is a hexdump with some header information at the beginning addresses on the left and data on right .
543
36
21,407
def segments(self):
    """Return a list of (start, stop) tuples describing the contiguous
    occupied address ranges.

    Each tuple follows range()/xrange() semantics: the second entry is
    always an integer strictly greater than the first.
    """
    addresses = self.addresses()
    if not addresses:
        return []

    # Walk consecutive address pairs, splitting whenever a gap appears.
    starts = [addresses[0]]
    ends = []
    for prev, cur in zip(addresses[:-1], addresses[1:]):
        if cur - prev > 1:
            ends.append(prev)
            starts.append(cur)
    ends.append(addresses[-1])

    return [(begin, end + 1) for begin, end in zip(starts, ends)]
Return a list of ordered tuple objects representing contiguous occupied data addresses . Each tuple has a length of two and follows the semantics of the range and xrange objects . The second entry of the tuple is always an integer greater than the first entry .
178
48
21,408
def get_memory_size(self):
    """Return the approximate memory footprint of this object in bytes.

    Sums shallow sizes of the object and its padding/offset plus the
    deep sizes of the start address and the data buffer.
    """
    total = sys.getsizeof(self) + sys.getsizeof(self.padding)
    total += total_size(self.start_addr) + total_size(self._buf)
    total += sys.getsizeof(self._offset)
    return total
Returns the approximate memory footprint for data .
70
8
21,409
def _from_bytes(bytes):
    """Return a complete hex-record string for the given data byte list.

    *bytes* is the record contents without the leading colon or final
    checksum; the checksum is computed here and appended.
    """
    assert len(bytes) >= 4
    # Two's-complement checksum truncated to one byte.
    checksum = (-sum(bytes)) & 0x0FF
    record = array('B', bytes + [checksum])
    return ':' + asstr(hexlify(array_tobytes(record))).upper()
Takes a list of bytes computes the checksum and outputs the entire record as a string . bytes should be the hex record without the colon or final checksum .
73
34
21,410
def create_release_settings_action(target, source, env):
    """Copy module_settings.json, adding release and build information."""
    with open(str(source[0]), "r") as infile:
        settings = json.load(infile)

    settings['release'] = True
    settings['release_date'] = datetime.datetime.utcnow().isoformat()

    # Also insert the versions of every dependency used to build this component.
    dep_versions = {}
    for dep in env['TILE'].dependencies:
        tile = IOTile(os.path.join('build', 'deps', dep['unique_id']))
        dep_versions[dep['unique_id']] = str(tile.parsed_version)
    settings['dependency_versions'] = dep_versions

    with open(str(target[0]), "w") as outfile:
        json.dump(settings, outfile, indent=4)
Copy module_settings . json and add release and build information
208
12
21,411
def copy_extra_files(tile):
    """Copy all files listed in the copy_files and copy_products sections."""
    env = Environment(tools=[])
    outdir = os.path.join('build', 'output')

    # Plain file copies.
    for src, dest in tile.settings.get('copy_files', {}).items():
        env.Command([os.path.join(outdir, dest)], [src],
                    Copy("$TARGET", "$SOURCE"))

    # Copies of resolved build products.
    resolver = ProductResolver.Create()
    for src, dest in tile.settings.get('copy_products', {}).items():
        product = resolver.find_unique(None, src)
        env.Command([os.path.join(outdir, dest)], [product.full_path],
                    Copy("$TARGET", "$SOURCE"))
Copy all files listed in a copy_files and copy_products section .
191
15
21,412
def generate(env):
    """Add Builders and construction variables for masm to an Environment."""
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)

    # Register actions/emitters for both plain and preprocessed assembly.
    for suffixes, action in ((ASSuffixes, SCons.Defaults.ASAction),
                             (ASPPSuffixes, SCons.Defaults.ASPPAction)):
        for suffix in suffixes:
            static_obj.add_action(suffix, action)
            shared_obj.add_action(suffix, action)
            static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
            shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)

    env['AS'] = 'ml'
    env['ASFLAGS'] = SCons.Util.CLVar('/nologo')
    env['ASPPFLAGS'] = '$ASFLAGS'
    env['ASCOM'] = '$AS $ASFLAGS /c /Fo$TARGET $SOURCES'
    env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c /Fo$TARGET $SOURCES'
    env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
Add Builders and construction variables for masm to an Environment .
364
13
21,413
def median(values):
    """Return the median element of *values*.

    For an even number of elements the upper of the two middle values is
    returned (no averaging), preserving the original selection rule.
    Unlike before, the caller's list is no longer mutated.
    """
    ordered = sorted(values)  # sort a copy; don't mutate the caller's list
    return ordered[len(ordered) // 2]
Return median value for the list of values .
27
9
21,414
def time_coef(tc, nc, tb, nb):
    """Return the time coefficient of (tc, nc) relative to base (tb, nb)."""
    current_time = float(tc)
    current_n = float(nc)
    base_time = float(tb)
    base_n = float(nb)
    return (current_time * base_n) / (base_time * current_n)
Return time coefficient relative to base numbers .
66
8
21,415
def main(argv=None):
    """Main function to run the benchmarks.

    Returns 0 on success, 1 on a command-line error.
    """
    import getopt

    test_read = None
    test_write = None
    repeats = 3     # number of repetitions per measurement

    if argv is None:
        argv = sys.argv[1:]

    try:
        opts, args = getopt.getopt(argv, 'hn:rw', [])
        for option, value in opts:
            if option == '-h':
                print(HELP)
                return 0
            elif option == '-n':
                repeats = int(value)
            elif option == '-r':
                test_read = True
            elif option == '-w':
                test_write = True
        if args:
            raise getopt.GetoptError('Arguments are not used.')
    except getopt.GetoptError:
        msg = sys.exc_info()[1]     # current exception (py2/3 compatible)
        print(str(msg))
        return 1

    # Neither flag given means benchmark both directions.
    if (test_read, test_write) == (None, None):
        test_read = test_write = True

    measure = Measure(repeats, test_read, test_write)
    measure.measure_all()
    measure.print_report()
    return 0
Main function to run benchmarks .
251
6
21,416
def measure_one(self, data):
    """Measure read and write times for one data set.

    Returns a (tread, twrite) tuple; a disabled direction reports 0.0.
    """
    _unused, hexstr, ih = data
    tread = twrite = 0.0
    if self.read:
        tread = run_readtest_N_times(intelhex.IntelHex, hexstr, self.n)[0]
    if self.write:
        twrite = run_writetest_N_times(ih.write_hex_file, self.n)[0]
    return tread, twrite
Do measuring of read and write operations .
105
8
21,417
def _get_key(cls, device_id):
    """Fetch the per-device user key from the USER_KEY_<id> env variable.

    Raises NotFoundError when the variable is missing, is not 64 hex
    characters, or does not decode to a 32-byte key.
    """
    var_name = "USER_KEY_{0:08X}".format(device_id)

    if var_name not in os.environ:
        raise NotFoundError("No user key could be found for devices",
                            device_id=device_id,
                            expected_variable_name=var_name)

    key_var = os.environ[var_name]
    if len(key_var) != 64:
        raise NotFoundError("User key in variable is not the correct length, should be 64 hex characters",
                            device_id=device_id, key_value=key_var)

    try:
        key = binascii.unhexlify(key_var)
    except ValueError:
        raise NotFoundError("User key in variable could not be decoded from hex",
                            device_id=device_id, key_value=key_var)

    # 64 hex chars should decode to exactly 32 bytes.
    if len(key) != 32:
        raise NotFoundError("User key in variable is not the correct length, should be 64 hex characters",
                            device_id=device_id, key_value=key_var)

    return key
Attempt to get a user key from an environment variable
248
10
21,418
def decrypt_report(self, device_id, root, data, **kwargs):
    """Decrypt a buffer of report data on behalf of a device.

    Returns a dict with the decrypted bytes under the 'data' key.
    """
    report_key = self._verify_derive_key(device_id, root, **kwargs)

    try:
        from Crypto.Cipher import AES
        import Crypto.Util.Counter
    except ImportError:
        raise NotFoundError

    # AES-128 in CTR mode; only the first 16 key bytes are used.
    ctr = Crypto.Util.Counter.new(128)
    cipher = AES.new(bytes(report_key[:16]), AES.MODE_CTR, counter=ctr)
    return {'data': cipher.decrypt(bytes(data))}
Decrypt a buffer of report data on behalf of a device .
147
13
21,419
def join_path(path):
    """Return *path* unchanged if it is already a string; otherwise join
    its components with os.path.join."""
    return path if isinstance(path, str) else os.path.join(*path)
If given a string return it otherwise combine a list into a string using os . path . join
29
19
21,420
def build_defines(defines):
    """Build the list of -D directives to pass to the compiler.

    Entries whose value is None are skipped entirely.
    """
    directives = []
    for name, value in defines.items():
        if value is None:
            continue
        directives.append('-D"%s=%s"' % (name, str(value)))
    return directives
Build a list of - D directives to pass to the compiler .
46
13
21,421
def _open_interface(self, conn_id, iface, callback):
    """Open an interface on this device.

    Reports failure through *callback* if the connection is unknown.
    """
    try:
        context = self.conns.get_context(conn_id)
    except ArgumentError:
        callback(conn_id, self.id, False, "Could not find connection information")
        return

    self.conns.begin_operation(conn_id, 'open_interface', callback,
                               self.get_config('default_timeout'))

    message = {
        'key': context['key'],
        'type': 'command',
        'operation': 'open_interface',
        'client': self.name,
        'interface': iface
    }

    self.client.publish(context['topics'].action, message)
Open an interface on this device
172
6
21,422
def stop_sync(self):
    """Synchronously stop this adapter.

    Any open connections are closed best-effort before shutdown.
    """
    for conn_id in list(self.conns.get_connections()):
        try:
            self.disconnect_sync(conn_id)
        except HardwareError:
            # Best effort: a failed disconnect must not block shutdown.
            pass

    self.client.disconnect()
    self.conns.stop()
Synchronously stop this adapter
79
6
21,423
def probe_async(self, callback):
    """Probe for visible devices connected to this DeviceAdapter."""
    topics = MQTTTopicValidator(self.prefix)
    probe_message = {'type': 'command', 'operation': 'probe', 'client': self.name}
    self.client.publish(topics.probe, probe_message)
    callback(self.id, True, None)
Probe for visible devices connected to this DeviceAdapter .
71
11
21,424
def periodic_callback(self):
    """Drain the deferred-action queue to maintain adapter internal state.

    Each queued callable is run; failures are logged and do not stop the
    drain.
    """
    while True:
        try:
            task = self._deferred.get(False)
            task()
        except queue.Empty:
            # Queue drained - nothing more to do this cycle.
            break
        except Exception:
            self._logger.exception('Exception in periodic callback')
Periodically help maintain adapter internal state
52
8
21,425
def _bind_topics ( self , topics ) : # FIXME: Allow for these subscriptions to fail and clean up the previous ones # so that this function is atomic self . client . subscribe ( topics . status , self . _on_status_message ) self . client . subscribe ( topics . tracing , self . _on_trace ) self . client . subscribe ( topics . streaming , self . _on_report ) self . client . subscribe ( topics . response , self . _on_response_message )
Subscribe to all the topics we need to communication with this device
107
12
21,426
def _unbind_topics ( self , topics ) : self . client . unsubscribe ( topics . status ) self . client . unsubscribe ( topics . tracing ) self . client . unsubscribe ( topics . streaming ) self . client . unsubscribe ( topics . response )
Unsubscribe from all of the topics we needed for communication with the device.
57
14
21,427
def _find_connection ( self , topic ) : parts = topic . split ( '/' ) if len ( parts ) < 3 : return None slug = parts [ - 3 ] return slug
Attempt to find a connection id corresponding with a topic
39
10
21,428
def _on_report(self, sequence, topic, message):
    """Process a report received from a device."""
    try:
        conn_key = self._find_connection(topic)
        conn_id = self.conns.get_connection_id(conn_key)
    except ArgumentError:
        self._logger.warn("Dropping report message that does not correspond with a known connection, topic=%s", topic)
        return

    try:
        rep_msg = messages.ReportNotification.verify(message)

        received = datetime.datetime.strptime(
            rep_msg['received_time'].encode().decode(), "%Y%m%dT%H:%M:%S.%fZ")
        serialized_report = {
            'report_format': rep_msg['report_format'],
            'encoded_report': rep_msg['report'],
            'received_time': received,
        }

        report = self.report_parser.deserialize_report(serialized_report)
        self._trigger_callback('on_report', conn_id, report)
    except Exception:
        self._logger.exception("Error processing report conn_id=%d", conn_id)
Process a report received from a device .
277
8
21,429
def _on_trace(self, sequence, topic, message):
    """Process a trace received from a device."""
    try:
        conn_key = self._find_connection(topic)
        conn_id = self.conns.get_connection_id(conn_key)
    except ArgumentError:
        self._logger.warn("Dropping trace message that does not correspond with a known connection, topic=%s", topic)
        return

    try:
        tracing = messages.TracingNotification.verify(message)
        self._trigger_callback('on_trace', conn_id, tracing['trace'])
    except Exception:
        self._logger.exception("Error processing trace conn_id=%d", conn_id)
Process a trace received from a device .
150
8
21,430
def _on_status_message(self, sequence, topic, message):
    """Process a status message received on a status topic."""
    self._logger.debug("Received message on (topic=%s): %s" % (topic, message))

    try:
        conn_key = self._find_connection(topic)
    except ArgumentError:
        self._logger.warn("Dropping message that does not correspond with a known connection, message=%s", message)
        return

    if not messages.ConnectionResponse.matches(message):
        self._logger.warn("Dropping message that did not correspond with a known schema, message=%s", message)
        return

    if self.name != message['client']:
        self._logger.debug("Connection response received for a different client, client=%s, name=%s", message['client'], self.name)
        return

    self.conns.finish_connection(conn_key, message['success'],
                                 message.get('failure_reason', None))
Process a status message received
223
5
21,431
def _on_response_message(self, sequence, topic, message):
    """Process a response message received on a response topic.

    The message is dispatched to the matching connection-manager handler
    based on which schema it matches.
    """
    try:
        conn_key = self._find_connection(topic)
        context = self.conns.get_context(conn_key)
    except ArgumentError:
        self._logger.warn("Dropping message that does not correspond with a known connection, message=%s", message)
        return

    if 'client' in message and message['client'] != self.name:
        self._logger.debug("Dropping message that is for another client %s, we are %s", message['client'], self.name)
        # BUGFIX: previously this fell through and processed another
        # client's message despite logging that it was dropped.
        return

    if messages.DisconnectionResponse.matches(message):
        self.conns.finish_disconnection(conn_key, message['success'],
                                        message.get('failure_reason', None))
    elif messages.OpenInterfaceResponse.matches(message):
        self.conns.finish_operation(conn_key, message['success'],
                                    message.get('failure_reason', None))
    elif messages.RPCResponse.matches(message):
        rpc_message = messages.RPCResponse.verify(message)
        self.conns.finish_operation(conn_key, rpc_message['success'],
                                    rpc_message.get('failure_reason', None),
                                    rpc_message.get('status', None),
                                    rpc_message.get('payload', None))
    elif messages.ProgressNotification.matches(message):
        progress_callback = context.get('progress_callback', None)
        if progress_callback is not None:
            progress_callback(message['done_count'], message['total_count'])
    elif messages.ScriptResponse.matches(message):
        if 'progress_callback' in context:
            del context['progress_callback']
        self.conns.finish_operation(conn_key, message['success'],
                                    message.get('failure_reason', None))
    elif messages.DisconnectionNotification.matches(message):
        try:
            conn_key = self._find_connection(topic)
            conn_id = self.conns.get_connection_id(conn_key)
        except ArgumentError:
            self._logger.warn("Dropping disconnect notification that does not correspond with a known connection, topic=%s", topic)
            return

        self.conns.unexpected_disconnect(conn_key)
        self._trigger_callback('on_disconnect', self.id, conn_id)
    else:
        self._logger.warn("Invalid response message received, message=%s", message)
Process a response message received
591
5
21,432
def write_output(output, text=True, output_path=None):
    """Write binary or text output to a file or stdout.

    Binary output requires an explicit *output_path*; bytes written in
    text mode are decoded as UTF-8 first.
    """
    if output_path is None and text is False:
        print("ERROR: You must specify an output file using -o/--output for binary output formats")
        sys.exit(1)

    if output_path is None:
        outfile = sys.stdout
    elif text:
        outfile = open(output_path, "w", encoding="utf-8")
    else:
        outfile = open(output_path, "wb")

    try:
        if text and isinstance(output, bytes):
            output = output.decode('utf-8')
        outfile.write(output)
    finally:
        # Never close the shared stdout stream.
        if outfile is not sys.stdout:
            outfile.close()
Write binary or text output to a file or stdout .
164
12
21,433
def main():
    """Main entry point for iotile-sgcompile."""
    arg_parser = build_args()
    args = arg_parser.parse_args()

    model = DeviceModel()
    parser = SensorGraphFileParser()
    parser.parse_file(args.sensor_graph)

    # The AST dump does not require compilation.
    if args.format == u'ast':
        write_output(parser.dump_tree(), True, args.output)
        sys.exit(0)

    parser.compile(model)

    if not args.disable_optimizer:
        opt = SensorGraphOptimizer()
        opt.optimize(parser.sensor_graph, model=model)

    if args.format == u'nodes':
        node_output = u'\n'.join(parser.sensor_graph.dump_nodes()) + u'\n'
        write_output(node_output, True, args.output)
        return

    if args.format not in KNOWN_FORMATS:
        print("Unknown output format: {}".format(args.format))
        sys.exit(1)

    output_format = KNOWN_FORMATS[args.format]
    write_output(output_format.format(parser.sensor_graph),
                 output_format.text, args.output)
Main entry point for iotile - sgcompile .
256
13
21,434
def load_external_components(typesys):
    """Load all external types defined by iotile plugins."""
    # Find all of the registered IOTile components and see if we need to
    # add any type libraries for them.
    from iotile.core.dev.registry import ComponentRegistry

    reg = ComponentRegistry()
    typelibs = []
    for component in reg.list_components():
        typelibs.extend(reg.find_component(component).find_products('type_package'))

    for lib in typelibs:
        if lib.endswith('.py'):
            lib = lib[:-3]
        typesys.load_external_types(lib)
Load all external types defined by iotile plugins .
149
11
21,435
def add_recipe_folder(self, recipe_folder, whitelist=None):
    """Add all recipes inside a folder to this RecipeManager.

    @param recipe_folder  folder to scan; '' means the current directory
    @param whitelist      optional iterable of file names; when given,
                          only those files are loaded
    """
    if whitelist is not None:
        whitelist = set(whitelist)

    if recipe_folder == '':
        recipe_folder = '.'

    # .yaml files load via FromFile, .ship archives via FromArchive;
    # all yaml files are processed before any ship archives.
    loaders = (('.yaml', RecipeObject.FromFile),
               ('.ship', RecipeObject.FromArchive))
    for extension, loader in loaders:
        for file_name in os.listdir(recipe_folder):
            if not file_name.endswith(extension):
                continue
            if whitelist is not None and file_name not in whitelist:
                continue
            recipe = loader(os.path.join(recipe_folder, file_name),
                            self._recipe_actions, self._recipe_resources)
            self._recipes[recipe.name] = recipe
Add all recipes inside a folder to this RecipeManager with an optional whitelist .
251
16
21,436
def add_recipe_actions(self, recipe_actions):
    """Add additional valid recipe actions to this RecipeManager.

    @param recipe_actions  iterable of (action_name, action) pairs
    """
    self._recipe_actions.update(recipe_actions)
Add additional valid recipe actions to RecipeManager
40
8
21,437
def get_recipe(self, recipe_name):
    """Get a recipe by name (or by a path to its .yaml file).

    Raises RecipeNotFoundError when no matching recipe is known.
    """
    if recipe_name.endswith('.yaml'):
        # Resolve the file to its recipe name, then look that up.
        key = RecipeObject.FromFile(recipe_name, self._recipe_actions,
                                    self._recipe_resources).name
    else:
        key = recipe_name

    recipe = self._recipes.get(key)
    if recipe is None:
        raise RecipeNotFoundError("Could not find recipe",
                                  recipe_name=recipe_name,
                                  known_recipes=[x for x in self._recipes.keys()])
    return recipe
Get a recipe by name .
132
6
21,438
def _check_time_backwards ( self ) : now = time . time ( ) if now < self . start : self . start = now self . end = self . start + self . length
Make sure a clock reset didn't cause time to go backwards.
42
12
21,439
def expired(self):
    """Boolean property: has this timeout expired?

    Once True is returned the result latches and stays True.
    """
    if self._expired_latch:
        return True

    self._check_time_backwards()
    if time.time() <= self.end:
        return False

    self._expired_latch = True
    return True
Boolean property if this timeout has expired
54
8
21,440
def command(self, cmd_name, callback, *args):
    """Queue an asynchronous command for later execution.

    @param cmd_name  name of the command to run
    @param callback  invoked when the command finishes
    @param args      positional arguments forwarded to the command
    """
    self._commands.put(JLinkCommand(cmd_name, args, callback))
Run an asynchronous command .
39
5
21,441
def _send_rpc(self, device_info, control_info, address, rpc_id, payload, poll_interval, timeout):
    """Write and trigger an RPC, then poll for and return the response.

    Raises HardwareError if no response bit is seen within *timeout*
    seconds while polling every *poll_interval* seconds.
    """
    # Stage the RPC request in device memory and fire the trigger.
    write_address, write_data = control_info.format_rpc(address, rpc_id, payload)
    self._jlink.memory_write32(write_address, write_data)
    self._trigger_rpc(device_info)

    start = monotonic()
    now = start

    poll_address, poll_mask = control_info.poll_info()

    # Poll the completion flag until it is set or the deadline passes.
    while (now - start) < timeout:
        time.sleep(poll_interval)
        value, = self._jlink.memory_read8(poll_address, 1)
        if value & poll_mask:
            break
        now = monotonic()

    # NOTE(review): if the flag was set on the final poll, now was not
    # refreshed after the break, so this cannot falsely report a timeout.
    if (now - start) >= timeout:
        raise HardwareError("Timeout waiting for RPC response", timeout=timeout, poll_interval=poll_interval)

    # Fetch and decode the response buffer.
    read_address, read_length = control_info.response_info()
    read_data = self._read_memory(read_address, read_length, join=True)
    return control_info.format_response(read_data)
Write and trigger an RPC .
259
6
21,442
def _send_script ( self , device_info , control_info , script , progress_callback ) : for i in range ( 0 , len ( script ) , 20 ) : chunk = script [ i : i + 20 ] self . _send_rpc ( device_info , control_info , 8 , 0x2101 , chunk , 0.001 , 1.0 ) if progress_callback is not None : progress_callback ( i + len ( chunk ) , len ( script ) )
Send a script by repeatedly sending it as a bunch of RPCs .
105
14
21,443
def _trigger_rpc(self, device_info):
    """Trigger an RPC using the device-specific mechanism.

    Raises HardwareError for trigger methods other than SWI.
    """
    method = device_info.rpc_trigger
    if not isinstance(method, devices.RPCTriggerViaSWI):
        raise HardwareError("Unknown RPC trigger method", method=method)

    # Fire the software interrupt by writing the trigger bit.
    self._jlink.memory_write32(method.register, [1 << method.bit])
Trigger an RPC in a device specific way .
79
9
21,444
def _find_control_structure(self, start_address, search_length):
    """Find the control structure in RAM for this device.

    Scans *search_length* bytes of memory (read as 32-bit words) from
    *start_address* for the four magic marker words, then decodes the
    structure header that follows.  Raises HardwareError if the magic
    values are not found or the encoded length is invalid.
    """
    words = self._read_memory(start_address, search_length, chunk_size=4, join=False)

    # Look for the four consecutive magic words that mark the structure.
    found_offset = None
    for i, word in enumerate(words):
        if word == ControlStructure.CONTROL_MAGIC_1:
            if (len(words) - i) < 4:
                continue
            if words[i + 1] == ControlStructure.CONTROL_MAGIC_2 and words[i + 2] == ControlStructure.CONTROL_MAGIC_3 and words[i + 3] == ControlStructure.CONTROL_MAGIC_4:
                found_offset = i
                break

    if found_offset is None:
        raise HardwareError("Could not find control structure magic value in search area")

    # The word after the magic block packs version, flags and total
    # structure length (in bytes) as <BBH.
    struct_info = words[found_offset + 4]
    _version, _flags, length = struct.unpack("<BBH", struct.pack("<L", struct_info))

    if length % 4 != 0:
        raise HardwareError("Invalid control structure length that was not a multiple of 4", length=length)

    word_length = length // 4
    control_data = struct.pack("<%dL" % word_length, *words[found_offset:found_offset + word_length])

    logger.info("Found control stucture at address 0x%08X, word_length=%d", start_address + 4 * found_offset, word_length)
    return ControlStructure(start_address + 4 * found_offset, control_data)
Find the control structure in RAM for this device .
346
10
21,445
def _verify_control_structure ( self , device_info , control_info = None ) : if control_info is None : control_info = self . _find_control_structure ( device_info . ram_start , device_info . ram_size ) #FIXME: Actually reread the memory here to verify that the control structure is still valid return control_info
Verify that a control structure is still valid or find one .
83
13
21,446
def save(self, out_path):
    """Save an ASCII (JSON) representation of this simulation trace.

    @param out_path  path of the file to write
    """
    out = {
        'selectors': [str(x) for x in self.selectors],
        'trace': [{
            'stream': str(DataStream.FromEncoded(x.stream)),
            'time': x.raw_time,
            'value': x.value,
            'reading_id': x.reading_id
        } for x in self]
    }

    # BUGFIX: json.dump produces str, so the file must be opened in text
    # mode; the previous "wb" raised TypeError on Python 3.
    with open(out_path, "w") as outfile:
        json.dump(out, outfile, indent=4)
Save an ascii representation of this simulation trace .
121
11
21,447
def FromFile(cls, in_path):
    """Load a previously saved ASCII representation of a simulation trace.

    Raises ArgumentError if the file lacks the 'trace' or 'selectors' keys.
    """
    with open(in_path, "rb") as infile:
        in_data = json.load(infile)

    # BUGFIX: the old check tested whether the tuple ('trace', 'selectors')
    # was itself a dict key, which is never true, so it always raised.
    # Check each required key individually instead.
    if 'trace' not in in_data or 'selectors' not in in_data:
        raise ArgumentError("Invalid trace file format", keys=in_data.keys(),
                            expected=('trace', 'selectors'))

    selectors = [DataStreamSelector.FromString(x) for x in in_data['selectors']]
    readings = [IOTileReading(x['time'],
                              DataStream.FromString(x['stream']).encode(),
                              x['value'], reading_id=x['reading_id'])
                for x in in_data['trace']]

    return SimulationTrace(readings, selectors=selectors)
Load a previously saved ascii representation of this simulation trace .
194
13
21,448
def _on_scan ( _loop , adapter , _adapter_id , info , expiration_time ) : info [ 'validity_period' ] = expiration_time adapter . notify_event_nowait ( info . get ( 'connection_string' ) , 'device_seen' , info )
Callback when a new device is seen .
65
8
21,449
def _on_report(_loop, adapter, conn_id, report):
    """Callback when a report is received.

    Broadcast reports are always forwarded; normal reports require a
    known connection string, otherwise the report is dropped.
    """
    conn_string = adapter._get_property(conn_id, 'connection_string') if conn_id is not None else None

    if isinstance(report, BroadcastReport):
        adapter.notify_event_nowait(conn_string, 'broadcast', report)
    elif conn_string is not None:
        adapter.notify_event_nowait(conn_string, 'report', report)
    else:
        adapter._logger.debug("Dropping report with unknown conn_id=%s", conn_id)
Callback when a report is received .
138
7
21,450
def _on_trace ( _loop , adapter , conn_id , trace ) : conn_string = adapter . _get_property ( conn_id , 'connection_string' ) if conn_string is None : adapter . _logger . debug ( "Dropping trace data with unknown conn_id=%s" , conn_id ) return adapter . notify_event_nowait ( conn_string , 'trace' , trace )
Callback when tracing data is received .
93
7
21,451
def _on_disconnect ( _loop , adapter , _adapter_id , conn_id ) : conn_string = adapter . _get_property ( conn_id , 'connection_string' ) if conn_string is None : adapter . _logger . debug ( "Dropping disconnect notification with unknown conn_id=%s" , conn_id ) return adapter . _teardown_connection ( conn_id , force = True ) event = dict ( reason = 'no reason passed from legacy adapter' , expected = False ) adapter . notify_event_nowait ( conn_string , 'disconnection' , event )
Callback when a device disconnects unexpectedly .
135
8
21,452
def _on_progress ( adapter , operation , conn_id , done , total ) : conn_string = adapter . _get_property ( conn_id , 'connection_string' ) if conn_string is None : return adapter . notify_progress ( conn_string , operation , done , total )
Callback when progress is reported .
64
6
21,453
async def start(self):
    """Start the device adapter and hook up its legacy callbacks."""
    self._loop.add_task(self._periodic_loop,
                        name="periodic task for %s" % self._adapter.__class__.__name__,
                        parent=self._task)

    # Bridge the legacy callback API into this wrapper's event handlers.
    callbacks = (('on_scan', _on_scan), ('on_report', _on_report),
                 ('on_trace', _on_trace), ('on_disconnect', _on_disconnect))
    for event_name, handler in callbacks:
        self._adapter.add_callback(event_name,
                                   functools.partial(handler, self._loop, self))
Start the device adapter .
199
5
21,454
async def stop(self, _task=None):
    """Stop the device adapter, its subtasks, and the underlying adapter."""
    self._logger.info("Stopping adapter wrapper")

    if self._task.stopped:
        return

    for subtask in self._task.subtasks:
        await subtask.stop()

    self._logger.debug("Stopping underlying adapter %s", self._adapter.__class__.__name__)
    await self._execute(self._adapter.stop_sync)
Stop the device adapter .
101
5
21,455
async def probe(self):
    """Probe for devices connected to this adapter."""
    response = await self._execute(self._adapter.probe_sync)
    _raise_error(None, 'probe', response)
Probe for devices connected to this adapter .
40
9
21,456
async def send_script(self, conn_id, data):
    """Send a script to a device.

    @param conn_id  connection to send the script over
    @param data     script data to send
    """
    progress_callback = functools.partial(_on_progress, self, 'script', conn_id)
    resp = await self._execute(self._adapter.send_script_sync, conn_id, data, progress_callback)

    # BUGFIX: errors were reported under 'send_rpc' (a copy/paste slip
    # from the RPC path); report the correct operation name.
    _raise_error(conn_id, 'send_script', resp)
Send a script to a device.
89
8
21,457
def autobuild_shiparchive ( src_file ) : if not src_file . endswith ( '.tpl' ) : raise BuildError ( "You must pass a .tpl file to autobuild_shiparchive" , src_file = src_file ) env = Environment ( tools = [ ] ) family = ArchitectureGroup ( 'module_settings.json' ) target = family . platform_independent_target ( ) resolver = ProductResolver . Create ( ) #Parse through build_step products to see what needs to imported custom_steps = [ ] for build_step in family . tile . find_products ( 'build_step' ) : full_file_name = build_step . split ( ":" ) [ 0 ] basename = os . path . splitext ( os . path . basename ( full_file_name ) ) [ 0 ] folder = os . path . dirname ( full_file_name ) fileobj , pathname , description = imp . find_module ( basename , [ folder ] ) mod = imp . load_module ( basename , fileobj , pathname , description ) full_file_name , class_name = build_step . split ( ":" ) custom_steps . append ( ( class_name , getattr ( mod , class_name ) ) ) env [ 'CUSTOM_STEPS' ] = custom_steps env [ "RESOLVER" ] = resolver base_name , tpl_name = _find_basename ( src_file ) yaml_name = tpl_name [ : - 4 ] ship_name = yaml_name [ : - 5 ] + ".ship" output_dir = target . build_dirs ( ) [ 'output' ] build_dir = os . path . join ( target . build_dirs ( ) [ 'build' ] , base_name ) tpl_path = os . path . join ( build_dir , tpl_name ) yaml_path = os . path . join ( build_dir , yaml_name ) ship_path = os . path . join ( build_dir , ship_name ) output_path = os . path . join ( output_dir , ship_name ) # We want to build up all related files in # <build_dir>/<ship archive_folder>/ # - First copy the template yaml over # - Then render the template yaml # - Then find all products referenced in the template yaml and copy them # - over # - Then build a .ship archive # - Then copy that archive into output_dir ship_deps = [ yaml_path ] env . Command ( [ tpl_path ] , [ src_file ] , Copy ( "$TARGET" , "$SOURCE" ) ) prod_deps = _find_product_dependencies ( src_file , resolver ) env . 
Command ( [ yaml_path ] , [ tpl_path ] , action = Action ( template_shipfile_action , "Rendering $TARGET" ) ) for prod in prod_deps : dest_file = os . path . join ( build_dir , prod . short_name ) ship_deps . append ( dest_file ) env . Command ( [ dest_file ] , [ prod . full_path ] , Copy ( "$TARGET" , "$SOURCE" ) ) env . Command ( [ ship_path ] , [ ship_deps ] , action = Action ( create_shipfile , "Archiving Ship Recipe $TARGET" ) ) env . Command ( [ output_path ] , [ ship_path ] , Copy ( "$TARGET" , "$SOURCE" ) )
Create a ship file archive containing a yaml_file and its dependencies .
791
15
21,458
def create_shipfile ( target , source , env ) : source_dir = os . path . dirname ( str ( source [ 0 ] ) ) recipe_name = os . path . basename ( str ( source [ 0 ] ) ) [ : - 5 ] resman = RecipeManager ( ) resman . add_recipe_actions ( env [ 'CUSTOM_STEPS' ] ) resman . add_recipe_folder ( source_dir , whitelist = [ os . path . basename ( str ( source [ 0 ] ) ) ] ) recipe = resman . get_recipe ( recipe_name ) recipe . archive ( str ( target [ 0 ] ) )
Create a . ship file with all dependencies .
146
9
21,459
def record_trace ( self , selectors = None ) : if selectors is None : selectors = [ x . selector for x in self . sensor_graph . streamers ] self . trace = SimulationTrace ( selectors = selectors ) for sel in selectors : self . sensor_graph . sensor_log . watch ( sel , self . _on_trace_callback )
Record a trace of readings produced by this simulator .
83
10
21,460
def step ( self , input_stream , value ) : reading = IOTileReading ( input_stream . encode ( ) , self . tick_count , value ) self . sensor_graph . process_input ( input_stream , reading , self . rpc_executor )
Step the sensor graph through one since input .
59
9
21,461
def run ( self , include_reset = True , accelerated = True ) : self . _start_tick = self . tick_count if self . _check_stop_conditions ( self . sensor_graph ) : return if include_reset : pass # TODO: include a reset event here # Process all stimuli that occur at the start of the simulation i = None for i , stim in enumerate ( self . stimuli ) : if stim . time != 0 : break reading = IOTileReading ( self . tick_count , stim . stream . encode ( ) , stim . value ) self . sensor_graph . process_input ( stim . stream , reading , self . rpc_executor ) if i is not None and i > 0 : self . stimuli = self . stimuli [ i : ] while not self . _check_stop_conditions ( self . sensor_graph ) : # Process one more one second tick now = monotonic ( ) next_tick = now + 1.0 # To match what is done in actual hardware, we increment tick count so the first tick # is 1. self . tick_count += 1 # Process all stimuli that occur at this tick of the simulation i = None for i , stim in enumerate ( self . stimuli ) : if stim . time != self . tick_count : break reading = IOTileReading ( self . tick_count , stim . stream . encode ( ) , stim . value ) self . sensor_graph . process_input ( stim . stream , reading , self . rpc_executor ) if i is not None and i > 0 : self . stimuli = self . stimuli [ i : ] self . _check_additional_ticks ( self . tick_count ) if ( self . tick_count % 10 ) == 0 : reading = IOTileReading ( self . tick_count , system_tick . encode ( ) , self . tick_count ) self . sensor_graph . process_input ( system_tick , reading , self . rpc_executor ) # Every 10 seconds the battery voltage is reported in 16.16 fixed point format in volts reading = IOTileReading ( self . tick_count , battery_voltage . encode ( ) , int ( self . voltage * 65536 ) ) self . sensor_graph . process_input ( battery_voltage , reading , self . rpc_executor ) now = monotonic ( ) # If we are trying to execute this sensor graph in realtime, wait for # the remaining slice of this tick. 
if ( not accelerated ) and ( now < next_tick ) : time . sleep ( next_tick - now )
Run this sensor graph until a stop condition is hit .
560
11
21,462
def _check_stop_conditions ( self , sensor_graph ) : for stop in self . stop_conditions : if stop . should_stop ( self . tick_count , self . tick_count - self . _start_tick , sensor_graph ) : return True return False
Check if any of our stop conditions are met .
61
10
21,463
def stimulus ( self , stimulus ) : if not isinstance ( stimulus , SimulationStimulus ) : stimulus = SimulationStimulus . FromString ( stimulus ) self . stimuli . append ( stimulus ) self . stimuli . sort ( key = lambda x : x . time )
Add a simulation stimulus at a given time .
56
9
21,464
def stop_condition ( self , condition ) : # Try to parse this into a stop condition with each of our registered # condition types for cond_format in self . _known_conditions : try : cond = cond_format . FromString ( condition ) self . stop_conditions . append ( cond ) return except ArgumentError : continue raise ArgumentError ( "Stop condition could not be processed by any known StopCondition type" , condition = condition , suggestion = "It may be mistyped or otherwise invalid." )
Add a stop condition to this simulation .
107
8
21,465
def dump ( self ) : walker = self . dump_walker if walker is not None : walker = walker . dump ( ) state = { 'storage' : self . storage . dump ( ) , 'dump_walker' : walker , 'next_id' : self . next_id } return state
Serialize the state of this subsystem into a dict .
68
11
21,466
def clear ( self , timestamp ) : self . storage . clear ( ) self . push ( streams . DATA_CLEARED , timestamp , 1 )
Clear all data from the RSL .
31
8
21,467
def push ( self , stream_id , timestamp , value ) : stream = DataStream . FromEncoded ( stream_id ) reading = IOTileReading ( stream_id , timestamp , value ) try : self . storage . push ( stream , reading ) return Error . NO_ERROR except StorageFullError : return pack_error ( ControllerSubsystem . SENSOR_LOG , SensorLogError . RING_BUFFER_FULL )
Push a value to a stream .
93
7
21,468
def inspect_virtual ( self , stream_id ) : stream = DataStream . FromEncoded ( stream_id ) if stream . buffered : return [ pack_error ( ControllerSubsystem . SENSOR_LOG , SensorLogError . VIRTUAL_STREAM_NOT_FOUND ) , 0 ] try : reading = self . storage . inspect_last ( stream , only_allocated = True ) return [ Error . NO_ERROR , reading . value ] except StreamEmptyError : return [ Error . NO_ERROR , 0 ] except UnresolvedIdentifierError : return [ pack_error ( ControllerSubsystem . SENSOR_LOG , SensorLogError . VIRTUAL_STREAM_NOT_FOUND ) , 0 ]
Inspect the last value written into a virtual stream .
157
11
21,469
def dump_begin ( self , selector_id ) : if self . dump_walker is not None : self . storage . destroy_walker ( self . dump_walker ) selector = DataStreamSelector . FromEncoded ( selector_id ) self . dump_walker = self . storage . create_walker ( selector , skip_all = False ) return Error . NO_ERROR , Error . NO_ERROR , self . dump_walker . count ( )
Start dumping a stream .
95
5
21,470
def dump_seek ( self , reading_id ) : if self . dump_walker is None : return ( pack_error ( ControllerSubsystem . SENSOR_LOG , SensorLogError . STREAM_WALKER_NOT_INITIALIZED ) , Error . NO_ERROR , 0 ) try : exact = self . dump_walker . seek ( reading_id , target = 'id' ) except UnresolvedIdentifierError : return ( pack_error ( ControllerSubsystem . SENSOR_LOG , SensorLogError . NO_MORE_READINGS ) , Error . NO_ERROR , 0 ) error = Error . NO_ERROR if not exact : error = pack_error ( ControllerSubsystem . SENSOR_LOG , SensorLogError . ID_FOUND_FOR_ANOTHER_STREAM ) return ( error , error . NO_ERROR , self . dump_walker . count ( ) )
Seek the dump streamer to a given ID .
195
11
21,471
def dump_next ( self ) : if self . dump_walker is None : return pack_error ( ControllerSubsystem . SENSOR_LOG , SensorLogError . STREAM_WALKER_NOT_INITIALIZED ) try : return self . dump_walker . pop ( ) except StreamEmptyError : return None
Dump the next reading from the stream .
70
9
21,472
def highest_stored_id ( self ) : shared = [ 0 ] def _keep_max ( _i , reading ) : if reading . reading_id > shared [ 0 ] : shared [ 0 ] = reading . reading_id self . engine . scan_storage ( 'storage' , _keep_max ) self . engine . scan_storage ( 'streaming' , _keep_max ) return shared [ 0 ]
Scan through the stored readings and report the highest stored id .
90
12
21,473
def rsl_push_reading ( self , value , stream_id ) : #FIXME: Fix this with timestamp from clock manager task err = self . sensor_log . push ( stream_id , 0 , value ) return [ err ]
Push a reading to the RSL directly .
51
9
21,474
def rsl_push_many_readings ( self , value , count , stream_id ) : #FIXME: Fix this with timestamp from clock manager task for i in range ( 1 , count + 1 ) : err = self . sensor_log . push ( stream_id , 0 , value ) if err != Error . NO_ERROR : return [ err , i ] return [ Error . NO_ERROR , count ]
Push many copies of a reading to the RSL .
89
11
21,475
def rsl_count_readings ( self ) : storage , output = self . sensor_log . count ( ) return [ Error . NO_ERROR , storage , output ]
Count how many readings are stored in the RSL .
37
11
21,476
def rsl_dump_stream_begin ( self , stream_id ) : err , err2 , count = self . sensor_log . dump_begin ( stream_id ) #FIXME: Fix this with the uptime of the clock manager task return [ err , err2 , count , 0 ]
Begin dumping the contents of a stream .
64
8
21,477
def rsl_dump_stream_next ( self , output_format ) : timestamp = 0 stream_id = 0 value = 0 reading_id = 0 error = Error . NO_ERROR reading = self . sensor_log . dump_next ( ) if reading is not None : timestamp = reading . raw_time stream_id = reading . stream value = reading . value reading_id = reading . reading_id else : error = pack_error ( ControllerSubsystem . SENSOR_LOG , SensorLogError . NO_MORE_READINGS ) if output_format == 0 : return [ struct . pack ( "<LLL" , error , timestamp , value ) ] elif output_format != 1 : raise ValueError ( "Output format other than 1 not yet supported" ) return [ struct . pack ( "<LLLLH2x" , error , timestamp , value , reading_id , stream_id ) ]
Dump the next reading from the output stream .
193
10
21,478
def parse_size_name ( type_name ) : if ' ' in type_name : raise ArgumentError ( "There should not be a space in config variable type specifier" , specifier = type_name ) variable = False count = 1 base_type = type_name if type_name [ - 1 ] == ']' : variable = True start_index = type_name . find ( '[' ) if start_index == - 1 : raise ArgumentError ( "Could not find matching [ for ] character" , specifier = type_name ) count = int ( type_name [ start_index + 1 : - 1 ] , 0 ) base_type = type_name [ : start_index ] matched_type = TYPE_CODES . get ( base_type ) if matched_type is None : raise ArgumentError ( "Could not find base type name" , base_type = base_type , type_string = type_name ) base_size = struct . calcsize ( "<%s" % matched_type ) total_size = base_size * count return total_size , base_size , matched_type , variable
Calculate size and encoding from a type name .
244
11
21,479
def _validate_python_type ( self , python_type ) : if python_type == 'bool' : if self . variable : raise ArgumentError ( "You can only specify a bool python type on a scalar (non-array) type_name" , type_name = self . type_name ) return if python_type == 'string' : if not ( self . variable and self . unit_size == 1 ) : raise ArgumentError ( "You can only pass a string python type on an array of 1-byte objects" , type_name = self . type_name ) return if python_type is not None : raise ArgumentError ( "You can only declare a bool or string python type. Otherwise it must be passed as None" , python_type = python_type )
Validate the possible combinations of python_type and type_name .
168
14
21,480
def _convert_default_value ( self , default ) : if default is None : return None if isinstance ( default , str ) : if self . special_type == 'string' : return default . encode ( 'utf-8' ) + b'\0' raise DataError ( "You can only pass a unicode string if you are declaring a string type config variable" , default = default ) if isinstance ( default , ( bytes , bytearray ) ) : if self . special_type == 'string' and isinstance ( default , bytes ) : default += b'\0' return default if isinstance ( default , int ) : default = [ default ] format_string = "<" + ( self . base_type * len ( default ) ) return struct . pack ( format_string , * default )
Convert the passed default value to binary .
175
9
21,481
def clear ( self ) : if self . default_value is None : self . current_value = bytearray ( ) else : self . current_value = bytearray ( self . default_value )
Clear this config variable to its reset value .
46
9
21,482
def update_value ( self , offset , value ) : if offset + len ( value ) > self . total_size : return Error . INPUT_BUFFER_TOO_LONG if len ( self . current_value ) < offset : self . current_value += bytearray ( offset - len ( self . current_value ) ) if len ( self . current_value ) > offset : self . current_value = self . current_value [ : offset ] self . current_value += bytearray ( value ) return 0
Update the binary value currently stored for this config value .
116
11
21,483
def latch ( self ) : if len ( self . current_value ) == 0 : raise DataError ( "There was no data in a config variable during latching" , name = self . name ) # Make sure the data ends on a unit boundary. This would have happened automatically # in an actual device by the C runtime 0 padding out the storage area. remaining = len ( self . current_value ) % self . unit_size if remaining > 0 : self . current_value += bytearray ( remaining ) if self . special_type == 'string' : if self . current_value [ - 1 ] != 0 : raise DataError ( "String type was specified by data did not end with a null byte" , data = self . current_value , name = self . name ) return bytes ( self . current_value [ : - 1 ] ) . decode ( 'utf-8' ) fmt_code = "<" + ( self . base_type * ( len ( self . current_value ) // self . unit_size ) ) data = struct . unpack ( fmt_code , self . current_value ) if self . variable : data = list ( data ) else : data = data [ 0 ] if self . special_type == 'bool' : data = bool ( data ) return data
Convert the current value inside this config descriptor to a python object .
275
14
21,484
def declare_config_variable ( self , name , config_id , type_name , default = None , convert = None ) : #pylint:disable=too-many-arguments;These are all necessary with sane defaults. config = ConfigDescriptor ( config_id , type_name , default , name = name , python_type = convert ) self . _config_variables [ config_id ] = config
Declare a config variable that this emulated tile accepts .
91
12
21,485
def latch_config_variables ( self ) : return { desc . name : desc . latch ( ) for desc in self . _config_variables . values ( ) }
Latch the current value of all config variables as python objects .
37
13
21,486
async def reset ( self ) : await self . _device . emulator . stop_tasks ( self . address ) self . _handle_reset ( ) self . _logger . info ( "Tile at address %d has reset itself." , self . address ) self . _logger . info ( "Starting main task for tile at address %d" , self . address ) self . _device . emulator . add_task ( self . address , self . _reset_vector ( ) )
Synchronously reset a tile .
104
7
21,487
def list_config_variables ( self , offset ) : names = sorted ( self . _config_variables ) names = names [ offset : offset + 9 ] count = len ( names ) if len ( names ) < 9 : names += [ 0 ] * ( 9 - count ) return [ count ] + names
List defined config variables up to 9 at a time .
66
11
21,488
def describe_config_variable ( self , config_id ) : config = self . _config_variables . get ( config_id ) if config is None : return [ Error . INVALID_ARRAY_KEY , 0 , 0 , 0 , 0 ] packed_size = config . total_size packed_size |= int ( config . variable ) << 15 return [ 0 , 0 , 0 , config_id , packed_size ]
Describe the config variable by its id .
94
9
21,489
def set_config_variable ( self , config_id , offset , value ) : if self . initialized . is_set ( ) : return [ Error . STATE_CHANGE_AT_INVALID_TIME ] config = self . _config_variables . get ( config_id ) if config is None : return [ Error . INVALID_ARRAY_KEY ] error = config . update_value ( offset , value ) return [ error ]
Set a chunk of the current config value s value .
96
11
21,490
def get_config_variable ( self , config_id , offset ) : config = self . _config_variables . get ( config_id ) if config is None : return [ b"" ] return [ bytes ( config . current_value [ offset : offset + 20 ] ) ]
Get a chunk of a config variable s value .
60
10
21,491
def add_callback ( self , name , func ) : if name not in self . callbacks : raise ValueError ( "Unknown callback name: %s" % name ) self . callbacks [ name ] . add ( func )
Add a callback when Device events happen
48
7
21,492
def connect_sync ( self , connection_id , connection_string ) : calldone = threading . Event ( ) results = { } def connect_done ( callback_connid , callback_adapterid , callback_success , failure_reason ) : results [ 'success' ] = callback_success results [ 'failure_reason' ] = failure_reason calldone . set ( ) # Be sure to set after all operations are done to prevent race condition self . connect_async ( connection_id , connection_string , connect_done ) calldone . wait ( ) return results
Synchronously connect to a device
127
7
21,493
def disconnect_sync ( self , conn_id ) : done = threading . Event ( ) result = { } def disconnect_done ( conn_id , adapter_id , status , reason ) : result [ 'success' ] = status result [ 'failure_reason' ] = reason done . set ( ) self . disconnect_async ( conn_id , disconnect_done ) done . wait ( ) return result
Synchronously disconnect from a connected device
88
8
21,494
def probe_sync ( self ) : done = threading . Event ( ) result = { } def probe_done ( adapter_id , status , reason ) : result [ 'success' ] = status result [ 'failure_reason' ] = reason done . set ( ) self . probe_async ( probe_done ) done . wait ( ) return result
Synchronously probe for devices on this adapter .
76
10
21,495
def send_rpc_sync ( self , conn_id , address , rpc_id , payload , timeout ) : done = threading . Event ( ) result = { } def send_rpc_done ( conn_id , adapter_id , status , reason , rpc_status , resp_payload ) : result [ 'success' ] = status result [ 'failure_reason' ] = reason result [ 'status' ] = rpc_status result [ 'payload' ] = resp_payload done . set ( ) self . send_rpc_async ( conn_id , address , rpc_id , payload , timeout , send_rpc_done ) done . wait ( ) return result
Synchronously send an RPC to this IOTile device
155
12
21,496
def FindByName ( cls , name ) : reg = ComponentRegistry ( ) for _ , entry in reg . load_extensions ( 'iotile.auth_provider' , name_filter = name ) : return entry
Find a specific installed auth provider by name .
49
9
21,497
def DeriveReportKey ( cls , root_key , report_id , sent_timestamp ) : signed_data = struct . pack ( "<LLL" , AuthProvider . ReportKeyMagic , report_id , sent_timestamp ) hmac_calc = hmac . new ( root_key , signed_data , hashlib . sha256 ) return bytearray ( hmac_calc . digest ( ) )
Derive a standard one time use report signing key .
94
11
21,498
def declare ( self , name ) : if name in self . _data : raise KeyError ( "Declared name {} that already existed" . format ( name ) ) self . _data [ name ] = self . _loop . create_future ( )
Declare that a key will be set in the future .
53
12
21,499
async def get ( self , name , timeout = None , autoremove = True ) : self . _ensure_declared ( name ) try : await asyncio . wait_for ( self . _data [ name ] , timeout , loop = self . _loop . get_loop ( ) ) return self . _data [ name ] . result ( ) finally : if autoremove : self . _data [ name ] . cancel ( ) del self . _data [ name ]
Wait for a value to be set for a key .
102
11