idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
22,400
def _decode_datetime ( obj ) : if '__datetime__' in obj : obj = datetime . datetime . strptime ( obj [ 'as_str' ] . decode ( ) , "%Y%m%dT%H:%M:%S.%f" ) return obj
Decode a msgpack ed datetime .
68
9
22,401
def _encode_datetime ( obj ) : if isinstance ( obj , datetime . datetime ) : obj = { '__datetime__' : True , 'as_str' : obj . strftime ( "%Y%m%dT%H:%M:%S.%f" ) . encode ( ) } return obj
Encode a msgpck ed datetime .
74
10
22,402
def _versioned_lib_suffix ( env , suffix , version ) : Verbose = False if Verbose : print ( "_versioned_lib_suffix: suffix= " , suffix ) print ( "_versioned_lib_suffix: version= " , version ) cygversion = re . sub ( '\.' , '-' , version ) if not suffix . startswith ( '-' + cygversion ) : suffix = '-' + cygversion + suffix if Verbose : print ( "_versioned_lib_suffix: return suffix= " , suffix ) return suffix
Generate versioned shared library suffix from a unversioned one . If suffix = . dll and version = 0 . 1 . 2 then it returns - 0 - 1 - 2 . dll
126
40
22,403
def generate ( env ) : gnulink . generate ( env ) env [ 'LINKFLAGS' ] = SCons . Util . CLVar ( '-Wl,-no-undefined' ) env [ 'SHLINKCOM' ] = shlib_action env [ 'LDMODULECOM' ] = ldmod_action env . Append ( SHLIBEMITTER = [ shlib_emitter ] ) env . Append ( LDMODULEEMITTER = [ ldmod_emitter ] ) env [ 'SHLIBPREFIX' ] = 'cyg' env [ 'SHLIBSUFFIX' ] = '.dll' env [ 'IMPLIBPREFIX' ] = 'lib' env [ 'IMPLIBSUFFIX' ] = '.dll.a' # Variables used by versioned shared libraries env [ '_SHLIBVERSIONFLAGS' ] = '$SHLIBVERSIONFLAGS' env [ '_LDMODULEVERSIONFLAGS' ] = '$LDMODULEVERSIONFLAGS' # SHLIBVERSIONFLAGS and LDMODULEVERSIONFLAGS are same as in gnulink... # LINKCALLBACKS are NOT inherited from gnulink env [ 'LINKCALLBACKS' ] = { 'VersionedShLibSuffix' : _versioned_lib_suffix , 'VersionedLdModSuffix' : _versioned_lib_suffix , 'VersionedImpLibSuffix' : _versioned_lib_suffix , 'VersionedShLibName' : link . _versioned_shlib_name , 'VersionedLdModName' : link . _versioned_ldmod_name , 'VersionedShLibImpLibName' : lambda * args : _versioned_implib_name ( * args , libtype = 'ShLib' ) , 'VersionedLdModImpLibName' : lambda * args : _versioned_implib_name ( * args , libtype = 'LdMod' ) , 'VersionedShLibImpLibSymlinks' : lambda * args : _versioned_implib_symlinks ( * args , libtype = 'ShLib' ) , 'VersionedLdModImpLibSymlinks' : lambda * args : _versioned_implib_symlinks ( * args , libtype = 'LdMod' ) , } # these variables were set by gnulink but are not used in cyglink try : del env [ '_SHLIBSONAME' ] except KeyError : pass try : del env [ '_LDMODULESONAME' ] except KeyError : pass
Add Builders and construction variables for cyglink to an Environment .
581
14
22,404
def dispatch ( self , message ) : for validator , callback in self . validators : if not validator . matches ( message ) : continue callback ( message ) return raise ArgumentError ( "No handler was registered for message" , message = message )
Dispatch a message to a callback based on its schema .
52
11
22,405
def main ( raw_args = None ) : if raw_args is None : raw_args = sys . argv [ 1 : ] parser = build_parser ( ) args = parser . parse_args ( raw_args ) if args . firmware_image is None and args . gdb is None : print ( "You must specify either a firmware image or attach a debugger with --gdb <PORT>" ) return 1 test_args = [ 'qemu-system-gnuarmeclipse' , '-verbose' , '-verbose' , '-board' , 'STM32F0-Discovery' , '-nographic' , '-monitor' , 'null' , '-serial' , 'null' , '--semihosting-config' , 'enable=on,target=native' , '-d' , 'unimp,guest_errors' ] if args . firmware_image : test_args += [ '-image' , args . firmware_image ] if args . gdb : test_args += [ '--gdb' , 'tcp::%d' % args . gdb ] proc = subprocess . Popen ( test_args , stdout = sys . stdout , stderr = sys . stderr ) try : proc . communicate ( ) except KeyboardInterrupt : proc . terminate ( ) return 0
Run the iotile - emulate script .
295
9
22,406
def _detect ( env ) : QTDIR = None if not QTDIR : QTDIR = env . get ( 'QTDIR' , None ) if not QTDIR : QTDIR = os . environ . get ( 'QTDIR' , None ) if not QTDIR : moc = env . WhereIs ( 'moc' ) if moc : QTDIR = os . path . dirname ( os . path . dirname ( moc ) ) SCons . Warnings . warn ( QtdirNotFound , "Could not detect qt, using moc executable as a hint (QTDIR=%s)" % QTDIR ) else : QTDIR = None SCons . Warnings . warn ( QtdirNotFound , "Could not detect qt, using empty QTDIR" ) return QTDIR
Not really safe but fast method to detect the QT library
185
12
22,407
def CPP_to_Python ( s ) : s = CPP_to_Python_Ops_Expression . sub ( CPP_to_Python_Ops_Sub , s ) for expr , repl in CPP_to_Python_Eval_List : s = expr . sub ( repl , s ) return s
Converts a C pre - processor expression into an equivalent Python expression that can be evaluated .
69
18
22,408
def tupleize ( self , contents ) : global CPP_Expression , Table contents = line_continuations . sub ( '' , contents ) cpp_tuples = CPP_Expression . findall ( contents ) return [ ( m [ 0 ] , ) + Table [ m [ 0 ] ] . match ( m [ 1 ] ) . groups ( ) for m in cpp_tuples ]
Turns the contents of a file into a list of easily - processed tuples describing the CPP lines in the file .
85
25
22,409
def process_contents ( self , contents , fname = None ) : self . stack = [ ] self . dispatch_table = self . default_table . copy ( ) self . current_file = fname self . tuples = self . tupleize ( contents ) self . initialize_result ( fname ) while self . tuples : t = self . tuples . pop ( 0 ) # Uncomment to see the list of tuples being processed (e.g., # to validate the CPP lines are being translated correctly). #print(t) self . dispatch_table [ t [ 0 ] ] ( t ) return self . finalize_result ( fname )
Pre - processes a file contents .
142
7
22,410
def save ( self ) : self . stack . append ( self . dispatch_table ) self . dispatch_table = self . default_table . copy ( )
Pushes the current dispatch table on the stack and re - initializes the current dispatch table to the default .
33
22
22,411
def eval_expression ( self , t ) : t = CPP_to_Python ( ' ' . join ( t [ 1 : ] ) ) try : return eval ( t , self . cpp_namespace ) except ( NameError , TypeError ) : return 0
Evaluates a C preprocessor expression .
57
9
22,412
def emit_rmic_classes ( target , source , env ) : class_suffix = env . get ( 'JAVACLASSSUFFIX' , '.class' ) classdir = env . get ( 'JAVACLASSDIR' ) if not classdir : try : s = source [ 0 ] except IndexError : classdir = '.' else : try : classdir = s . attributes . java_classdir except AttributeError : classdir = '.' classdir = env . Dir ( classdir ) . rdir ( ) if str ( classdir ) == '.' : c_ = None else : c_ = str ( classdir ) + os . sep slist = [ ] for src in source : try : classname = src . attributes . java_classname except AttributeError : classname = str ( src ) if c_ and classname [ : len ( c_ ) ] == c_ : classname = classname [ len ( c_ ) : ] if class_suffix and classname [ : - len ( class_suffix ) ] == class_suffix : classname = classname [ - len ( class_suffix ) : ] s = src . rfile ( ) s . attributes . java_classdir = classdir s . attributes . java_classname = classname slist . append ( s ) stub_suffixes = [ '_Stub' ] if env . get ( 'JAVAVERSION' ) == '1.4' : stub_suffixes . append ( '_Skel' ) tlist = [ ] for s in source : for suff in stub_suffixes : fname = s . attributes . java_classname . replace ( '.' , os . sep ) + suff + class_suffix t = target [ 0 ] . File ( fname ) t . attributes . java_lookupdir = target [ 0 ] tlist . append ( t ) return tlist , source
Create and return lists of Java RMI stub and skeleton class files to be created from a set of class files .
422
23
22,413
def generate ( env ) : env [ 'BUILDERS' ] [ 'RMIC' ] = RMICBuilder env [ 'RMIC' ] = 'rmic' env [ 'RMICFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'RMICCOM' ] = '$RMIC $RMICFLAGS -d ${TARGET.attributes.java_lookupdir} -classpath ${SOURCE.attributes.java_classdir} ${SOURCES.attributes.java_classname}' env [ 'JAVACLASSSUFFIX' ] = '.class'
Add Builders and construction variables for rmic to an Environment .
136
13
22,414
def _set_scan_parameters ( self , interval = 2100 , window = 2100 , active = False ) : active_num = 0 if bool ( active ) : active_num = 1 interval_num = int ( interval * 1000 / 625 ) window_num = int ( window * 1000 / 625 ) payload = struct . pack ( "<HHB" , interval_num , window_num , active_num ) try : response = self . _send_command ( 6 , 7 , payload ) if response . payload [ 0 ] != 0 : return False , { 'reason' : "Could not set scanning parameters" , 'error' : response . payload [ 0 ] } except InternalTimeoutError : return False , { 'reason' : 'Timeout waiting for response' } return True , None
Set the scan interval and window in units of ms and set whether active scanning is performed
165
17
22,415
def _query_systemstate ( self ) : def status_filter_func ( event ) : if event . command_class == 3 and event . command == 0 : return True return False try : response = self . _send_command ( 0 , 6 , [ ] ) maxconn , = unpack ( "<B" , response . payload ) except InternalTimeoutError : return False , { 'reason' : 'Timeout waiting for command response' } events = self . _wait_process_events ( 0.5 , status_filter_func , lambda x : False ) conns = [ ] for event in events : handle , flags , addr , addr_type , interval , timeout , lat , bond = unpack ( "<BB6sBHHHB" , event . payload ) if flags != 0 : conns . append ( handle ) return True , { 'max_connections' : maxconn , 'active_connections' : conns }
Query the maximum number of connections supported by this adapter
199
10
22,416
def _start_scan ( self , active ) : success , retval = self . _set_scan_parameters ( active = active ) if not success : return success , retval try : response = self . _send_command ( 6 , 2 , [ 2 ] ) if response . payload [ 0 ] != 0 : self . _logger . error ( 'Error starting scan for devices, error=%d' , response . payload [ 0 ] ) return False , { 'reason' : "Could not initiate scan for ble devices, error_code=%d, response=%s" % ( response . payload [ 0 ] , response ) } except InternalTimeoutError : return False , { 'reason' : "Timeout waiting for response" } return True , None
Begin scanning forever
161
3
22,417
def _stop_scan ( self ) : try : response = self . _send_command ( 6 , 4 , [ ] ) if response . payload [ 0 ] != 0 : # Error code 129 means we just were not currently scanning if response . payload [ 0 ] != 129 : self . _logger . error ( 'Error stopping scan for devices, error=%d' , response . payload [ 0 ] ) return False , { 'reason' : "Could not stop scan for ble devices" } except InternalTimeoutError : return False , { 'reason' : "Timeout waiting for response" } except DeviceNotConfiguredError : return True , { 'reason' : "Device not connected (did you disconnect the dongle?" } return True , None
Stop scanning for BLE devices
157
6
22,418
def _probe_services ( self , handle ) : code = 0x2800 def event_filter_func ( event ) : if ( event . command_class == 4 and event . command == 2 ) : event_handle , = unpack ( "B" , event . payload [ 0 : 1 ] ) return event_handle == handle return False def end_filter_func ( event ) : if ( event . command_class == 4 and event . command == 1 ) : event_handle , = unpack ( "B" , event . payload [ 0 : 1 ] ) return event_handle == handle return False payload = struct . pack ( '<BHHBH' , handle , 1 , 0xFFFF , 2 , code ) try : response = self . _send_command ( 4 , 1 , payload ) except InternalTimeoutError : return False , { 'reason' : 'Timeout waiting for command response' } handle , result = unpack ( "<BH" , response . payload ) if result != 0 : return False , None events = self . _wait_process_events ( 0.5 , event_filter_func , end_filter_func ) gatt_events = [ x for x in events if event_filter_func ( x ) ] end_events = [ x for x in events if end_filter_func ( x ) ] if len ( end_events ) == 0 : return False , None #Make sure we successfully probed the gatt table end_event = end_events [ 0 ] _ , result , _ = unpack ( "<BHH" , end_event . payload ) if result != 0 : self . _logger . warn ( "Error enumerating GATT table, protocol error code = %d (0x%X)" % ( result , result ) ) return False , None services = { } for event in gatt_events : process_gatt_service ( services , event ) return True , { 'services' : services }
Probe for all primary services and characteristics in those services
419
11
22,419
def _probe_characteristics ( self , conn , services , timeout = 5.0 ) : for service in services . values ( ) : success , result = self . _enumerate_handles ( conn , service [ 'start_handle' ] , service [ 'end_handle' ] ) if not success : return False , None attributes = result [ 'attributes' ] service [ 'characteristics' ] = { } last_char = None for handle , attribute in attributes . items ( ) : if attribute [ 'uuid' ] . hex [ - 4 : ] == '0328' : success , result = self . _read_handle ( conn , handle , timeout ) if not success : return False , None value = result [ 'data' ] char = parse_characteristic_declaration ( value ) service [ 'characteristics' ] [ char [ 'uuid' ] ] = char last_char = char elif attribute [ 'uuid' ] . hex [ - 4 : ] == '0229' : if last_char is None : return False , None success , result = self . _read_handle ( conn , handle , timeout ) if not success : return False , None value = result [ 'data' ] assert len ( value ) == 2 value , = unpack ( "<H" , value ) last_char [ 'client_configuration' ] = { 'handle' : handle , 'value' : value } return True , { 'services' : services }
Probe gatt services for all associated characteristics in a BLE device
316
14
22,420
def _enable_rpcs ( self , conn , services , timeout = 1.0 ) : #FIXME: Check for characteristic existence in a try/catch and return failure if not found success , result = self . _set_notification ( conn , services [ TileBusService ] [ 'characteristics' ] [ TileBusReceiveHeaderCharacteristic ] , True , timeout ) if not success : return success , result return self . _set_notification ( conn , services [ TileBusService ] [ 'characteristics' ] [ TileBusReceivePayloadCharacteristic ] , True , timeout )
Prepare this device to receive RPCs
126
8
22,421
def _disable_rpcs ( self , conn , services , timeout = 1.0 ) : success , result = self . _set_notification ( conn , services [ TileBusService ] [ 'characteristics' ] [ TileBusReceiveHeaderCharacteristic ] , False , timeout ) if not success : return success , result return self . _set_notification ( conn , services [ TileBusService ] [ 'characteristics' ] [ TileBusReceivePayloadCharacteristic ] , False , timeout )
Prevent this device from receiving more RPCs
107
9
22,422
def _write_handle ( self , conn , handle , ack , value , timeout = 1.0 ) : conn_handle = conn char_handle = handle def write_handle_acked ( event ) : if event . command_class == 4 and event . command == 1 : conn , _ , char = unpack ( "<BHH" , event . payload ) return conn_handle == conn and char_handle == char data_len = len ( value ) if data_len > 20 : return False , { 'reason' : 'Data too long to write' } payload = struct . pack ( "<BHB%ds" % data_len , conn_handle , char_handle , data_len , value ) try : if ack : response = self . _send_command ( 4 , 5 , payload ) else : response = self . _send_command ( 4 , 6 , payload ) except InternalTimeoutError : return False , { 'reason' : 'Timeout waiting for response to command in _write_handle' } _ , result = unpack ( "<BH" , response . payload ) if result != 0 : return False , { 'reason' : 'Error writing to handle' , 'error_code' : result } if ack : events = self . _wait_process_events ( timeout , lambda x : False , write_handle_acked ) if len ( events ) == 0 : return False , { 'reason' : 'Timeout waiting for acknowledge on write' } _ , result , _ = unpack ( "<BHH" , events [ 0 ] . payload ) if result != 0 : return False , { 'reason' : 'Error received during write to handle' , 'error_code' : result } return True , None
Write to a BLE device characteristic by its handle
368
10
22,423
def _set_advertising_data ( self , packet_type , data ) : payload = struct . pack ( "<BB%ss" % ( len ( data ) ) , packet_type , len ( data ) , bytes ( data ) ) response = self . _send_command ( 6 , 9 , payload ) result , = unpack ( "<H" , response . payload ) if result != 0 : return False , { 'reason' : 'Error code from BLED112 setting advertising data' , 'code' : result } return True , None
Set the advertising data for advertisements sent out by this bled112
114
13
22,424
def _set_mode ( self , discover_mode , connect_mode ) : payload = struct . pack ( "<BB" , discover_mode , connect_mode ) response = self . _send_command ( 6 , 1 , payload ) result , = unpack ( "<H" , response . payload ) if result != 0 : return False , { 'reason' : 'Error code from BLED112 setting mode' , 'code' : result } return True , None
Set the mode of the BLED112 used to enable and disable advertising
98
14
22,425
def _send_notification ( self , handle , value ) : value_len = len ( value ) value = bytes ( value ) payload = struct . pack ( "<BHB%ds" % value_len , 0xFF , handle , value_len , value ) response = self . _send_command ( 2 , 5 , payload ) result , = unpack ( "<H" , response . payload ) if result != 0 : return False , { 'reason' : 'Error code from BLED112 notifying a value' , 'code' : result , 'handle' : handle , 'value' : value } return True , None
Send a notification to all connected clients on a characteristic
134
10
22,426
def _disconnect ( self , handle ) : payload = struct . pack ( '<B' , handle ) response = self . _send_command ( 3 , 0 , payload ) conn_handle , result = unpack ( "<BH" , response . payload ) if result != 0 : self . _logger . info ( "Disconnection failed result=%d" , result ) return False , None assert conn_handle == handle def disconnect_succeeded ( event ) : if event . command_class == 3 and event . command == 4 : event_handle , = unpack ( "B" , event . payload [ 0 : 1 ] ) return event_handle == handle return False #FIXME Hardcoded timeout events = self . _wait_process_events ( 3.0 , lambda x : False , disconnect_succeeded ) if len ( events ) != 1 : return False , None return True , { 'handle' : handle }
Disconnect from a device that we have previously connected to
201
11
22,427
def _send_command ( self , cmd_class , command , payload , timeout = 3.0 ) : if len ( payload ) > 60 : return ValueError ( "Attempting to send a BGAPI packet with length > 60 is not allowed" , actual_length = len ( payload ) , command = command , command_class = cmd_class ) header = bytearray ( 4 ) header [ 0 ] = 0 header [ 1 ] = len ( payload ) header [ 2 ] = cmd_class header [ 3 ] = command packet = header + bytearray ( payload ) self . _stream . write ( bytes ( packet ) ) #Every command has a response so wait for the response here response = self . _receive_packet ( timeout ) return response
Send a BGAPI packet to the dongle and return the response
162
14
22,428
def _receive_packet ( self , timeout = 3.0 ) : while True : response_data = self . _stream . read_packet ( timeout = timeout ) response = BGAPIPacket ( is_event = ( response_data [ 0 ] == 0x80 ) , command_class = response_data [ 2 ] , command = response_data [ 3 ] , payload = response_data [ 4 : ] ) if response . is_event : if self . event_handler is not None : self . event_handler ( response ) continue return response
Receive a response packet to a command
120
8
22,429
def _wait_process_events ( self , total_time , return_filter , end_filter ) : acc = [ ] delta = 0.01 start_time = time . time ( ) end_time = start_time + total_time while time . time ( ) < end_time : events = self . _process_events ( lambda x : return_filter ( x ) or end_filter ( x ) , max_events = 1 ) acc += events for event in events : if end_filter ( event ) : return acc if len ( events ) == 0 : time . sleep ( delta ) return acc
Synchronously process events until a specific event is found or we timeout
129
14
22,430
def connect ( self , client_id ) : if self . client is not None : raise InternalError ( "Connect called on an alreaded connected MQTT client" ) client = AWSIoTPythonSDK . MQTTLib . AWSIoTMQTTClient ( client_id , useWebsocket = self . websockets ) if self . websockets : client . configureEndpoint ( self . endpoint , 443 ) client . configureCredentials ( self . root ) if self . iam_session is None : client . configureIAMCredentials ( self . iam_key , self . iam_secret ) else : client . configureIAMCredentials ( self . iam_key , self . iam_secret , self . iam_session ) else : client . configureEndpoint ( self . endpoint , 8883 ) client . configureCredentials ( self . root , self . key , self . cert ) client . configureOfflinePublishQueueing ( 0 ) try : client . connect ( ) self . client = client except operationError as exc : raise InternalError ( "Could not connect to AWS IOT" , message = exc . message ) self . sequencer . reset ( )
Connect to AWS IOT with the given client_id
260
11
22,431
def disconnect ( self ) : if self . client is None : return try : self . client . disconnect ( ) except operationError as exc : raise InternalError ( "Could not disconnect from AWS IOT" , message = exc . message )
Disconnect from AWS IOT message broker
49
8
22,432
def publish ( self , topic , message ) : seq = self . sequencer . next_id ( topic ) packet = { 'sequence' : seq , 'message' : message } # Need to encode bytes types for json.dumps if 'key' in packet [ 'message' ] : packet [ 'message' ] [ 'key' ] = packet [ 'message' ] [ 'key' ] . decode ( 'utf8' ) if 'payload' in packet [ 'message' ] : packet [ 'message' ] [ 'payload' ] = packet [ 'message' ] [ 'payload' ] . decode ( 'utf8' ) if 'script' in packet [ 'message' ] : packet [ 'message' ] [ 'script' ] = packet [ 'message' ] [ 'script' ] . decode ( 'utf8' ) if 'trace' in packet [ 'message' ] : packet [ 'message' ] [ 'trace' ] = packet [ 'message' ] [ 'trace' ] . decode ( 'utf8' ) if 'report' in packet [ 'message' ] : packet [ 'message' ] [ 'report' ] = packet [ 'message' ] [ 'report' ] . decode ( 'utf8' ) if 'received_time' in packet [ 'message' ] : packet [ 'message' ] [ 'received_time' ] = packet [ 'message' ] [ 'received_time' ] . decode ( 'utf8' ) serialized_packet = json . dumps ( packet ) try : # Limit how much we log in case the message is very long self . _logger . debug ( "Publishing %s on topic %s" , serialized_packet [ : 256 ] , topic ) self . client . publish ( topic , serialized_packet , 1 ) except operationError as exc : raise InternalError ( "Could not publish message" , topic = topic , message = exc . message )
Publish a json message to a topic with a type and a sequence number
417
15
22,433
def subscribe ( self , topic , callback , ordered = True ) : if '+' in topic or '#' in topic : regex = re . compile ( topic . replace ( '+' , '[^/]+' ) . replace ( '#' , '.*' ) ) self . wildcard_queues . append ( ( topic , regex , callback , ordered ) ) else : self . queues [ topic ] = PacketQueue ( 0 , callback , ordered ) try : self . client . subscribe ( topic , 1 , self . _on_receive ) except operationError as exc : raise InternalError ( "Could not subscribe to topic" , topic = topic , message = exc . message )
Subscribe to future messages in the given topic
146
8
22,434
def reset_sequence ( self , topic ) : if topic in self . queues : self . queues [ topic ] . reset ( )
Reset the expected sequence number for a topic
27
9
22,435
def unsubscribe ( self , topic ) : del self . queues [ topic ] try : self . client . unsubscribe ( topic ) except operationError as exc : raise InternalError ( "Could not unsubscribe from topic" , topic = topic , message = exc . message )
Unsubscribe from messages on a given topic
56
9
22,436
def _on_receive ( self , client , userdata , message ) : topic = message . topic encoded = message . payload try : packet = json . loads ( encoded ) except ValueError : self . _logger . warn ( "Could not decode json packet: %s" , encoded ) return try : seq = packet [ 'sequence' ] message_data = packet [ 'message' ] except KeyError : self . _logger . warn ( "Message received did not have required sequence and message keys: %s" , packet ) return # If we received a packet that does not fit into a queue, check our wildcard # queues if topic not in self . queues : found = False for _ , regex , callback , ordered in self . wildcard_queues : if regex . match ( topic ) : self . queues [ topic ] = PacketQueue ( 0 , callback , ordered ) found = True break if not found : self . _logger . warn ( "Received message for unknown topic: %s" , topic ) return self . queues [ topic ] . receive ( seq , [ seq , topic , message_data ] )
Callback called whenever we receive a message on a subscribed topic
239
11
22,437
def run ( self , resources ) : hwman = resources [ 'connection' ] con = hwman . hwman . controller ( ) test_interface = con . test_interface ( ) try : test_interface . synchronize_clock ( ) print ( 'Time currently set at %s' % test_interface . current_time_str ( ) ) except : raise ArgumentError ( 'Error setting RTC time, check if controller actually has RTC or if iotile-support-lib-controller-3 is updated' )
Sets the RTC timestamp to UTC .
114
9
22,438
def add ( self , command , * args ) : cmd = Command ( command , args ) self . commands . append ( cmd )
Add a command to this command file .
27
8
22,439
def save ( self , outpath ) : with open ( outpath , "w" ) as outfile : outfile . write ( self . dump ( ) )
Save this command file as an ascii file .
34
11
22,440
def dump ( self ) : out = [ ] out . append ( self . filetype ) out . append ( "Format: {}" . format ( self . version ) ) out . append ( "Type: ASCII" ) out . append ( "" ) for cmd in self . commands : out . append ( self . encode ( cmd ) ) return "\n" . join ( out ) + "\n"
Dump all commands in this object to a string .
83
11
22,441
def FromString ( cls , indata ) : lines = [ x . strip ( ) for x in indata . split ( "\n" ) if not x . startswith ( '#' ) and not x . strip ( ) == "" ] if len ( lines ) < 3 : raise DataError ( "Invalid CommandFile string that did not contain 3 header lines" , lines = lines ) fmt_line , version_line , ascii_line = lines [ : 3 ] if not version_line . startswith ( "Format: " ) : raise DataError ( "Invalid format version that did not start with 'Format: '" , line = version_line ) version = version_line [ 8 : ] if ascii_line != "Type: ASCII" : raise DataError ( "Unknown file type line (expected Type: ASCII)" , line = ascii_line ) cmds = [ cls . decode ( x ) for x in lines [ 3 : ] ] return CommandFile ( fmt_line , version , cmds )
Load a CommandFile from a string .
222
8
22,442
def FromFile ( cls , inpath ) : with open ( inpath , "r" ) as infile : indata = infile . read ( ) return cls . FromString ( indata )
Load a CommandFile from a path .
44
8
22,443
def encode ( cls , command ) : args = [ ] for arg in command . args : if not isinstance ( arg , str ) : arg = str ( arg ) if "," in arg or arg . startswith ( " " ) or arg . endswith ( " " ) or arg . startswith ( "hex:" ) : arg = "hex:{}" . format ( hexlify ( arg . encode ( 'utf-8' ) ) . decode ( 'utf-8' ) ) args . append ( arg ) argstr = "" if len ( args ) > 0 : argstr = " {" + "," . join ( args ) + "}" return command . name + argstr
Encode a command as an unambiguous string .
148
10
22,444
def decode ( cls , command_str ) : name , _ , arg = command_str . partition ( " " ) args = [ ] if len ( arg ) > 0 : if arg [ 0 ] != '{' or arg [ - 1 ] != '}' : raise DataError ( "Invalid command, argument is not contained in { and }" , arg = arg , cmd = name ) arg = arg [ 1 : - 1 ] args = arg . split ( "," ) proc = [ ] for arg in args : if arg . startswith ( "hex:" ) : arg = unhexlify ( arg [ 4 : ] ) . decode ( 'utf-8' ) proc . append ( arg ) return Command ( name , proc )
Decode a string encoded command back into a Command object .
158
12
22,445
def receive ( self , sequence , args ) : # If we are told to ignore sequence numbers, just pass the packet on if not self . _reorder : self . _callback ( * args ) return # If this packet is in the past, drop it if self . _next_expected is not None and sequence < self . _next_expected : print ( "Dropping out of order packet, seq=%d" % sequence ) return self . _out_of_order . append ( ( sequence , args ) ) self . _out_of_order . sort ( key = lambda x : x [ 0 ] ) # If we have received packets, attempt to process them in order while len ( self . _out_of_order ) > 0 : seq , args = self . _out_of_order [ 0 ] if self . _next_expected is not None and seq != self . _next_expected : return self . _callback ( * args ) self . _out_of_order . pop ( 0 ) self . _next_expected = seq + 1
Receive one packet
225
4
22,446
def set_vars ( env ) : desired = env . get ( 'MWCW_VERSION' , '' ) # return right away if the variables are already set if isinstance ( desired , MWVersion ) : return 1 elif desired is None : return 0 versions = find_versions ( ) version = None if desired : for v in versions : if str ( v ) == desired : version = v elif versions : version = versions [ - 1 ] env [ 'MWCW_VERSIONS' ] = versions env [ 'MWCW_VERSION' ] = version if version is None : return 0 env . PrependENVPath ( 'PATH' , version . clpath ) env . PrependENVPath ( 'PATH' , version . dllpath ) ENV = env [ 'ENV' ] ENV [ 'CWFolder' ] = version . path ENV [ 'LM_LICENSE_FILE' ] = version . license plus = lambda x : '+%s' % x ENV [ 'MWCIncludes' ] = os . pathsep . join ( map ( plus , version . includes ) ) ENV [ 'MWLibraries' ] = os . pathsep . join ( map ( plus , version . libs ) ) return 1
Set MWCW_VERSION MWCW_VERSIONS and some codewarrior environment vars
267
21
22,447
def find_versions ( ) : versions = [ ] ### This function finds CodeWarrior by reading from the registry on ### Windows. Some other method needs to be implemented for other ### platforms, maybe something that calls env.WhereIs('mwcc') if SCons . Util . can_read_reg : try : HLM = SCons . Util . HKEY_LOCAL_MACHINE product = 'SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions' product_key = SCons . Util . RegOpenKeyEx ( HLM , product ) i = 0 while True : name = product + '\\' + SCons . Util . RegEnumKey ( product_key , i ) name_key = SCons . Util . RegOpenKeyEx ( HLM , name ) try : version = SCons . Util . RegQueryValueEx ( name_key , 'VERSION' ) path = SCons . Util . RegQueryValueEx ( name_key , 'PATH' ) mwv = MWVersion ( version [ 0 ] , path [ 0 ] , 'Win32-X86' ) versions . append ( mwv ) except SCons . Util . RegError : pass i = i + 1 except SCons . Util . RegError : pass return versions
Return a list of MWVersion objects representing installed versions
280
10
22,448
def generate ( env ) : import SCons . Defaults import SCons . Tool set_vars ( env ) static_obj , shared_obj = SCons . Tool . createObjBuilders ( env ) for suffix in CSuffixes : static_obj . add_action ( suffix , SCons . Defaults . CAction ) shared_obj . add_action ( suffix , SCons . Defaults . ShCAction ) for suffix in CXXSuffixes : static_obj . add_action ( suffix , SCons . Defaults . CXXAction ) shared_obj . add_action ( suffix , SCons . Defaults . ShCXXAction ) env [ 'CCCOMFLAGS' ] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -nolink -o $TARGET $SOURCES' env [ 'CC' ] = 'mwcc' env [ 'CCCOM' ] = '$CC $CFLAGS $CCFLAGS $CCCOMFLAGS' env [ 'CXX' ] = 'mwcc' env [ 'CXXCOM' ] = '$CXX $CXXFLAGS $CCCOMFLAGS' env [ 'SHCC' ] = '$CC' env [ 'SHCCFLAGS' ] = '$CCFLAGS' env [ 'SHCFLAGS' ] = '$CFLAGS' env [ 'SHCCCOM' ] = '$SHCC $SHCFLAGS $SHCCFLAGS $CCCOMFLAGS' env [ 'SHCXX' ] = '$CXX' env [ 'SHCXXFLAGS' ] = '$CXXFLAGS' env [ 'SHCXXCOM' ] = '$SHCXX $SHCXXFLAGS $CCCOMFLAGS' env [ 'CFILESUFFIX' ] = '.c' env [ 'CXXFILESUFFIX' ] = '.cpp' env [ 'CPPDEFPREFIX' ] = '-D' env [ 'CPPDEFSUFFIX' ] = '' env [ 'INCPREFIX' ] = '-I' env [ 'INCSUFFIX' ] = ''
Add Builders and construction variables for the mwcc to an Environment .
474
15
22,449
def run ( self , resources ) : if not resources [ 'connection' ] . _port . startswith ( 'jlink' ) : raise ArgumentError ( "FlashBoardStep is currently only possible through jlink" , invalid_port = args [ 'port' ] ) hwman = resources [ 'connection' ] debug = hwman . hwman . debug ( self . _debug_string ) debug . flash ( self . _file )
Runs the flash step
96
5
22,450
def copyto_emitter ( target , source , env ) : n_target = [ ] for t in target : n_target = n_target + [ t . File ( str ( s ) ) for s in source ] return ( n_target , source )
changes the path of the source to be under the target ( which are assumed to be directories .
56
19
22,451
def getPharLapPath ( ) : if not SCons . Util . can_read_reg : raise SCons . Errors . InternalError ( "No Windows registry module was found" ) try : k = SCons . Util . RegOpenKeyEx ( SCons . Util . HKEY_LOCAL_MACHINE , 'SOFTWARE\\Pharlap\\ETS' ) val , type = SCons . Util . RegQueryValueEx ( k , 'BaseDir' ) # The following is a hack...there is (not surprisingly) # an odd issue in the Phar Lap plug in that inserts # a bunch of junk data after the phar lap path in the # registry. We must trim it. idx = val . find ( '\0' ) if idx >= 0 : val = val [ : idx ] return os . path . normpath ( val ) except SCons . Util . RegError : raise SCons . Errors . UserError ( "Cannot find Phar Lap ETS path in the registry. Is it installed properly?" )
Reads the registry to find the installed path of the Phar Lap ETS development kit .
227
18
22,452
def addPharLapPaths ( env ) : ph_path = getPharLapPath ( ) try : env_dict = env [ 'ENV' ] except KeyError : env_dict = { } env [ 'ENV' ] = env_dict SCons . Util . AddPathIfNotExists ( env_dict , 'PATH' , os . path . join ( ph_path , 'bin' ) ) SCons . Util . AddPathIfNotExists ( env_dict , 'INCLUDE' , os . path . join ( ph_path , 'include' ) ) SCons . Util . AddPathIfNotExists ( env_dict , 'LIB' , os . path . join ( ph_path , 'lib' ) ) SCons . Util . AddPathIfNotExists ( env_dict , 'LIB' , os . path . join ( ph_path , os . path . normpath ( 'lib/vclib' ) ) ) env [ 'PHARLAP_PATH' ] = getPharLapPath ( ) env [ 'PHARLAP_VERSION' ] = str ( getPharLapVersion ( ) )
This function adds the path to the Phar Lap binaries includes and libraries if they are not already there .
260
20
22,453
def _update_or_init_po_files ( target , source , env ) : import SCons . Action from SCons . Tool . GettextCommon import _init_po_files for tgt in target : if tgt . rexists ( ) : action = SCons . Action . Action ( '$MSGMERGECOM' , '$MSGMERGECOMSTR' ) else : action = _init_po_files status = action ( [ tgt ] , source , env ) if status : return status return 0
Action function for POUpdate builder
114
8
22,454
def _POUpdateBuilder ( env , * * kw ) : import SCons . Action from SCons . Tool . GettextCommon import _POFileBuilder action = SCons . Action . Action ( _update_or_init_po_files , None ) return _POFileBuilder ( env , action = action , target_alias = '$POUPDATE_ALIAS' )
Create an object of POUpdate builder
83
9
22,455
def _POUpdateBuilderWrapper ( env , target = None , source = _null , * * kw ) : if source is _null : if 'POTDOMAIN' in kw : domain = kw [ 'POTDOMAIN' ] elif 'POTDOMAIN' in env and env [ 'POTDOMAIN' ] : domain = env [ 'POTDOMAIN' ] else : domain = 'messages' source = [ domain ] # NOTE: Suffix shall be appended automatically return env . _POUpdateBuilder ( target , source , * * kw )
Wrapper for POUpdate builder - make user s life easier
130
14
22,456
def generate ( env , * * kw ) : from SCons . Tool . GettextCommon import _detect_msgmerge try : env [ 'MSGMERGE' ] = _detect_msgmerge ( env ) except : env [ 'MSGMERGE' ] = 'msgmerge' env . SetDefault ( POTSUFFIX = [ '.pot' ] , POSUFFIX = [ '.po' ] , MSGMERGECOM = '$MSGMERGE $MSGMERGEFLAGS --update $TARGET $SOURCE' , MSGMERGECOMSTR = '' , MSGMERGEFLAGS = [ ] , POUPDATE_ALIAS = 'po-update' ) env . Append ( BUILDERS = { '_POUpdateBuilder' : _POUpdateBuilder ( env ) } ) env . AddMethod ( _POUpdateBuilderWrapper , 'POUpdate' ) env . AlwaysBuild ( env . Alias ( '$POUPDATE_ALIAS' ) )
Generate the xgettext tool
226
7
22,457
def _create_filter ( self ) : self . _product_filter = { } for chip in itertools . chain ( iter ( self . _family . targets ( self . _tile . short_name ) ) , iter ( [ self . _family . platform_independent_target ( ) ] ) ) : for key , prods in chip . property ( 'depends' , { } ) . items ( ) : name , _ , _ = key . partition ( ',' ) for prod in prods : if prod not in self . _product_filter : self . _product_filter [ prod ] = set ( ) self . _product_filter [ prod ] . add ( name )
Create a filter of all of the dependency products that we have selected .
146
14
22,458
def _create_product_map ( self ) : self . _product_map = { } for dep in self . _tile . dependencies : try : dep_tile = IOTile ( os . path . join ( 'build' , 'deps' , dep [ 'unique_id' ] ) ) except ( ArgumentError , EnvironmentError ) : raise BuildError ( "Could not find required dependency" , name = dep [ 'name' ] ) self . _add_products ( dep_tile ) self . _add_products ( self . _tile , show_all = True )
Create a map of all products produced by this or a dependency .
124
13
22,459
def _add_products ( self , tile , show_all = False ) : products = tile . products unique_id = tile . unique_id base_path = tile . output_folder for prod_path , prod_type in products . items ( ) : # We need to handle include_directories and tilebus_definitions # specially since those are stored reversed in module_settings.json # for historical reasons. Currently we don't support resolving # tilebus_definitions or include_directories in ProductResolver if prod_path == 'tilebus_definitions' or prod_path == 'include_directories' : continue if prod_type in self . IGNORED_PRODUCTS : continue prod_base = os . path . basename ( prod_path ) if prod_type not in self . _product_map : self . _product_map [ prod_type ] = { } prod_map = self . _product_map [ prod_type ] if prod_base not in prod_map : prod_map [ prod_base ] = [ ] full_path = os . path . normpath ( os . path . join ( base_path , prod_path ) ) info = ProductInfo ( prod_base , full_path , unique_id , not show_all and prod_base not in self . _product_filter ) prod_map [ prod_base ] . append ( info )
Add all products from a tile into our product map .
300
11
22,460
def find_all ( self , product_type , short_name , include_hidden = False ) : all_prods = [ ] # If product_type is not return products of all types if product_type is None : for prod_dict in self . _product_map . values ( ) : all_prods . extend ( [ prod for prod in prod_dict . get ( short_name , [ ] ) if include_hidden or not prod . hidden ] ) return all_prods all_prods = self . _product_map . get ( product_type , { } ) return [ prod for prod in all_prods . get ( short_name , [ ] ) if include_hidden or not prod . hidden ]
Find all providers of a given product by its short name .
156
12
22,461
def find_unique ( self , product_type , short_name , include_hidden = False ) : prods = self . find_all ( product_type , short_name , include_hidden ) if len ( prods ) == 0 : raise BuildError ( "Could not find product by name in find_unique" , name = short_name , type = product_type ) if len ( prods ) > 1 : raise BuildError ( "Multiple providers of the same product in find_unique" , name = short_name , type = product_type , products = prods ) if self . _tracking : self . _resolved_products . append ( prods [ 0 ] ) return prods [ 0 ]
Find the unique provider of a given product by its short name .
152
13
22,462
def main ( raw_args = None ) : multifile_choices = frozenset ( [ 'c_files' ] ) if raw_args is None : raw_args = sys . argv [ 1 : ] parser = build_parser ( ) args = parser . parse_args ( raw_args ) if args . output is None and args . format in multifile_choices : print ( "You must specify an output file with -o, --output when " "using a format that produces multiple files (-f %s)" % args . format ) return 1 desc = TBDescriptor ( args . bus_definition ) if args . format == 'json' : print ( "JSON output is not yet supported" ) return 1 block = desc . get_block ( ) template_map = { 'command_map_c' : 'command_map_c.c.tpl' , 'command_map_h' : 'command_map_c.h.tpl' , 'config_map_c' : 'config_variables_c.c.tpl' , 'config_map_h' : 'config_variables_c.h.tpl' } template_name = template_map . get ( args . format ) data = block . render_template ( template_name ) print ( data ) return 0
Run the iotile - tbcompile script .
287
12
22,463
def generate ( env ) : client = Builder ( action = rpcgen_client , suffix = '_clnt.c' , src_suffix = '.x' ) header = Builder ( action = rpcgen_header , suffix = '.h' , src_suffix = '.x' ) service = Builder ( action = rpcgen_service , suffix = '_svc.c' , src_suffix = '.x' ) xdr = Builder ( action = rpcgen_xdr , suffix = '_xdr.c' , src_suffix = '.x' ) env . Append ( BUILDERS = { 'RPCGenClient' : client , 'RPCGenHeader' : header , 'RPCGenService' : service , 'RPCGenXDR' : xdr } ) env [ 'RPCGEN' ] = 'rpcgen' env [ 'RPCGENFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'RPCGENCLIENTFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'RPCGENHEADERFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'RPCGENSERVICEFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'RPCGENXDRFLAGS' ] = SCons . Util . CLVar ( '' )
Add RPCGEN Builders and construction variables for an Environment .
310
12
22,464
def build_parser ( ) : parser = argparse . ArgumentParser ( "Release packages to pypi" ) parser . add_argument ( '--check' , '-c' , action = "store_true" , help = "Do a dry run without uploading" ) parser . add_argument ( 'component' , help = "The component to release as component-version" ) return parser
Build argument parsers .
84
5
22,465
def get_release_component ( comp ) : name , vers = comp . split ( "-" ) if name not in comp_names : print ( "Known components:" ) for comp in comp_names : print ( "- %s" % comp ) raise EnvironmentError ( "Unknown release component name '%s'" % name ) return name , vers
Split the argument passed on the command line into a component name and expected version
72
15
22,466
def check_compatibility ( name ) : comp = comp_names [ name ] if sys . version_info . major < 3 and comp . compat == "python3" : return False if sys . version_info . major >= 3 and comp . compat != "python3" : return False return True
Verify if we can release this component on the running interpreter .
63
13
22,467
def build_component ( component ) : comp = comp_names [ component ] curr = os . getcwd ( ) os . chdir ( comp . path ) args = [ '-q' , 'clean' , 'sdist' , 'bdist_wheel' ] if comp . compat == 'universal' : args . append ( '--universal' ) try : setuptools . sandbox . run_setup ( 'setup.py' , args ) finally : os . chdir ( curr )
Create an sdist and a wheel for the desired component
107
11
22,468
def uuid_to_slug ( uuid ) : if not isinstance ( uuid , int ) : raise ArgumentError ( "Invalid id that is not an integer" , id = uuid ) if uuid < 0 or uuid > 0x7fffffff : # For now, limiting support to a signed integer (which on some platforms, can be 32bits) raise ArgumentError ( "Integer should be a positive number and smaller than 0x7fffffff" , id = uuid ) return '--' . join ( [ 'd' , int64gid ( uuid ) ] )
Return IOTile Cloud compatible Device Slug
127
8
22,469
def package ( env , target , source , PACKAGEROOT , NAME , VERSION , DESCRIPTION , SUMMARY , X_IPK_PRIORITY , X_IPK_SECTION , SOURCE_URL , X_IPK_MAINTAINER , X_IPK_DEPENDS , * * kw ) : SCons . Tool . Tool ( 'ipkg' ) . generate ( env ) # setup the Ipkg builder bld = env [ 'BUILDERS' ] [ 'Ipkg' ] target , source = stripinstallbuilder ( target , source , env ) target , source = putintopackageroot ( target , source , env , PACKAGEROOT ) # This should be overrideable from the construction environment, # which it is by using ARCHITECTURE=. # Guessing based on what os.uname() returns at least allows it # to work for both i386 and x86_64 Linux systems. archmap = { 'i686' : 'i386' , 'i586' : 'i386' , 'i486' : 'i386' , } buildarchitecture = os . uname ( ) [ 4 ] buildarchitecture = archmap . get ( buildarchitecture , buildarchitecture ) if 'ARCHITECTURE' in kw : buildarchitecture = kw [ 'ARCHITECTURE' ] # setup the kw to contain the mandatory arguments to this function. # do this before calling any builder or setup function loc = locals ( ) del loc [ 'kw' ] kw . update ( loc ) del kw [ 'source' ] , kw [ 'target' ] , kw [ 'env' ] # generate the specfile specfile = gen_ipk_dir ( PACKAGEROOT , source , env , kw ) # override the default target. if str ( target [ 0 ] ) == "%s-%s" % ( NAME , VERSION ) : target = [ "%s_%s_%s.ipk" % ( NAME , VERSION , buildarchitecture ) ] # now apply the Ipkg builder return bld ( env , target , specfile , * * kw )
This function prepares the packageroot directory for packaging with the ipkg builder .
473
16
22,470
def build_specfiles ( source , target , env ) : # # At first we care for the CONTROL/control file, which is the main file for ipk. # # For this we need to open multiple files in random order, so we store into # a dict so they can be easily accessed. # # opened_files = { } def open_file ( needle , haystack ) : try : return opened_files [ needle ] except KeyError : file = filter ( lambda x : x . get_path ( ) . rfind ( needle ) != - 1 , haystack ) [ 0 ] opened_files [ needle ] = open ( file . get_abspath ( ) , 'w' ) return opened_files [ needle ] control_file = open_file ( 'control' , target ) if 'X_IPK_DESCRIPTION' not in env : env [ 'X_IPK_DESCRIPTION' ] = "%s\n %s" % ( env [ 'SUMMARY' ] , env [ 'DESCRIPTION' ] . replace ( '\n' , '\n ' ) ) content = """ Package: $NAME Version: $VERSION Priority: $X_IPK_PRIORITY Section: $X_IPK_SECTION Source: $SOURCE_URL Architecture: $ARCHITECTURE Maintainer: $X_IPK_MAINTAINER Depends: $X_IPK_DEPENDS Description: $X_IPK_DESCRIPTION """ control_file . write ( env . subst ( content ) ) # # now handle the various other files, which purpose it is to set post-, # pre-scripts and mark files as config files. # # We do so by filtering the source files for files which are marked with # the "config" tag and afterwards we do the same for x_ipk_postrm, # x_ipk_prerm, x_ipk_postinst and x_ipk_preinst tags. # # The first one will write the name of the file into the file # CONTROL/configfiles, the latter add the content of the x_ipk_* variable # into the same named file. # for f in [ x for x in source if 'PACKAGING_CONFIG' in dir ( x ) ] : config = open_file ( 'conffiles' ) config . write ( f . PACKAGING_INSTALL_LOCATION ) config . write ( '\n' ) for str in 'POSTRM PRERM POSTINST PREINST' . split ( ) : name = "PACKAGING_X_IPK_%s" % str for f in [ x for x in source if name in dir ( x ) ] : file = open_file ( name ) file . write ( env [ str ] ) # # close all opened files for f in list ( opened_files . values ( ) ) : f . close ( ) # call a user specified function if 'CHANGE_SPECFILE' in env : content += env [ 'CHANGE_SPECFILE' ] ( target ) return 0
Filter the targets for the needed files and use the variables in env to create the specfile .
660
19
22,471
def emit_java_headers ( target , source , env ) : class_suffix = env . get ( 'JAVACLASSSUFFIX' , '.class' ) classdir = env . get ( 'JAVACLASSDIR' ) if not classdir : try : s = source [ 0 ] except IndexError : classdir = '.' else : try : classdir = s . attributes . java_classdir except AttributeError : classdir = '.' classdir = env . Dir ( classdir ) . rdir ( ) if str ( classdir ) == '.' : c_ = None else : c_ = str ( classdir ) + os . sep slist = [ ] for src in source : try : classname = src . attributes . java_classname except AttributeError : classname = str ( src ) if c_ and classname [ : len ( c_ ) ] == c_ : classname = classname [ len ( c_ ) : ] if class_suffix and classname [ - len ( class_suffix ) : ] == class_suffix : classname = classname [ : - len ( class_suffix ) ] classname = SCons . Tool . javac . classname ( classname ) s = src . rfile ( ) s . attributes . java_classname = classname slist . append ( s ) s = source [ 0 ] . rfile ( ) if not hasattr ( s . attributes , 'java_classdir' ) : s . attributes . java_classdir = classdir if target [ 0 ] . __class__ is SCons . Node . FS . File : tlist = target else : if not isinstance ( target [ 0 ] , SCons . Node . FS . Dir ) : target [ 0 ] . __class__ = SCons . Node . FS . Dir target [ 0 ] . _morph ( ) tlist = [ ] for s in source : fname = s . attributes . java_classname . replace ( '.' , '_' ) + '.h' t = target [ 0 ] . File ( fname ) t . attributes . java_lookupdir = target [ 0 ] tlist . append ( t ) return tlist , source
Create and return lists of Java stub header files that will be created from a set of class files .
479
20
22,472
def generate ( env ) : java_javah = SCons . Tool . CreateJavaHBuilder ( env ) java_javah . emitter = emit_java_headers env [ '_JAVAHOUTFLAG' ] = JavaHOutFlagGenerator env [ 'JAVAH' ] = 'javah' env [ 'JAVAHFLAGS' ] = SCons . Util . CLVar ( '' ) env [ '_JAVAHCLASSPATH' ] = getJavaHClassPath env [ 'JAVAHCOM' ] = '$JAVAH $JAVAHFLAGS $_JAVAHOUTFLAG $_JAVAHCLASSPATH ${SOURCES.attributes.java_classname}' env [ 'JAVACLASSSUFFIX' ] = '.class'
Add Builders and construction variables for javah to an Environment .
178
14
22,473
def dump ( self ) : return { u'storage_data' : [ x . asdict ( ) for x in self . storage_data ] , u'streaming_data' : [ x . asdict ( ) for x in self . streaming_data ] }
Serialize the state of this InMemoryStorageEngine to a dict .
59
14
22,474
def restore ( self , state ) : storage_data = state . get ( u'storage_data' , [ ] ) streaming_data = state . get ( u'streaming_data' , [ ] ) if len ( storage_data ) > self . storage_length or len ( streaming_data ) > self . streaming_length : raise ArgumentError ( "Cannot restore InMemoryStorageEngine, too many readings" , storage_size = len ( storage_data ) , storage_max = self . storage_length , streaming_size = len ( streaming_data ) , streaming_max = self . streaming_length ) self . storage_data = [ IOTileReading . FromDict ( x ) for x in storage_data ] self . streaming_data = [ IOTileReading . FromDict ( x ) for x in streaming_data ]
Restore the state of this InMemoryStorageEngine from a dict .
184
14
22,475
def count_matching ( self , selector , offset = 0 ) : if selector . output : data = self . streaming_data elif selector . buffered : data = self . storage_data else : raise ArgumentError ( "You can only pass a buffered selector to count_matching" , selector = selector ) count = 0 for i in range ( offset , len ( data ) ) : reading = data [ i ] stream = DataStream . FromEncoded ( reading . stream ) if selector . matches ( stream ) : count += 1 return count
Count the number of readings matching selector .
115
8
22,476
def scan_storage ( self , area_name , callable , start = 0 , stop = None ) : if area_name == u'storage' : data = self . storage_data elif area_name == u'streaming' : data = self . streaming_data else : raise ArgumentError ( "Unknown area name in scan_storage (%s) should be storage or streaming" % area_name ) if len ( data ) == 0 : return 0 if stop is None : stop = len ( data ) - 1 elif stop >= len ( data ) : raise ArgumentError ( "Given stop offset is greater than the highest offset supported" , length = len ( data ) , stop_offset = stop ) scanned = 0 for i in range ( start , stop + 1 ) : scanned += 1 should_break = callable ( i , data [ i ] ) if should_break is True : break return scanned
Iterate over streaming or storage areas calling callable .
193
11
22,477
def push ( self , value ) : stream = DataStream . FromEncoded ( value . stream ) if stream . stream_type == DataStream . OutputType : if len ( self . streaming_data ) == self . streaming_length : raise StorageFullError ( 'Streaming buffer full' ) self . streaming_data . append ( value ) else : if len ( self . storage_data ) == self . storage_length : raise StorageFullError ( 'Storage buffer full' ) self . storage_data . append ( value )
Store a new value for the given stream .
111
9
22,478
def get ( self , buffer_type , offset ) : if buffer_type == u'streaming' : chosen_buffer = self . streaming_data else : chosen_buffer = self . storage_data if offset >= len ( chosen_buffer ) : raise StreamEmptyError ( "Invalid index given in get command" , requested = offset , stored = len ( chosen_buffer ) , buffer = buffer_type ) return chosen_buffer [ offset ]
Get a reading from the buffer at offset .
94
9
22,479
def popn ( self , buffer_type , count ) : buffer_type = str ( buffer_type ) if buffer_type == u'streaming' : chosen_buffer = self . streaming_data else : chosen_buffer = self . storage_data if count > len ( chosen_buffer ) : raise StreamEmptyError ( "Not enough data in buffer for popn command" , requested = count , stored = len ( chosen_buffer ) , buffer = buffer_type ) popped = chosen_buffer [ : count ] remaining = chosen_buffer [ count : ] if buffer_type == u'streaming' : self . streaming_data = remaining else : self . storage_data = remaining return popped
Remove and return the oldest count values from the named buffer
149
11
22,480
async def send_script ( self , conn_id , data ) : self . _ensure_connection ( conn_id , True ) connection_string = self . _get_property ( conn_id , "connection_string" ) msg = dict ( connection_string = connection_string , fragment_count = 1 , fragment_index = 0 , script = base64 . b64encode ( data ) ) await self . _send_command ( OPERATIONS . SEND_SCRIPT , msg , COMMANDS . SendScriptResponse )
Send a a script to this IOTile device
115
10
22,481
async def _on_report_notification ( self , event ) : conn_string = event . get ( 'connection_string' ) report = self . _report_parser . deserialize_report ( event . get ( 'serialized_report' ) ) self . notify_event ( conn_string , 'report' , report )
Callback function called when a report event is received .
73
10
22,482
async def _on_trace_notification ( self , trace_event ) : conn_string = trace_event . get ( 'connection_string' ) payload = trace_event . get ( 'payload' ) await self . notify_event ( conn_string , 'trace' , payload )
Callback function called when a trace chunk is received .
64
10
22,483
async def _on_progress_notification ( self , progress ) : conn_string = progress . get ( 'connection_string' ) done = progress . get ( 'done_count' ) total = progress . get ( 'total_count' ) operation = progress . get ( 'operation' ) await self . notify_progress ( conn_string , operation , done , total , wait = True )
Callback function called when a progress notification is received .
85
10
22,484
def _extract_variables ( param ) : variables = set ( ) if isinstance ( param , list ) : variables . update ( * [ _extract_variables ( x ) for x in param ] ) elif isinstance ( param , dict ) : variables . update ( * [ _extract_variables ( x ) for x in param . values ( ) ] ) elif isinstance ( param , str ) : for match in re . finditer ( TEMPLATE_REGEX , param ) : if match . group ( 'short_id' ) is not None : variables . add ( match . group ( 'short_id' ) ) else : variables . add ( match . group ( 'long_id' ) ) return variables
Find all template variables in args .
159
7
22,485
def _run_step ( step_obj , step_declaration , initialized_resources ) : start_time = time . time ( ) # Open any resources that need to be opened before we run this step for res_name in step_declaration . resources . opened : initialized_resources [ res_name ] . open ( ) # Create a dictionary of all of the resources that are required for this step used_resources = { local_name : initialized_resources [ global_name ] for local_name , global_name in step_declaration . resources . used . items ( ) } # Allow steps with no resources to not need a resources keyword parameter if len ( used_resources ) > 0 : out = step_obj . run ( resources = used_resources ) else : out = step_obj . run ( ) # Close any resources that need to be closed before we run this step for res_name in step_declaration . resources . closed : initialized_resources [ res_name ] . close ( ) end_time = time . time ( ) return ( end_time - start_time , out )
Actually run a step .
232
5
22,486
def archive ( self , output_path ) : if self . path is None : raise ArgumentError ( "Cannot archive a recipe yet without a reference to its original yaml file in self.path" ) outfile = zipfile . ZipFile ( output_path , 'w' , zipfile . ZIP_DEFLATED ) outfile . write ( self . path , arcname = "recipe_script.yaml" ) written_files = set ( ) for _factory , args , _resources , files in self . steps : for arg_name in files : file_path = args [ arg_name ] if file_path in written_files : continue if os . path . basename ( file_path ) != file_path : raise ArgumentError ( "Cannot archive a recipe yet that references file not in the same directory as the recipe" ) full_path = os . path . join ( os . path . dirname ( self . path ) , file_path ) outfile . write ( full_path , arcname = file_path ) written_files . add ( file_path )
Archive this recipe and all associated files into a . ship archive .
234
14
22,487
def FromArchive ( cls , path , actions_dict , resources_dict , temp_dir = None ) : if not path . endswith ( ".ship" ) : raise ArgumentError ( "Attempted to unpack a recipe archive from a file that did not end in .ship" , path = path ) name = os . path . basename ( path ) [ : - 5 ] if temp_dir is None : temp_dir = tempfile . mkdtemp ( ) extract_path = os . path . join ( temp_dir , name ) archive = zipfile . ZipFile ( path , "r" ) archive . extractall ( extract_path ) recipe_yaml = os . path . join ( extract_path , 'recipe_script.yaml' ) return cls . FromFile ( recipe_yaml , actions_dict , resources_dict , name = name )
Create a RecipeObject from a . ship archive .
190
10
22,488
def FromFile ( cls , path , actions_dict , resources_dict , file_format = "yaml" , name = None ) : format_map = { "yaml" : cls . _process_yaml } format_handler = format_map . get ( file_format ) if format_handler is None : raise ArgumentError ( "Unknown file format or file extension" , file_format = file_format , known_formats = [ x for x in format_map if format_map [ x ] is not None ] ) recipe_info = format_handler ( path ) if name is None : name , _ext = os . path . splitext ( os . path . basename ( path ) ) # Validate that the recipe file is correctly formatted try : recipe_info = RecipeSchema . verify ( recipe_info ) except ValidationError as exc : raise RecipeFileInvalid ( "Recipe file does not match expected schema" , file = path , error_message = exc . msg , * * exc . params ) description = recipe_info . get ( 'description' ) # Parse out global default and shared resource information try : resources = cls . _parse_resource_declarations ( recipe_info . get ( 'resources' , [ ] ) , resources_dict ) defaults = cls . _parse_variable_defaults ( recipe_info . get ( "defaults" , [ ] ) ) steps = [ ] for i , action in enumerate ( recipe_info . get ( 'actions' , [ ] ) ) : action_name = action . pop ( 'name' ) if action_name is None : raise RecipeFileInvalid ( "Action is missing required name parameter" , parameters = action , path = path ) action_class = actions_dict . get ( action_name ) if action_class is None : raise UnknownRecipeActionType ( "Unknown step specified in recipe" , action = action_name , step = i + 1 , path = path ) # Parse out any resource usage in this step and make sure we only # use named resources step_resources = cls . _parse_resource_usage ( action , declarations = resources ) fixed_files , _variable_files = cls . _parse_file_usage ( action_class , action ) step = RecipeStep ( action_class , action , step_resources , fixed_files ) steps . append ( step ) return RecipeObject ( name , description , steps , resources , defaults , path ) except RecipeFileInvalid as exc : cls . _future_raise ( RecipeFileInvalid , RecipeFileInvalid ( exc . msg , recipe = name , * * exc . params ) , sys . exc_info ( ) [ 2 ] )
Create a RecipeObject from a file .
577
8
22,489
def _parse_file_usage ( cls , action_class , args ) : fixed_files = { } variable_files = [ ] if not hasattr ( action_class , 'FILES' ) : return fixed_files , variable_files for file_arg in action_class . FILES : arg_value = args . get ( file_arg ) if arg_value is None : raise RecipeFileInvalid ( "Action lists a file argument but none was given" , declared_argument = file_arg , passed_arguments = args ) variables = _extract_variables ( arg_value ) if len ( variables ) == 0 : fixed_files [ file_arg ] = arg_value else : variable_files . append ( arg_value ) return fixed_files , variable_files
Find all external files referenced by an action .
169
9
22,490
def _parse_resource_declarations ( cls , declarations , resource_map ) : resources = { } for decl in declarations : name = decl . pop ( 'name' ) typename = decl . pop ( 'type' ) desc = decl . pop ( 'description' , None ) autocreate = decl . pop ( 'autocreate' , False ) args = decl res_type = resource_map . get ( typename ) if res_type is None : raise UnknownRecipeResourceType ( "Could not find shared resource type" , type = typename , name = name ) # If the resource defines an argument schema, make sure we enforce it. if hasattr ( res_type , "ARG_SCHEMA" ) : try : args = res_type . ARG_SCHEMA . verify ( args ) except ValidationError as exc : raise RecipeFileInvalid ( "Recipe file resource declarttion has invalid parameters" , resource = name , error_message = exc . msg , * * exc . params ) if name in resources : raise RecipeFileInvalid ( "Attempted to add two shared resources with the same name" , name = name ) res = ResourceDeclaration ( name , resource_map . get ( typename ) , args , autocreate , desc , typename ) resources [ name ] = res return resources
Parse out what resources are declared as shared for this recipe .
283
13
22,491
def _parse_variable_defaults ( cls , defaults ) : default_dict = { } for item in defaults : key = next ( iter ( item ) ) value = item [ key ] if key in default_dict : raise RecipeFileInvalid ( "Default variable value specified twice" , name = key , old_value = default_dict [ key ] , new_value = value ) default_dict [ key ] = value return default_dict
Parse out all of the variable defaults .
94
9
22,492
def _parse_resource_usage ( cls , action_dict , declarations ) : raw_used = action_dict . pop ( 'use' , [ ] ) opened = [ x . strip ( ) for x in action_dict . pop ( 'open_before' , [ ] ) ] closed = [ x . strip ( ) for x in action_dict . pop ( 'close_after' , [ ] ) ] used = { } for resource in raw_used : if 'as' in resource : global_name , _ , local_name = resource . partition ( 'as' ) global_name = global_name . strip ( ) local_name = local_name . strip ( ) if len ( global_name ) == 0 or len ( local_name ) == 0 : raise RecipeFileInvalid ( "Resource usage specified in action with invalid name using 'as' statement" , global_name = global_name , local_name = local_name , statement = resource ) else : global_name = resource . strip ( ) local_name = global_name if local_name in used : raise RecipeFileInvalid ( "Resource specified twice for action" , args = action_dict , resource = local_name , used_resources = used ) used [ local_name ] = global_name # Make sure we only use, open and close declared resources for name in ( x for x in used . values ( ) if x not in declarations ) : raise RecipeFileInvalid ( "Action makes use of non-declared shared resource" , name = name ) for name in ( x for x in opened if x not in declarations ) : raise RecipeFileInvalid ( "Action specified a non-declared shared resource in open_before" , name = name ) for name in ( x for x in closed if x not in declarations ) : raise RecipeFileInvalid ( "Action specified a non-declared shared resource in close_after" , name = name ) return ResourceUsage ( used , opened , closed )
Parse out what resources are used opened and closed in an action step .
419
15
22,493
def prepare ( self , variables ) : initializedsteps = [ ] if variables is None : variables = dict ( ) for step , params , _resources , _files in self . steps : new_params = _complete_parameters ( params , variables ) initializedsteps . append ( step ( new_params ) ) return initializedsteps
Initialize all steps in this recipe using their parameters .
67
11
22,494
def _prepare_resources ( self , variables , overrides = None ) : if overrides is None : overrides = { } res_map = { } own_map = { } for decl in self . resources . values ( ) : resource = overrides . get ( decl . name ) if resource is None : args = _complete_parameters ( decl . args , variables ) resource = decl . type ( args ) own_map [ decl . name ] = resource if decl . autocreate : resource . open ( ) res_map [ decl . name ] = resource return res_map , own_map
Create and optionally open all shared resources .
129
8
22,495
def _cleanup_resources ( self , initialized_resources ) : cleanup_errors = [ ] # Make sure we clean up all resources that we can and don't error out at the # first one. for name , res in initialized_resources . items ( ) : try : if res . opened : res . close ( ) except Exception : _type , value , traceback = sys . exc_info ( ) cleanup_errors . append ( ( name , value , traceback ) ) if len ( cleanup_errors ) > 0 : raise RecipeResourceManagementError ( operation = "resource cleanup" , errors = cleanup_errors )
Cleanup all resources that we own that are open .
129
11
22,496
def run ( self , variables = None , overrides = None ) : old_dir = os . getcwd ( ) try : os . chdir ( self . run_directory ) initialized_steps = self . prepare ( variables ) owned_resources = { } try : print ( "Running in %s" % self . run_directory ) initialized_resources , owned_resources = self . _prepare_resources ( variables , overrides ) for i , ( step , decl ) in enumerate ( zip ( initialized_steps , self . steps ) ) : print ( "===> Step %d: %s\t Description: %s" % ( i + 1 , self . steps [ i ] [ 0 ] . __name__ , self . steps [ i ] [ 1 ] . get ( 'description' , '' ) ) ) runtime , out = _run_step ( step , decl , initialized_resources ) print ( "======> Time Elapsed: %.2f seconds" % runtime ) if out is not None : print ( out [ 1 ] ) finally : self . _cleanup_resources ( owned_resources ) finally : os . chdir ( old_dir )
Initialize and run this recipe .
248
7
22,497
def generate ( env ) : c_file , cxx_file = SCons . Tool . createCFileBuilders ( env ) # C c_file . add_action ( '.y' , YaccAction ) c_file . add_emitter ( '.y' , yEmitter ) c_file . add_action ( '.yacc' , YaccAction ) c_file . add_emitter ( '.yacc' , yEmitter ) # Objective-C c_file . add_action ( '.ym' , YaccAction ) c_file . add_emitter ( '.ym' , ymEmitter ) # C++ cxx_file . add_action ( '.yy' , YaccAction ) cxx_file . add_emitter ( '.yy' , yyEmitter ) env [ 'YACC' ] = env . Detect ( 'bison' ) or 'yacc' env [ 'YACCFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'YACCCOM' ] = '$YACC $YACCFLAGS -o $TARGET $SOURCES' env [ 'YACCHFILESUFFIX' ] = '.h' env [ 'YACCHXXFILESUFFIX' ] = '.hpp' env [ 'YACCVCGFILESUFFIX' ] = '.vcg'
Add Builders and construction variables for yacc to an Environment .
300
13
22,498
def generate ( env ) : SCons . Tool . createSharedLibBuilder ( env ) SCons . Tool . createProgBuilder ( env ) env [ 'LINK' ] = '$CC' env [ 'LINKFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'LINKCOM' ] = '$LINK -q $LINKFLAGS -e$TARGET $SOURCES $LIBS' env [ 'LIBDIRPREFIX' ] = '' env [ 'LIBDIRSUFFIX' ] = '' env [ 'LIBLINKPREFIX' ] = '' env [ 'LIBLINKSUFFIX' ] = '$LIBSUFFIX'
Add Builders and construction variables for Borland ilink to an Environment .
154
15
22,499
def find_sdk_dir ( self ) : if not SCons . Util . can_read_reg : debug ( 'find_sdk_dir(): can not read registry' ) return None hkey = self . HKEY_FMT % self . hkey_data debug ( 'find_sdk_dir(): checking registry:{}' . format ( hkey ) ) try : sdk_dir = common . read_reg ( hkey ) except SCons . Util . WinError as e : debug ( 'find_sdk_dir(): no SDK registry key {}' . format ( repr ( hkey ) ) ) return None debug ( 'find_sdk_dir(): Trying SDK Dir: {}' . format ( sdk_dir ) ) if not os . path . exists ( sdk_dir ) : debug ( 'find_sdk_dir(): {} not on file system' . format ( sdk_dir ) ) return None ftc = os . path . join ( sdk_dir , self . sanity_check_file ) if not os . path . exists ( ftc ) : debug ( "find_sdk_dir(): sanity check {} not found" . format ( ftc ) ) return None return sdk_dir
Try to find the MS SDK from the registry .
268
10