idx int64 0 251k | question stringlengths 53 3.53k | target stringlengths 5 1.23k | len_question int64 20 893 | len_target int64 3 238 |
|---|---|---|---|---|
21,700 | def initialize_remaining_constants ( self , value = 0 ) : remaining = [ ] for node , _inputs , _outputs in self . iterate_bfs ( ) : streams = node . input_streams ( ) + [ node . stream ] for stream in streams : if stream . stream_type is not DataStream . ConstantType : continue if stream not in self . constant_database : self . add_constant ( stream , value ) remaining . append ( stream ) return remaining | Ensure that all constant streams referenced in the sensor graph have a value . | 106 | 15 |
21,701 | def load_constants ( self ) : for stream , value in self . constant_database . items ( ) : self . sensor_log . push ( stream , IOTileReading ( 0 , stream . encode ( ) , value ) ) | Load all constants into their respective streams . | 50 | 8 |
21,702 | def get_config ( self , slot , config_id ) : if slot not in self . config_database : raise ArgumentError ( "No config variables have been set on specified slot" , slot = slot ) if config_id not in self . config_database [ slot ] : raise ArgumentError ( "Config variable has not been set on specified slot" , slot = slot , config_id = config_id ) return self . config_database [ slot ] [ config_id ] | Get a config variable assignment previously set on this sensor graph . | 101 | 12 |
21,703 | def is_output ( self , stream ) : for streamer in self . streamers : if streamer . selector . matches ( stream ) : return True return False | Check if a stream is a sensor graph output . | 34 | 10 |
21,704 | def get_tick ( self , name ) : name_map = { 'fast' : config_fast_tick_secs , 'user1' : config_tick1_secs , 'user2' : config_tick2_secs } config = name_map . get ( name ) if config is None : raise ArgumentError ( "Unknown tick requested" , name = name ) slot = SlotIdentifier . FromString ( 'controller' ) try : var = self . get_config ( slot , config ) return var [ 1 ] except ArgumentError : return 0 | Check the config variables to see if there is a configurable tick . | 121 | 14 |
21,705 | def mark_streamer ( self , index ) : self . _logger . debug ( "Marking streamer %d manually" , index ) if index >= len ( self . streamers ) : raise ArgumentError ( "Invalid streamer index" , index = index , num_streamers = len ( self . streamers ) ) self . _manually_triggered_streamers . add ( index ) | Manually mark a streamer that should trigger . | 87 | 10 |
21,706 | def check_streamers ( self , blacklist = None ) : ready = [ ] selected = set ( ) for i , streamer in enumerate ( self . streamers ) : if blacklist is not None and i in blacklist : continue if i in selected : continue marked = False if i in self . _manually_triggered_streamers : marked = True self . _manually_triggered_streamers . remove ( i ) if streamer . triggered ( marked ) : self . _logger . debug ( "Streamer %d triggered, manual=%s" , i , marked ) ready . append ( streamer ) selected . add ( i ) # Handle streamers triggered with another for j , streamer2 in enumerate ( self . streamers [ i : ] ) : if streamer2 . with_other == i and j not in selected and streamer2 . triggered ( True ) : self . _logger . debug ( "Streamer %d triggered due to with-other on %d" , j , i ) ready . append ( streamer2 ) selected . add ( j ) return ready | Check if any streamers are ready to produce a report . | 234 | 12 |
21,707 | def sort_nodes ( self ) : node_map = { id ( node ) : i for i , node in enumerate ( self . nodes ) } node_deps = { } for node , inputs , _outputs in self . iterate_bfs ( ) : node_index = node_map [ id ( node ) ] deps = { node_map [ id ( x ) ] for x in inputs } node_deps [ node_index ] = deps # Now that we have our dependency tree properly built, topologically # sort the nodes and reorder them. node_order = toposort_flatten ( node_deps ) self . nodes = [ self . nodes [ x ] for x in node_order ] #Check root nodes all topographically sorted to the beginning for root in self . roots : if root not in self . nodes [ 0 : len ( self . roots ) ] : raise NodeConnectionError ( "Inputs not sorted in the beginning" , node = str ( root ) , node_position = self . nodes . index ( root ) ) | Topologically sort all of our nodes . | 229 | 8 |
21,708 | def generate ( env ) : try : bld = env [ 'BUILDERS' ] [ 'Ipkg' ] except KeyError : bld = SCons . Builder . Builder ( action = '$IPKGCOM' , suffix = '$IPKGSUFFIX' , source_scanner = None , target_scanner = None ) env [ 'BUILDERS' ] [ 'Ipkg' ] = bld env [ 'IPKG' ] = 'ipkg-build' env [ 'IPKGCOM' ] = '$IPKG $IPKGFLAGS ${SOURCE}' if env . WhereIs ( 'id' ) : env [ 'IPKGUSER' ] = os . popen ( 'id -un' ) . read ( ) . strip ( ) env [ 'IPKGGROUP' ] = os . popen ( 'id -gn' ) . read ( ) . strip ( ) env [ 'IPKGFLAGS' ] = SCons . Util . CLVar ( '-o $IPKGUSER -g $IPKGGROUP' ) env [ 'IPKGSUFFIX' ] = '.ipk' | Add Builders and construction variables for ipkg to an Environment . | 254 | 13 |
21,709 | def triggered ( self , walker ) : if self . use_count : comp_value = walker . count ( ) else : if walker . count ( ) == 0 : return False comp_value = walker . peek ( ) . value return self . comp_function ( comp_value , self . reference ) | Check if this input is triggered on the given stream walker . | 67 | 13 |
21,710 | def connect_input ( self , index , walker , trigger = None ) : if trigger is None : trigger = TrueTrigger ( ) if index >= len ( self . inputs ) : raise TooManyInputsError ( "Input index exceeded max number of inputs" , index = index , max_inputs = len ( self . inputs ) , stream = self . stream ) self . inputs [ index ] = ( walker , trigger ) | Connect an input to a stream walker . | 90 | 9 |
21,711 | def input_streams ( self ) : streams = [ ] for walker , _trigger in self . inputs : if walker . selector is None or not walker . selector . singular : continue streams . append ( walker . selector . as_stream ( ) ) return streams | Return a list of DataStream objects for all singular input streams . | 58 | 13 |
21,712 | def find_input ( self , stream ) : for i , input_x in enumerate ( self . inputs ) : if input_x [ 0 ] . matches ( stream ) : return i | Find the input that responds to this stream . | 40 | 9 |
21,713 | def num_inputs ( self ) : num = 0 for walker , _ in self . inputs : if not isinstance ( walker , InvalidStreamWalker ) : num += 1 return num | Return the number of connected inputs . | 40 | 7 |
21,714 | def connect_output ( self , node ) : if len ( self . outputs ) == self . max_outputs : raise TooManyOutputsError ( "Attempted to connect too many nodes to the output of a node" , max_outputs = self . max_outputs , stream = self . stream ) self . outputs . append ( node ) | Connect another node to our output . | 74 | 7 |
21,715 | def triggered ( self ) : trigs = [ x [ 1 ] . triggered ( x [ 0 ] ) for x in self . inputs ] if self . trigger_combiner == self . OrTriggerCombiner : return True in trigs return False not in trigs | Test if we should trigger our operation . | 55 | 8 |
21,716 | def set_func ( self , name , func ) : self . func_name = name self . func = func | Set the processing function to use for this node . | 24 | 10 |
21,717 | def process ( self , rpc_executor , mark_streamer = None ) : if self . func is None : raise ProcessingFunctionError ( 'No processing function set for node' , stream = self . stream ) results = self . func ( * [ x [ 0 ] for x in self . inputs ] , rpc_executor = rpc_executor , mark_streamer = mark_streamer ) if results is None : results = [ ] return results | Run this node s processing function . | 99 | 7 |
21,718 | def generate ( env ) : fortran . generate ( env ) env [ 'FORTRAN' ] = 'f90' env [ 'FORTRANCOM' ] = '$FORTRAN $FORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' env [ 'FORTRANPPCOM' ] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' env [ 'SHFORTRANCOM' ] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' env [ 'SHFORTRANPPCOM' ] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' env [ 'OBJSUFFIX' ] = '.obj' env [ 'FORTRANMODDIR' ] = '${TARGET.dir}' env [ 'FORTRANMODDIRPREFIX' ] = '/module:' env [ 'FORTRANMODDIRSUFFIX' ] = '' | Add Builders and construction variables for compaq visual fortran to an Environment . | 352 | 17 |
21,719 | def read ( self ) : self . build ( ) if not hasattr ( self , 'built_value' ) : self . built_value = self . value return self . built_value | Return the value . If necessary the value is built . | 40 | 11 |
21,720 | def get_csig ( self , calc = None ) : try : return self . ninfo . csig except AttributeError : pass contents = self . get_contents ( ) self . get_ninfo ( ) . csig = contents return contents | Because we re a Python value node and don t have a real timestamp we get to ignore the calculator and just use the value contents . | 54 | 27 |
21,721 | def mark_complex ( self , name , serializer , deserializer ) : self . _complex_properties [ name ] = ( serializer , deserializer ) | Mark a property as complex with serializer and deserializer functions . | 35 | 14 |
21,722 | def mark_typed_list ( self , name , type_object ) : if not hasattr ( type_object , 'dump' ) : raise ArgumentError ( "The passed type object %s is missing required method: dump()" % type_object ) if not hasattr ( type_object , 'Restore' ) : raise ArgumentError ( "The passed type object %s is missing required method: Restore()" % type_object ) def _dump_list ( obj ) : if obj is None : return None if not isinstance ( obj , list ) : raise DataError ( "Property %s marked as list was not a list: %s" % ( name , repr ( obj ) ) ) return [ x . dump ( ) for x in obj ] def _restore_list ( obj ) : if obj is None : return obj return [ type_object . Restore ( x ) for x in obj ] self . mark_complex ( name , _dump_list , _restore_list ) | Mark a property as containing serializable objects of a given type . | 212 | 13 |
21,723 | def mark_typed_map ( self , name , type_object ) : if not hasattr ( type_object , 'dump' ) : raise ArgumentError ( "The passed type object %s is missing required method: dump()" % type_object ) if not hasattr ( type_object , 'Restore' ) : raise ArgumentError ( "The passed type object %s is missing required method: Restore()" % type_object ) def _dump_map ( obj ) : if obj is None : return None if not isinstance ( obj , dict ) : raise DataError ( "Property %s marked as list was not a dict: %s" % ( name , repr ( obj ) ) ) return { key : val . dump ( ) for key , val in obj . items ( ) } def _restore_map ( obj ) : if obj is None : return obj return { key : type_object . Restore ( val ) for key , val in obj . items ( ) } self . mark_complex ( name , _dump_map , _restore_map ) | Mark a property as containing a map str to serializable object . | 228 | 13 |
21,724 | def mark_typed_object ( self , name , type_object ) : if not hasattr ( type_object , 'dump' ) : raise ArgumentError ( "The passed type object %s is missing required method: dump()" % type_object ) if not hasattr ( type_object , 'Restore' ) : raise ArgumentError ( "The passed type object %s is missing required method: Restore()" % type_object ) def _dump_obj ( obj ) : if obj is None : return None return obj . dump ( ) def _restore_obj ( obj ) : if obj is None : return obj return type_object . Restore ( obj ) self . mark_complex ( name , _dump_obj , _restore_obj ) | Mark a property as containing a serializable object . | 161 | 10 |
21,725 | def dump_property ( self , name ) : if not hasattr ( self , name ) : raise ArgumentError ( "Unknown property %s" % name ) value = getattr ( self , name ) if name in self . _complex_properties : value = self . _complex_properties [ name ] [ 0 ] ( value ) return value | Serialize a property of this class by name . | 71 | 10 |
21,726 | def get_properties ( self ) : names = inspect . getmembers ( self , predicate = lambda x : not inspect . ismethod ( x ) ) return [ x [ 0 ] for x in names if not x [ 0 ] . startswith ( "_" ) and x [ 0 ] not in self . _ignored_properties ] | Get a list of all of the public data properties of this class . | 70 | 14 |
21,727 | def get_default_version ( env ) : if 'MSVS' not in env or not SCons . Util . is_Dict ( env [ 'MSVS' ] ) : # get all versions, and remember them for speed later versions = [ vs . version for vs in get_installed_visual_studios ( ) ] env [ 'MSVS' ] = { 'VERSIONS' : versions } else : versions = env [ 'MSVS' ] . get ( 'VERSIONS' , [ ] ) if 'MSVS_VERSION' not in env : if versions : env [ 'MSVS_VERSION' ] = versions [ 0 ] #use highest version by default else : debug ( 'get_default_version: WARNING: no installed versions found, ' 'using first in SupportedVSList (%s)' % SupportedVSList [ 0 ] . version ) env [ 'MSVS_VERSION' ] = SupportedVSList [ 0 ] . version env [ 'MSVS' ] [ 'VERSION' ] = env [ 'MSVS_VERSION' ] return env [ 'MSVS_VERSION' ] | Returns the default version string to use for MSVS . | 232 | 11 |
21,728 | def get_default_arch ( env ) : arch = env . get ( 'MSVS_ARCH' , 'x86' ) msvs = InstalledVSMap . get ( env [ 'MSVS_VERSION' ] ) if not msvs : arch = 'x86' elif not arch in msvs . get_supported_arch ( ) : fmt = "Visual Studio version %s does not support architecture %s" raise SCons . Errors . UserError ( fmt % ( env [ 'MSVS_VERSION' ] , arch ) ) return arch | Return the default arch to use for MSVS | 118 | 9 |
21,729 | def format_rpc ( self , address , rpc_id , payload ) : addr_word = ( rpc_id | ( address << 16 ) | ( ( 1 << 1 ) << 24 ) ) send_length = len ( payload ) if len ( payload ) < 20 : payload = payload + b'\0' * ( 20 - len ( payload ) ) payload_words = struct . unpack ( "<5L" , payload ) return self . base_address + self . RPC_TLS_OFFSET + 8 , ( [ addr_word , send_length , 0 ] + [ x for x in payload_words ] ) | Create a formated word list that encodes this rpc . | 136 | 13 |
21,730 | def format_response ( self , response_data ) : _addr , length = self . response_info ( ) if len ( response_data ) != length : raise HardwareError ( "Invalid response read length, should be the same as what response_info() returns" , expected = length , actual = len ( response_data ) ) resp , flags , received_length , payload = struct . unpack ( "<HxBL4x20s" , response_data ) resp = resp & 0xFF if flags & ( 1 << 3 ) : raise HardwareError ( "Could not grab external gate" ) if received_length > 20 : raise HardwareError ( "Invalid received payload length > 20 bytes" , received_length = received_length ) payload = payload [ : received_length ] return { 'status' : resp , 'payload' : payload } | Format an RPC response . | 180 | 5 |
21,731 | def ProgramScanner ( * * kw ) : kw [ 'path_function' ] = SCons . Scanner . FindPathDirs ( 'LIBPATH' ) ps = SCons . Scanner . Base ( scan , "ProgramScanner" , * * kw ) return ps | Return a prototype Scanner instance for scanning executable files for static - lib dependencies | 62 | 15 |
21,732 | def _subst_libs ( env , libs ) : if SCons . Util . is_String ( libs ) : libs = env . subst ( libs ) if SCons . Util . is_String ( libs ) : libs = libs . split ( ) elif SCons . Util . is_Sequence ( libs ) : _libs = [ ] for l in libs : _libs += _subst_libs ( env , l ) libs = _libs else : # libs is an object (Node, for example) libs = [ libs ] return libs | Substitute environment variables and split into list . | 137 | 10 |
21,733 | def scan ( node , env , libpath = ( ) ) : try : libs = env [ 'LIBS' ] except KeyError : # There are no LIBS in this environment, so just return a null list: return [ ] libs = _subst_libs ( env , libs ) try : prefix = env [ 'LIBPREFIXES' ] if not SCons . Util . is_List ( prefix ) : prefix = [ prefix ] except KeyError : prefix = [ '' ] try : suffix = env [ 'LIBSUFFIXES' ] if not SCons . Util . is_List ( suffix ) : suffix = [ suffix ] except KeyError : suffix = [ '' ] pairs = [ ] for suf in map ( env . subst , suffix ) : for pref in map ( env . subst , prefix ) : pairs . append ( ( pref , suf ) ) result = [ ] if callable ( libpath ) : libpath = libpath ( ) find_file = SCons . Node . FS . find_file adjustixes = SCons . Util . adjustixes for lib in libs : if SCons . Util . is_String ( lib ) : for pref , suf in pairs : l = adjustixes ( lib , pref , suf ) l = find_file ( l , libpath , verbose = print_find_libs ) if l : result . append ( l ) else : result . append ( lib ) return result | This scanner scans program files for static - library dependencies . It will search the LIBPATH environment variable for libraries specified in the LIBS variable returning any files it finds as dependencies . | 318 | 35 |
21,734 | def clear_to_reset ( self , config_vars ) : super ( RemoteBridgeState , self ) . clear_to_reset ( config_vars ) self . status = BRIDGE_STATUS . IDLE self . error = 0 | Clear the RemoteBridge subsystem to its reset state . | 53 | 10 |
21,735 | def begin_script ( self ) : if self . remote_bridge . status in ( BRIDGE_STATUS . RECEIVED , BRIDGE_STATUS . VALIDATED , BRIDGE_STATUS . EXECUTING ) : return [ 1 ] #FIXME: Return correct error here self . remote_bridge . status = BRIDGE_STATUS . WAITING self . remote_bridge . error = 0 self . remote_bridge . script_error = None self . remote_bridge . parsed_script = None self . _device . script = bytearray ( ) return [ 0 ] | Indicate we are going to start loading a script . | 132 | 11 |
21,736 | def end_script ( self ) : if self . remote_bridge . status not in ( BRIDGE_STATUS . RECEIVED , BRIDGE_STATUS . WAITING ) : return [ 1 ] #FIXME: State change self . remote_bridge . status = BRIDGE_STATUS . RECEIVED return [ 0 ] | Indicate that we have finished receiving a script . | 76 | 10 |
21,737 | def trigger_script ( self ) : if self . remote_bridge . status not in ( BRIDGE_STATUS . RECEIVED , ) : return [ 1 ] #FIXME: State change # This is asynchronous in real life so just cache the error try : self . remote_bridge . parsed_script = UpdateScript . FromBinary ( self . _device . script ) #FIXME: Actually run the script self . remote_bridge . status = BRIDGE_STATUS . IDLE except Exception as exc : self . _logger . exception ( "Error parsing script streamed to device" ) self . remote_bridge . script_error = exc self . remote_bridge . error = 1 # FIXME: Error code return [ 0 ] | Actually process a script . | 158 | 5 |
21,738 | def reset_script ( self ) : self . remote_bridge . status = BRIDGE_STATUS . IDLE self . remote_bridge . error = 0 self . remote_bridge . parsed_script = None self . _device . script = bytearray ( ) return [ 0 ] | Clear any partially received script . | 62 | 6 |
21,739 | def render_template_inplace ( template_path , info , dry_run = False , extra_filters = None , resolver = None ) : filters = { } if resolver is not None : filters [ 'find_product' ] = _create_resolver_filter ( resolver ) if extra_filters is not None : filters . update ( extra_filters ) basedir = os . path . dirname ( template_path ) template_name = os . path . basename ( template_path ) if not template_name . endswith ( '.tpl' ) : raise ArgumentError ( "You must specify a filename that ends in .tpl" , filepath = template_path ) out_path = os . path . join ( basedir , template_name [ : - 4 ] ) if basedir == '' : basedir = '.' env = Environment ( loader = FileSystemLoader ( basedir ) , trim_blocks = True , lstrip_blocks = True ) # Load any filters the user wants us to use for name , func in filters . items ( ) : env . filters [ name ] = func template = env . get_template ( template_name ) result = template . render ( info ) if not dry_run : with open ( out_path , 'wb' ) as outfile : outfile . write ( result . encode ( 'utf-8' ) ) return out_path | Render a template file in place . | 302 | 7 |
21,740 | def render_template ( template_name , info , out_path = None ) : env = Environment ( loader = PackageLoader ( 'iotile.build' , 'config/templates' ) , trim_blocks = True , lstrip_blocks = True ) template = env . get_template ( template_name ) result = template . render ( info ) if out_path is not None : with open ( out_path , 'wb' ) as outfile : outfile . write ( result . encode ( 'utf-8' ) ) return result | Render a template using the variables in info . | 116 | 9 |
21,741 | def render_recursive_template ( template_folder , info , out_folder , preserve = None , dry_run = False ) : if isinstance ( preserve , str ) : raise ArgumentError ( "You must pass a list of strings to preserve, not a string" , preserve = preserve ) if preserve is None : preserve = [ ] preserve = set ( preserve ) template_dir = resource_path ( 'templates' , expect = 'folder' ) indir = os . path . join ( template_dir , template_folder ) if not os . path . exists ( indir ) : raise ArgumentError ( "Input template folder for recursive template not found" , template_folder = template_folder , absolute_path = indir ) elif not os . path . isdir ( indir ) : raise ArgumentError ( "Input template folder is not a directory" , template_folder = template_folder , absolute_path = indir ) create_dirs = [ ] file_map = { } # Walk over all input files for dirpath , dirs , files in os . walk ( indir ) : for file in files : in_abspath = os . path . abspath ( os . path . join ( dirpath , file ) ) in_path = os . path . relpath ( os . path . join ( dirpath , file ) , start = indir ) if file . endswith ( ".tpl" ) and not in_path in preserve : out_path = in_path [ : - 4 ] else : out_path = in_path file_map [ out_path ] = ( in_path , in_abspath ) for folder in dirs : dir_path = os . path . relpath ( os . path . join ( dirpath , folder ) , start = indir ) create_dirs . append ( dir_path ) # Actually render / copy all files if we are not doing a dry run if not dry_run : for folder in create_dirs : out_path = os . path . join ( out_folder , folder ) if not os . path . isdir ( out_path ) : os . makedirs ( out_path ) for out_rel , ( in_path , in_abspath ) in file_map . items ( ) : out_path = os . path . join ( out_folder , out_rel ) if in_path in preserve or not in_path . endswith ( ".tpl" ) : shutil . 
copyfile ( in_abspath , out_path ) else : # jinja needs to have unix path separators regardless of the platform and a relative path # from the templates base directory in_template_path = os . path . join ( template_folder , in_path ) . replace ( os . path . sep , '/' ) render_template ( in_template_path , info , out_path = out_path ) return file_map , create_dirs | Copy a directory tree rendering all templates found within . | 637 | 10 |
21,742 | def _find_monitor ( monitors , handle ) : found_devs = set ( ) found_events = set ( ) for conn_string , device in monitors . items ( ) : for event , handles in device . items ( ) : if handle in handles : found_events . add ( event ) found_devs . add ( conn_string ) return found_devs , found_events | Find all devices and events with a given monitor installed . | 83 | 11 |
21,743 | def _add_monitor ( monitors , handle , callback , devices , events ) : for conn_string in devices : data = monitors . get ( conn_string ) if data is None : data = dict ( ) monitors [ conn_string ] = data for event in events : event_dict = data . get ( event ) if event_dict is None : event_dict = dict ( ) data [ event ] = event_dict event_dict [ handle ] = callback | Add the given monitor to the listed devices and events . | 97 | 11 |
21,744 | def _remove_monitor ( monitors , handle , devices , events ) : empty_devices = [ ] for conn_string in devices : data = monitors . get ( conn_string ) if data is None : continue for event in events : event_dict = data . get ( event ) if event_dict is None : continue if handle in event_dict : del event_dict [ handle ] if len ( event_dict ) == 0 : del data [ event ] if len ( data ) == 0 : empty_devices . append ( conn_string ) return empty_devices | Remove the given monitor from the listed devices and events . | 118 | 11 |
21,745 | def register_monitor ( self , devices , events , callback ) : # Ensure we don't exhaust any iterables events = list ( events ) devices = list ( devices ) for event in events : if event not in self . SUPPORTED_EVENTS : raise ArgumentError ( "Unknown event type {} specified" . format ( event ) , events = events ) monitor_id = str ( uuid . uuid4 ( ) ) action = ( monitor_id , "add" , devices , events ) self . _callbacks [ monitor_id ] = callback if self . _currently_notifying : self . _deferred_adjustments . append ( action ) else : self . _adjust_monitor_internal ( * action ) return monitor_id | Register a callback when events happen . | 155 | 7 |
21,746 | def adjust_monitor ( self , handle , action , devices , events ) : events = list ( events ) devices = list ( devices ) for event in events : if event not in self . SUPPORTED_EVENTS : raise ArgumentError ( "Unknown event type {} specified" . format ( event ) , events = events ) if action not in self . SUPPORTED_ADJUSTMENTS : raise ArgumentError ( "Unknown adjustment {} specified" . format ( action ) ) action = ( handle , action , devices , events ) if self . _currently_notifying : self . _deferred_adjustments . append ( action ) else : self . _adjust_monitor_internal ( * action ) | Adjust a previously registered callback . | 142 | 6 |
21,747 | def remove_monitor ( self , handle ) : action = ( handle , "delete" , None , None ) if self . _currently_notifying : self . _deferred_adjustments . append ( action ) else : self . _adjust_monitor_internal ( * action ) | Remove a previously registered monitor . | 59 | 6 |
21,748 | async def _notify_event_internal ( self , conn_string , name , event ) : try : self . _currently_notifying = True conn_id = self . _get_conn_id ( conn_string ) event_maps = self . _monitors . get ( conn_string , { } ) wildcard_maps = self . _monitors . get ( None , { } ) wildcard_handlers = wildcard_maps . get ( name , { } ) event_handlers = event_maps . get ( name , { } ) for handler , func in itertools . chain ( event_handlers . items ( ) , wildcard_handlers . items ( ) ) : try : result = func ( conn_string , conn_id , name , event ) if inspect . isawaitable ( result ) : await result except : #pylint:disable=bare-except;This is a background function and we are logging exceptions self . _logger . warning ( "Error calling notification callback id=%s, func=%s" , handler , func , exc_info = True ) finally : for action in self . _deferred_adjustments : self . _adjust_monitor_internal ( * action ) self . _deferred_adjustments = [ ] self . _currently_notifying = False | Notify that an event has occured . | 286 | 9 |
21,749 | def notify_progress ( self , conn_string , operation , finished , total , wait = True ) : if operation not in self . PROGRESS_OPERATIONS : raise ArgumentError ( "Invalid operation for progress event: {}" . format ( operation ) ) event = dict ( operation = operation , finished = finished , total = total ) if wait : return self . notify_event ( conn_string , 'progress' , event ) self . notify_event_nowait ( conn_string , 'progress' , event ) return None | Send a progress event . | 111 | 5 |
21,750 | def generate ( env ) : cplusplus . generate ( env ) env [ 'CXX' ] = 'CC' env [ 'CXXFLAGS' ] = SCons . Util . CLVar ( '-LANG:std' ) env [ 'SHCXX' ] = '$CXX' env [ 'SHOBJSUFFIX' ] = '.o' env [ 'STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME' ] = 1 | Add Builders and construction variables for SGI MIPS C ++ to an Environment . | 108 | 17 |
21,751 | def read_packet ( self , timeout = 3.0 ) : try : return self . queue . get ( timeout = timeout ) except Empty : raise InternalTimeoutError ( "Timeout waiting for packet in AsyncPacketBuffer" ) | read one packet timeout if one packet is not available in the timeout period | 49 | 14 |
21,752 | def generate ( env ) : java_file = SCons . Tool . CreateJavaFileBuilder ( env ) java_class = SCons . Tool . CreateJavaClassFileBuilder ( env ) java_class_dir = SCons . Tool . CreateJavaClassDirBuilder ( env ) java_class . add_emitter ( None , emit_java_classes ) java_class . add_emitter ( env . subst ( '$JAVASUFFIX' ) , emit_java_classes ) java_class_dir . emitter = emit_java_classes env . AddMethod ( Java ) env [ 'JAVAC' ] = 'javac' env [ 'JAVACFLAGS' ] = SCons . Util . CLVar ( '' ) env [ 'JAVABOOTCLASSPATH' ] = [ ] env [ 'JAVACLASSPATH' ] = [ ] env [ 'JAVASOURCEPATH' ] = [ ] env [ '_javapathopt' ] = pathopt env [ '_JAVABOOTCLASSPATH' ] = '${_javapathopt("-bootclasspath", "JAVABOOTCLASSPATH")} ' env [ '_JAVACLASSPATH' ] = '${_javapathopt("-classpath", "JAVACLASSPATH")} ' env [ '_JAVASOURCEPATH' ] = '${_javapathopt("-sourcepath", "JAVASOURCEPATH", "_JAVASOURCEPATHDEFAULT")} ' env [ '_JAVASOURCEPATHDEFAULT' ] = '${TARGET.attributes.java_sourcedir}' env [ '_JAVACCOM' ] = '$JAVAC $JAVACFLAGS $_JAVABOOTCLASSPATH $_JAVACLASSPATH -d ${TARGET.attributes.java_classdir} $_JAVASOURCEPATH $SOURCES' env [ 'JAVACCOM' ] = "${TEMPFILE('$_JAVACCOM','$JAVACCOMSTR')}" env [ 'JAVACLASSSUFFIX' ] = '.class' env [ 'JAVASUFFIX' ] = '.java' | Add Builders and construction variables for javac to an Environment . | 501 | 14 |
21,753 | def Add ( self , key , help = "" , default = None , validator = None , converter = None , * * kw ) : if SCons . Util . is_List ( key ) or isinstance ( key , tuple ) : self . _do_add ( * key ) return if not SCons . Util . is_String ( key ) or not SCons . Environment . is_valid_construction_var ( key ) : raise SCons . Errors . UserError ( "Illegal Variables.Add() key `%s'" % str ( key ) ) self . _do_add ( key , help , default , validator , converter ) | Add an option . | 142 | 4 |
21,754 | def Update ( self , env , args = None ) : values = { } # first set the defaults: for option in self . options : if not option . default is None : values [ option . key ] = option . default # next set the value specified in the options file for filename in self . files : if os . path . exists ( filename ) : dir = os . path . split ( os . path . abspath ( filename ) ) [ 0 ] if dir : sys . path . insert ( 0 , dir ) try : values [ '__name__' ] = filename with open ( filename , 'r' ) as f : contents = f . read ( ) exec ( contents , { } , values ) finally : if dir : del sys . path [ 0 ] del values [ '__name__' ] # set the values specified on the command line if args is None : args = self . args for arg , value in args . items ( ) : added = False for option in self . options : if arg in list ( option . aliases ) + [ option . key ] : values [ option . key ] = value added = True if not added : self . unknown [ arg ] = value # put the variables in the environment: # (don't copy over variables that are not declared as options) for option in self . options : try : env [ option . key ] = values [ option . key ] except KeyError : pass # Call the convert functions: for option in self . options : if option . converter and option . key in values : value = env . subst ( '${%s}' % option . key ) try : try : env [ option . key ] = option . converter ( value ) except TypeError : env [ option . key ] = option . converter ( value , env ) except ValueError as x : raise SCons . Errors . UserError ( 'Error converting option: %s\n%s' % ( option . key , x ) ) # Finally validate the values: for option in self . options : if option . validator and option . key in values : option . validator ( option . key , env . subst ( '${%s}' % option . key ) , env ) | Update an environment with the option variables . | 462 | 8 |
21,755 | def Save ( self , filename , env ) : # Create the file and write out the header try : fh = open ( filename , 'w' ) try : # Make an assignment in the file for each option # within the environment that was assigned a value # other than the default. for option in self . options : try : value = env [ option . key ] try : prepare = value . prepare_to_store except AttributeError : try : eval ( repr ( value ) ) except KeyboardInterrupt : raise except : # Convert stuff that has a repr() that # cannot be evaluated into a string value = SCons . Util . to_String ( value ) else : value = prepare ( ) defaultVal = env . subst ( SCons . Util . to_String ( option . default ) ) if option . converter : defaultVal = option . converter ( defaultVal ) if str ( env . subst ( '${%s}' % option . key ) ) != str ( defaultVal ) : fh . write ( '%s = %s\n' % ( option . key , repr ( value ) ) ) except KeyError : pass finally : fh . close ( ) except IOError as x : raise SCons . Errors . UserError ( 'Error writing options to file: %s\n%s' % ( filename , x ) ) | Saves all the options in the given file . This file can then be used to load the options next run . This can be used to create an option cache file . | 284 | 34 |
21,756 | def GenerateHelpText ( self , env , sort = None ) : if callable ( sort ) : options = sorted ( self . options , key = cmp_to_key ( lambda x , y : sort ( x . key , y . key ) ) ) elif sort is True : options = sorted ( self . options , key = lambda x : x . key ) else : options = self . options def format ( opt , self = self , env = env ) : if opt . key in env : actual = env . subst ( '${%s}' % opt . key ) else : actual = None return self . FormatVariableHelpText ( env , opt . key , opt . help , opt . default , actual , opt . aliases ) lines = [ _f for _f in map ( format , options ) if _f ] return '' . join ( lines ) | Generate the help text for the options . | 183 | 9 |
21,757 | def render_tree ( root , child_func , prune = 0 , margin = [ 0 ] , visited = None ) : rname = str ( root ) # Initialize 'visited' dict, if required if visited is None : visited = { } children = child_func ( root ) retval = "" for pipe in margin [ : - 1 ] : if pipe : retval = retval + "| " else : retval = retval + " " if rname in visited : return retval + "+-[" + rname + "]\n" retval = retval + "+-" + rname + "\n" if not prune : visited = copy . copy ( visited ) visited [ rname ] = 1 for i in range ( len ( children ) ) : margin . append ( i < len ( children ) - 1 ) retval = retval + render_tree ( children [ i ] , child_func , prune , margin , visited ) margin . pop ( ) return retval | Render a tree of nodes into an ASCII tree view . | 213 | 11 |
21,758 | def print_tree ( root , child_func , prune = 0 , showtags = 0 , margin = [ 0 ] , visited = None ) : rname = str ( root ) # Initialize 'visited' dict, if required if visited is None : visited = { } if showtags : if showtags == 2 : legend = ( ' E = exists\n' + ' R = exists in repository only\n' + ' b = implicit builder\n' + ' B = explicit builder\n' + ' S = side effect\n' + ' P = precious\n' + ' A = always build\n' + ' C = current\n' + ' N = no clean\n' + ' H = no cache\n' + '\n' ) sys . stdout . write ( legend ) tags = [ '[' ] tags . append ( ' E' [ IDX ( root . exists ( ) ) ] ) tags . append ( ' R' [ IDX ( root . rexists ( ) and not root . exists ( ) ) ] ) tags . append ( ' BbB' [ [ 0 , 1 ] [ IDX ( root . has_explicit_builder ( ) ) ] + [ 0 , 2 ] [ IDX ( root . has_builder ( ) ) ] ] ) tags . append ( ' S' [ IDX ( root . side_effect ) ] ) tags . append ( ' P' [ IDX ( root . precious ) ] ) tags . append ( ' A' [ IDX ( root . always_build ) ] ) tags . append ( ' C' [ IDX ( root . is_up_to_date ( ) ) ] ) tags . append ( ' N' [ IDX ( root . noclean ) ] ) tags . append ( ' H' [ IDX ( root . nocache ) ] ) tags . append ( ']' ) else : tags = [ ] def MMM ( m ) : return [ " " , "| " ] [ m ] margins = list ( map ( MMM , margin [ : - 1 ] ) ) children = child_func ( root ) if prune and rname in visited and children : sys . stdout . write ( '' . join ( tags + margins + [ '+-[' , rname , ']' ] ) + '\n' ) return sys . stdout . write ( '' . join ( tags + margins + [ '+-' , rname ] ) + '\n' ) visited [ rname ] = 1 if children : margin . append ( 1 ) idx = IDX ( showtags ) for C in children [ : - 1 ] : print_tree ( C , child_func , prune , idx , margin , visited ) margin [ - 1 ] = 0 print_tree ( children [ - 1 ] , child_func , prune , idx , margin , visited ) margin . pop ( ) | Print a tree of nodes . This is like render_tree except it prints lines directly instead of creating a string representation in memory so that huge trees can be printed . | 622 | 33 |
21,759 | def flatten ( obj , isinstance = isinstance , StringTypes = StringTypes , SequenceTypes = SequenceTypes , do_flatten = do_flatten ) : if isinstance ( obj , StringTypes ) or not isinstance ( obj , SequenceTypes ) : return [ obj ] result = [ ] for item in obj : if isinstance ( item , StringTypes ) or not isinstance ( item , SequenceTypes ) : result . append ( item ) else : do_flatten ( item , result ) return result | Flatten a sequence to a non - nested list . | 107 | 11 |
21,760 | def unique ( s ) : n = len ( s ) if n == 0 : return [ ] # Try using a dict first, as that's the fastest and will usually # work. If it doesn't work, it will usually fail quickly, so it # usually doesn't cost much to *try* it. It requires that all the # sequence elements be hashable, and support equality comparison. u = { } try : for x in s : u [ x ] = 1 except TypeError : pass # move on to the next method else : return list ( u . keys ( ) ) del u # We can't hash all the elements. Second fastest is to sort, # which brings the equal elements together; then duplicates are # easy to weed out in a single pass. # NOTE: Python's list.sort() was designed to be efficient in the # presence of many duplicate elements. This isn't true of all # sort functions in all languages or libraries, so this approach # is more effective in Python than it may be elsewhere. try : t = sorted ( s ) except TypeError : pass # move on to the next method else : assert n > 0 last = t [ 0 ] lasti = i = 1 while i < n : if t [ i ] != last : t [ lasti ] = last = t [ i ] lasti = lasti + 1 i = i + 1 return t [ : lasti ] del t # Brute force is all that's left. u = [ ] for x in s : if x not in u : u . append ( x ) return u | Return a list of the elements in s but without duplicates . | 330 | 13 |
21,761 | def make_path_relative ( path ) : if os . path . isabs ( path ) : drive_s , path = os . path . splitdrive ( path ) import re if not drive_s : path = re . compile ( "/*(.*)" ) . findall ( path ) [ 0 ] else : path = path [ 1 : ] assert ( not os . path . isabs ( path ) ) , path return path | makes an absolute path name to a relative pathname . | 91 | 11 |
21,762 | def RenameFunction ( function , name ) : return FunctionType ( function . __code__ , function . __globals__ , name , function . __defaults__ ) | Returns a function identical to the specified function but with the specified name . | 37 | 14 |
21,763 | def _create_old_return_value ( payload , num_ints , buff ) : parsed = { 'ints' : payload [ : num_ints ] , 'buffer' : None , 'error' : 'No Error' , 'is_error' : False , 'return_value' : 0 } if buff : parsed [ 'buffer' ] = bytearray ( payload [ - 1 ] ) return parsed | Parse the response of an RPC call into a dictionary with integer and buffer results | 88 | 16 |
21,764 | def hardware_version ( self ) : res = self . rpc ( 0x00 , 0x02 , result_type = ( 0 , True ) ) # Result is a string but with zero appended to the end to make it a fixed 10 byte size binary_version = res [ 'buffer' ] ver = "" for x in binary_version : if x != 0 : ver += chr ( x ) return ver | Return the embedded hardware version string for this tile . | 88 | 10 |
21,765 | def check_hardware ( self , expected ) : if len ( expected ) < 10 : expected += '\0' * ( 10 - len ( expected ) ) err , = self . rpc ( 0x00 , 0x03 , expected , result_format = "L" ) if err == 0 : return True return False | Make sure the hardware version is what we expect . | 69 | 10 |
21,766 | def status ( self ) : hw_type , name , major , minor , patch , status = self . rpc ( 0x00 , 0x04 , result_format = "H6sBBBB" ) status = { 'hw_type' : hw_type , 'name' : name . decode ( 'utf-8' ) , 'version' : ( major , minor , patch ) , 'status' : status } return status | Query the status of an IOTile including its name and version | 95 | 13 |
21,767 | def tile_status ( self ) : stat = self . status ( ) flags = stat [ 'status' ] # FIXME: This needs to stay in sync with lib_common: cdb_status.h status = { } status [ 'debug_mode' ] = bool ( flags & ( 1 << 3 ) ) status [ 'configured' ] = bool ( flags & ( 1 << 1 ) ) status [ 'app_running' ] = bool ( flags & ( 1 << 0 ) ) status [ 'trapped' ] = bool ( flags & ( 1 << 2 ) ) return status | Get the current status of this tile | 124 | 7 |
21,768 | async def client_event_handler ( self , client_id , event_tuple , user_data ) : conn_string , event_name , _event = event_tuple self . _logger . debug ( "Ignoring event %s from device %s forwarded for client %s" , event_name , conn_string , client_id ) return None | Method called to actually send an event to a client . | 79 | 11 |
21,769 | def setup_client ( self , client_id = None , user_data = None , scan = True , broadcast = False ) : if client_id is None : client_id = str ( uuid . uuid4 ( ) ) if client_id in self . _clients : raise ArgumentError ( "Duplicate client_id: {}" . format ( client_id ) ) async def _client_callback ( conn_string , _ , event_name , event ) : event_tuple = ( conn_string , event_name , event ) await self . _forward_client_event ( client_id , event_tuple ) client_monitor = self . adapter . register_monitor ( [ ] , [ ] , _client_callback ) self . _clients [ client_id ] = dict ( user_data = user_data , connections = { } , monitor = client_monitor ) self . _adjust_global_events ( client_id , scan , broadcast ) return client_id | Setup a newly connected client . | 214 | 6 |
21,770 | async def stop ( self ) : clients = list ( self . _clients ) for client in clients : self . _logger . info ( "Tearing down client %s at server stop()" , client ) await self . teardown_client ( client ) | Stop the server and teardown any remaining clients . | 57 | 11 |
21,771 | async def teardown_client ( self , client_id ) : client_info = self . _client_info ( client_id ) self . adapter . remove_monitor ( client_info [ 'monitor' ] ) conns = client_info [ 'connections' ] for conn_string , conn_id in conns . items ( ) : try : self . _logger . debug ( "Disconnecting client %s from conn %s at teardown" , client_id , conn_string ) await self . adapter . disconnect ( conn_id ) except : #pylint:disable=bare-except; This is a finalization method that should not raise unexpectedly self . _logger . exception ( "Error disconnecting device during teardown_client: conn_string=%s" , conn_string ) del self . _clients [ client_id ] | Release all resources held by a client . | 189 | 8 |
21,772 | async def connect ( self , client_id , conn_string ) : conn_id = self . adapter . unique_conn_id ( ) self . _client_info ( client_id ) await self . adapter . connect ( conn_id , conn_string ) self . _hook_connect ( conn_string , conn_id , client_id ) | Connect to a device on behalf of a client . | 76 | 10 |
21,773 | async def disconnect ( self , client_id , conn_string ) : conn_id = self . _client_connection ( client_id , conn_string ) try : await self . adapter . disconnect ( conn_id ) finally : self . _hook_disconnect ( conn_string , client_id ) | Disconnect from a device on behalf of a client . | 66 | 11 |
21,774 | async def open_interface ( self , client_id , conn_string , interface ) : conn_id = self . _client_connection ( client_id , conn_string ) # Hook first so there is no race on getting the first event self . _hook_open_interface ( conn_string , interface , client_id ) await self . adapter . open_interface ( conn_id , interface ) | Open a device interface on behalf of a client . | 86 | 10 |
21,775 | async def close_interface ( self , client_id , conn_string , interface ) : conn_id = self . _client_connection ( client_id , conn_string ) await self . adapter . close_interface ( conn_id , interface ) self . _hook_close_interface ( conn_string , interface , client_id ) | Close a device interface on behalf of a client . | 73 | 10 |
21,776 | async def send_rpc ( self , client_id , conn_string , address , rpc_id , payload , timeout ) : conn_id = self . _client_connection ( client_id , conn_string ) return await self . adapter . send_rpc ( conn_id , address , rpc_id , payload , timeout ) | Send an RPC on behalf of a client . | 75 | 9 |
21,777 | async def send_script ( self , client_id , conn_string , script ) : conn_id = self . _client_connection ( client_id , conn_string ) await self . adapter . send_script ( conn_id , script ) | Send a script to a device on behalf of a client . | 54 | 12 |
21,778 | async def debug ( self , client_id , conn_string , command , args ) : conn_id = self . _client_info ( client_id , 'connections' ) [ conn_string ] return await self . adapter . debug ( conn_id , command , args ) | Send a debug command to a device on behalf of a client . | 61 | 13 |
21,779 | def registration_packet ( self ) : return ( self . hw_type , self . api_info [ 0 ] , self . api_info [ 1 ] , self . name , self . fw_info [ 0 ] , self . fw_info [ 1 ] , self . fw_info [ 2 ] , self . exec_info [ 0 ] , self . exec_info [ 0 ] , self . exec_info [ 0 ] , self . slot , self . unique_id ) | Serialize this into a tuple suitable for returning from an RPC . | 107 | 13 |
21,780 | def clear_to_reset ( self , config_vars ) : super ( TileManagerState , self ) . clear_to_reset ( config_vars ) self . registered_tiles = self . registered_tiles [ : 1 ] self . safe_mode = False self . debug_mode = False | Clear to the state immediately after a reset . | 66 | 9 |
21,781 | def insert_tile ( self , tile_info ) : for i , tile in enumerate ( self . registered_tiles ) : if tile . slot == tile_info . slot : self . registered_tiles [ i ] = tile_info return self . registered_tiles . append ( tile_info ) | Add or replace an entry in the tile cache . | 66 | 10 |
21,782 | def register_tile ( self , hw_type , api_major , api_minor , name , fw_major , fw_minor , fw_patch , exec_major , exec_minor , exec_patch , slot , unique_id ) : api_info = ( api_major , api_minor ) fw_info = ( fw_major , fw_minor , fw_patch ) exec_info = ( exec_major , exec_minor , exec_patch ) address = 10 + slot info = TileInfo ( hw_type , name , api_info , fw_info , exec_info , slot , unique_id , state = TileState . JUST_REGISTERED , address = address ) self . tile_manager . insert_tile ( info ) debug = int ( self . tile_manager . debug_mode ) if self . tile_manager . safe_mode : run_level = RunLevel . SAFE_MODE info . state = TileState . SAFE_MODE config_rpcs = [ ] else : run_level = RunLevel . START_ON_COMMAND info . state = TileState . BEING_CONFIGURED config_rpcs = self . config_database . stream_matching ( address , name ) self . tile_manager . queue . put_nowait ( ( info , config_rpcs ) ) return [ address , run_level , debug ] | Register a tile with this controller . | 315 | 7 |
21,783 | def describe_tile ( self , index ) : if index >= len ( self . tile_manager . registered_tiles ) : tile = TileInfo . CreateInvalid ( ) else : tile = self . tile_manager . registered_tiles [ index ] return tile . registration_packet ( ) | Get the registration information for the tile at the given index . | 62 | 12 |
21,784 | def ParseHeader ( cls , script_data ) : if len ( script_data ) < UpdateScript . SCRIPT_HEADER_LENGTH : raise ArgumentError ( "Script is too short to contain a script header" , length = len ( script_data ) , header_length = UpdateScript . SCRIPT_HEADER_LENGTH ) embedded_hash , magic , total_length = struct . unpack_from ( "<16sLL" , script_data ) if magic != UpdateScript . SCRIPT_MAGIC : raise ArgumentError ( "Script has invalid magic value" , expected = UpdateScript . SCRIPT_MAGIC , found = magic ) if total_length != len ( script_data ) : raise ArgumentError ( "Script length does not match embedded length" , embedded_length = total_length , length = len ( script_data ) ) hashed_data = script_data [ 16 : ] sha = hashlib . sha256 ( ) sha . update ( hashed_data ) hash_value = sha . digest ( ) [ : 16 ] if not compare_digest ( embedded_hash , hash_value ) : raise ArgumentError ( "Script has invalid embedded hash" , embedded_hash = hexlify ( embedded_hash ) , calculated_hash = hexlify ( hash_value ) ) return ScriptHeader ( UpdateScript . SCRIPT_HEADER_LENGTH , False , True , False ) | Parse a script integrity header . | 306 | 7 |
21,785 | def FromBinary ( cls , script_data , allow_unknown = True , show_rpcs = False ) : curr = 0 records = [ ] header = cls . ParseHeader ( script_data ) curr = header . header_length cls . logger . debug ( "Parsed script header: %s, skipping %d bytes" , header , curr ) record_count = 0 record_data = bytearray ( ) partial_match = None match_offset = 0 while curr < len ( script_data ) : if len ( script_data ) - curr < UpdateRecord . HEADER_LENGTH : raise ArgumentError ( "Script ended with a partial record" , remaining_length = len ( script_data ) - curr ) # Add another record to our current list of records that we're parsing total_length , record_type = struct . unpack_from ( "<LB" , script_data [ curr : ] ) cls . logger . debug ( "Found record of type %d, length %d" , record_type , total_length ) record_data += script_data [ curr : curr + total_length ] record_count += 1 curr += total_length try : if show_rpcs and record_type == SendRPCRecord . MatchType ( ) : cls . logger . debug ( " {0}" . format ( hexlify ( record_data ) ) ) record = SendRPCRecord . FromBinary ( record_data [ UpdateRecord . HEADER_LENGTH : ] , record_count ) elif show_rpcs and record_type == SendErrorCheckingRPCRecord . MatchType ( ) : cls . logger . debug ( " {0}" . format ( hexlify ( record_data ) ) ) record = SendErrorCheckingRPCRecord . FromBinary ( record_data [ UpdateRecord . HEADER_LENGTH : ] , record_count ) else : record = UpdateRecord . FromBinary ( record_data , record_count ) except DeferMatching as defer : # If we're told to defer matching, continue accumulating record_data # until we get a complete match. If a partial match is available, keep track of # that partial match so that we can use it once the record no longer matches. if defer . partial_match is not None : partial_match = defer . partial_match match_offset = curr continue except DataError : if record_count > 1 and partial_match : record = partial_match curr = match_offset elif not allow_unknown : raise elif allow_unknown and record_count > 1 : raise ArgumentError ( "A record matched an initial record subset but failed" " matching a subsequent addition without leaving a partial_match" ) else : record = UnknownRecord ( record_type , record_data [ UpdateRecord . HEADER_LENGTH : ] ) # Reset our record accumulator since we successfully matched one or more records record_count = 0 record_data = bytearray ( ) partial_match = None match_offset = 0 records . append ( record ) return UpdateScript ( records ) | Parse a binary update script . | 678 | 7 |
21,786 | def encode ( self ) : blob = bytearray ( ) for record in self . records : blob += record . encode ( ) header = struct . pack ( "<LL" , self . SCRIPT_MAGIC , len ( blob ) + self . SCRIPT_HEADER_LENGTH ) blob = header + blob sha = hashlib . sha256 ( ) sha . update ( blob ) hash_value = sha . digest ( ) [ : 16 ] return bytearray ( hash_value ) + blob | Encode this record into a binary blob . | 111 | 9 |
21,787 | def create_worker ( self , func , interval , * args , * * kwargs ) : thread = StoppableWorkerThread ( func , interval , args , kwargs ) self . _workers . append ( thread ) if self . _started : thread . start ( ) | Spawn a worker thread running func . | 59 | 7 |
21,788 | def stop_workers ( self ) : self . _started = False for worker in self . _workers : worker . stop ( ) | Synchronously stop any potential workers . | 27 | 8 |
21,789 | def stop_workers_async ( self ) : self . _started = False for worker in self . _workers : worker . signal_stop ( ) | Signal that all workers should stop without waiting . | 32 | 10 |
21,790 | def _download_ota_script ( script_url ) : try : blob = requests . get ( script_url , stream = True ) return blob . content except Exception as e : iprint ( "Failed to download OTA script" ) iprint ( e ) return False | Download the script from the cloud service and store to temporary file location | 58 | 13 |
21,791 | def rename_module ( new , old ) : try : sys . modules [ new ] = imp . load_module ( old , * imp . find_module ( old ) ) return True except ImportError : return False | Attempts to import the old module and load it under the new name . Used for purely cosmetic name changes in Python 3 . x . | 45 | 26 |
21,792 | def _parse_conn_string ( self , conn_string ) : disconnection_required = False """If device not in conn_string, set to default info""" if conn_string is None or 'device' not in conn_string : if self . _default_device_info is not None and self . _device_info != self . _default_device_info : disconnection_required = True self . _device_info = self . _default_device_info if conn_string is None or len ( conn_string ) == 0 : return disconnection_required if '@' in conn_string : raise ArgumentError ( "Configuration files are not yet supported as part of a connection string argument" , conn_string = conn_string ) pairs = conn_string . split ( ';' ) for pair in pairs : name , _ , value = pair . partition ( '=' ) if len ( name ) == 0 or len ( value ) == 0 : continue name = name . strip ( ) value = value . strip ( ) if name == 'device' : if value in DEVICE_ALIASES : device_name = DEVICE_ALIASES [ value ] if device_name in KNOWN_DEVICES : device_info = KNOWN_DEVICES . get ( device_name ) if self . _device_info != device_info : self . _device_info = device_info disconnection_required = True else : raise ArgumentError ( "Unknown device name or alias, please select from known_devices" , device_name = value , known_devices = [ x for x in DEVICE_ALIASES . keys ( ) ] ) elif name == 'channel' : if self . _mux_func is not None : if self . _channel != int ( value ) : self . _channel = int ( value ) disconnection_required = True else : print ( "Warning: multiplexing architecture not selected, channel will not be set" ) return disconnection_required | Parse a connection string passed from debug - c or connect_direct Returns True if any settings changed in the debug port which would require a jlink disconnection | 424 | 32 |
21,793 | def _try_connect ( self , connection_string ) : if self . _parse_conn_string ( connection_string ) : self . _trigger_callback ( 'on_disconnect' , self . id , self . _connection_id ) self . stop_sync ( ) if self . _mux_func is not None : self . _mux_func ( self . _channel ) if self . _device_info is None : raise ArgumentError ( "Missing device name or alias, specify using device=name in port string " "or -c device=name in connect_direct or debug command" , known_devices = [ x for x in DEVICE_ALIASES . keys ( ) ] ) try : self . jlink = pylink . JLink ( ) self . jlink . open ( serial_no = self . _jlink_serial ) self . jlink . set_tif ( pylink . enums . JLinkInterfaces . SWD ) self . jlink . connect ( self . _device_info . jlink_name ) self . jlink . set_little_endian ( ) except pylink . errors . JLinkException as exc : if exc . code == exc . VCC_FAILURE : raise HardwareError ( "No target power detected" , code = exc . code , suggestion = "Check jlink connection and power wiring" ) raise except : raise self . _control_thread = JLinkControlThread ( self . jlink ) self . _control_thread . start ( ) self . set_config ( 'probe_required' , True ) self . set_config ( 'probe_supported' , True ) | If the connecton string settings are different try and connect to an attached device | 359 | 15 |
21,794 | def stop_sync ( self ) : if self . _control_thread is not None and self . _control_thread . is_alive ( ) : self . _control_thread . stop ( ) self . _control_thread . join ( ) if self . jlink is not None : self . jlink . close ( ) | Synchronously stop this adapter and release all resources . | 70 | 11 |
21,795 | def probe_async ( self , callback ) : def _on_finished ( _name , control_info , exception ) : if exception is not None : callback ( self . id , False , str ( exception ) ) return self . _control_info = control_info try : info = { 'connection_string' : "direct" , 'uuid' : control_info . uuid , 'signal_strength' : 100 } self . _trigger_callback ( 'on_scan' , self . id , info , self . ExpirationTime ) finally : callback ( self . id , True , None ) self . _control_thread . command ( JLinkControlThread . FIND_CONTROL , _on_finished , self . _device_info . ram_start , self . _device_info . ram_size ) | Send advertisements for all connected devices . | 178 | 7 |
21,796 | def _open_debug_interface ( self , conn_id , callback , connection_string = None ) : self . _try_connect ( connection_string ) callback ( conn_id , self . id , True , None ) | Enable debug interface for this IOTile device | 48 | 9 |
21,797 | async def _reset_vector ( self ) : self . _logger . info ( "Tile %s at address %d is starting from reset" , self . name , self . address ) try : address , run_level , debug = await self . _device . emulator . await_rpc ( 8 , rpcs . REGISTER_TILE , * self . _registration_tuple ( ) ) except : self . _logger . exception ( "Error registering tile: address=%d, name=%s" , self . address , self . name ) raise self . debug_mode = bool ( debug ) self . run_level = run_level self . _logger . info ( "Tile at address %d registered itself, received address=%d, runlevel=%d and debug=%d" , self . address , address , run_level , debug ) self . _registered . set ( ) # If we are in safe mode we do not run the main application # loop. if run_level == RunLevel . SAFE_MODE : self . initialized . set ( ) return if run_level == RunLevel . START_ON_COMMAND : await self . _start_received . wait ( ) self . _hosted_app_running . set ( ) await self . _application_main ( ) | Main background task for the tile executive . | 284 | 8 |
21,798 | def _handle_reset ( self ) : self . _registered . clear ( ) self . _start_received . clear ( ) self . _hosted_app_running . clear ( ) super ( EmulatedPeripheralTile , self ) . _handle_reset ( ) | Reset this tile . | 58 | 5 |
21,799 | async def start ( self ) : self . _logger . info ( "Starting all device adapters" ) await self . device_manager . start ( ) self . _logger . info ( "Starting all servers" ) for server in self . servers : await server . start ( ) | Start the gateway . | 60 | 4 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.