idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
25,800
def _cleanup_resourceprovider ( self ) : self . resourceprovider = ResourceProvider ( self . args ) try : self . resourceprovider . cleanup ( ) self . logger . info ( "Cleanup done." ) except Exception as error : self . logger . error ( "Cleanup failed! %s" , error )
Calls cleanup for ResourceProvider of this run .
25,801
def _init_cloud ( self , cloud_arg ) : cloud = None if cloud_arg : try : if hasattr ( self . args , "cm" ) : cloud_module = self . args . cm if self . args . cm else None self . logger . info ( "Creating cloud module {}." . format ( cloud_module ) ) else : cloud_module = None cloud = Cloud ( host = None , module = clou...
Initializes Cloud module if cloud_arg is set .
25,802
def generate ( self , * args , ** kwargs ) : title = kwargs . get ( "title" ) heads = kwargs . get ( "heads" ) refresh = kwargs . get ( "refresh" ) filename = args [ 0 ] report = self . _create ( title , heads , refresh , path_start = os . path . dirname ( filename ) ) ReportHtml . save ( report , filename )
Implementation for the generate method defined in ReportBase . Generates a html report and saves it .
25,803
def check_int ( integer ) : if not isinstance ( integer , str ) : return False if integer [ 0 ] in ( '-' , '+' ) : return integer [ 1 : ] . isdigit ( ) return integer . isdigit ( )
Check if number is integer or not .
25,804
def _is_pid_running_on_unix ( pid ) : try : os . kill ( pid , 0 ) except OSError as err : return not err . errno == os . errno . ESRCH return True
Check if PID is running for Unix systems .
25,805
def _is_pid_running_on_windows ( pid ) : import ctypes . wintypes kernel32 = ctypes . windll . kernel32 handle = kernel32 . OpenProcess ( 1 , 0 , pid ) if handle == 0 : return False exit_code = ctypes . wintypes . DWORD ( ) ret = kernel32 . GetExitCodeProcess ( handle , ctypes . byref ( exit_code ) ) is_alive = ( ret =...
Check if PID is running for Windows systems
25,806
def strip_escape ( string = '' , encoding = "utf-8" ) : matches = [ ] try : if hasattr ( string , "decode" ) : string = string . decode ( encoding ) except Exception : pass try : for match in ansi_eng . finditer ( string ) : matches . append ( match ) except TypeError as error : raise TypeError ( "Unable to strip escap...
Strip escape characters from string .
25,807
def import_module ( modulename ) : module = None try : module = importlib . import_module ( modulename ) except ImportError : if "." in modulename : modules = modulename . split ( "." ) package = "." . join ( modules [ 1 : len ( modules ) ] ) module = importlib . import_module ( package ) else : raise return module
Static method for importing module modulename . Can handle relative imports as well .
25,808
def get_abs_path ( relative_path ) : abs_path = os . path . sep . join ( os . path . abspath ( sys . modules [ __name__ ] . __file__ ) . split ( os . path . sep ) [ : - 1 ] ) abs_path = os . path . abspath ( abs_path + os . path . sep + relative_path ) return abs_path
Get absolute path for relative path .
25,809
def get_pkg_version ( pkg_name , parse = False ) : import pkg_resources try : version = pkg_resources . require ( pkg_name ) [ 0 ] . version return pkg_resources . parse_version ( version ) if parse else version except pkg_resources . DistributionNotFound : return None
Verify and get installed python package version .
25,810
def generate_object_graphs_by_class ( classlist ) : try : import objgraph import gc except ImportError : return graphcount = 0 if not isinstance ( classlist , list ) : classlist = [ classlist ] for class_item in classlist : for obj in gc . get_objects ( ) : if isinstance ( obj , class_item ) : graphcount += 1 objgraph ...
Generate reference and backreference graphs for objects of type class for each class given in classlist . Useful for debugging reference leaks in framework etc .
25,811
def remove_empty_from_dict ( dictionary ) : if isinstance ( dictionary , dict ) : return dict ( ( k , remove_empty_from_dict ( v ) ) for k , v in iteritems ( dictionary ) if v and remove_empty_from_dict ( v ) ) elif isinstance ( dictionary , list ) : return [ remove_empty_from_dict ( v ) for v in dictionary if v and re...
Remove empty items from dictionary d
25,812
def set_or_delete ( dictionary , key , value ) : if value : dictionary [ key ] = value else : if dictionary . get ( key ) : del dictionary [ key ]
Set value as value of dict key key . If value is None delete key key from dict .
25,813
def initLogger ( name ) : logger = logging . getLogger ( name ) logger . setLevel ( logging . INFO ) if not getattr ( logger , "streamhandler_set" , None ) : consolehandler = logging . StreamHandler ( ) formatter = logging . Formatter ( '%(asctime)s - %(name)s - %(levelname)s - %(message)s' ) consolehandler . setFormat...
Initializes a basic logger . Can be replaced when constructing the HttpApi object or afterwards with setter
25,814
def find_duplicate_keys ( data ) : out_dict = { } for key , value in data : if key in out_dict : raise ValueError ( "Duplicate key: {}" . format ( key ) ) out_dict [ key ] = value return out_dict
Find duplicate keys in a layer of ordered pairs . Intended as the object_pairs_hook callable for json . load or loads .
25,815
def _load ( self ) : if self . is_exists ( ) : return open ( self . _ref , "rb" ) . read ( ) raise NotFoundError ( "File %s not found" % self . _ref )
Function load .
25,816
def get_file ( self ) : content = self . _load ( ) if not content : return None filename = "temporary_file.bin" with open ( filename , "wb" ) as file_name : file_name . write ( content ) return filename
Load data into a file and return file path .
25,817
def as_dict ( self ) : my_info = { } if self . platform : my_info [ "model" ] = self . platform if self . resource_id : my_info [ "sn" ] = self . resource_id if self . vendor : my_info [ "vendor" ] = self . vendor if self . provider : my_info [ "provider" ] = self . provider return my_info
Generate a dictionary of the contents of this DutInformation object .
25,818
def get_resource_ids ( self ) : resids = [ ] if self . dutinformations : for info in self . dutinformations : resids . append ( info . resource_id ) return resids return "unknown"
Get resource ids as a list .
25,819
def push_resource_cache ( resourceid , info ) : if not resourceid : raise ResourceInitError ( "Resource id missing" ) if not DutInformationList . _cache . get ( resourceid ) : DutInformationList . _cache [ resourceid ] = dict ( ) DutInformationList . _cache [ resourceid ] = merge ( DutInformationList . _cache [ resourc...
Cache resource specific information
25,820
def get_resource_cache ( resourceid ) : if not resourceid : raise ResourceInitError ( "Resource id missing" ) if not DutInformationList . _cache . get ( resourceid ) : DutInformationList . _cache [ resourceid ] = dict ( ) return DutInformationList . _cache [ resourceid ]
Get a cached dictionary related to an individual resourceid .
25,821
def create_result_object ( result ) : _result = { 'tcid' : result . get_tc_name ( ) , 'campaign' : result . campaign , 'cre' : { 'user' : result . tester } , 'job' : { 'id' : result . job_id } , 'exec' : { 'verdict' : result . get_verdict ( ) , 'duration' : result . duration , 'note' : result . get_fail_reason ( ) , 'd...
Create cloud result object from Result .
25,822
def append_logs_to_result_object ( result_obj , result ) : logs = result . has_logs ( ) result_obj [ "exec" ] [ "logs" ] = [ ] if logs and result . logfiles : for log in logs : typ = None parts = log . split ( os . sep ) if "bench" in parts [ len ( parts ) - 1 ] : typ = "framework" if typ is not None : name = parts [ l...
Append log files to cloud result object from Result .
25,823
def get_available_devices ( self ) : connected_devices = self . mbeds . list_mbeds ( ) if self . mbeds else [ ] edbg_ports = self . available_edbg_ports ( ) for port in edbg_ports : connected_devices . append ( { "platform_name" : "SAM4E" , "serial_port" : port , "mount_point" : None , "target_id" : None , "baud_rate" ...
Gets available devices using mbedls and self . available_edbg_ports .
25,824
def available_edbg_ports ( self ) : ports_available = sorted ( list ( list_ports . comports ( ) ) ) edbg_ports = [ ] for iport in ports_available : port = iport [ 0 ] desc = iport [ 1 ] hwid = iport [ 2 ] if str ( desc ) . startswith ( "EDBG Virtual COM Port" ) or "VID:PID=03EB:2111" in str ( hwid ) . upper ( ) : try :...
Finds available EDBG COM ports .
25,825
def store_traces ( self , value ) : if not value : self . logger . debug ( "Stopping storing received lines for dut %d" , self . index ) self . _store_traces = False else : self . logger . debug ( "Resuming storing received lines for dut %d" , self . index ) self . _store_traces = True
Setter for _store_traces . _store_traces controls in memory storing of received lines . Also logs the change for the user .
25,826
def init_wait_register ( self ) : app = self . config . get ( "application" ) if app : bef_init_cmds = app . get ( "cli_ready_trigger" ) if bef_init_cmds : self . init_done . clear ( ) self . init_event_matcher = EventMatcher ( EventTypes . DUT_LINE_RECEIVED , bef_init_cmds , self , self . init_done ) self . init_wait_...
Initialize EventMatcher to wait for certain cli_ready_trigger to arrive from this Dut .
25,827
def wait_init ( self ) : init_done = self . init_done . wait ( timeout = self . init_wait_timeout ) if not init_done : if hasattr ( self , "peek" ) : app = self . config . get ( "application" ) if app : bef_init_cmds = app . get ( "cli_ready_trigger" ) if bef_init_cmds in self . peek ( ) : init_done = True return init_...
Block until init_done flag is set or until init_wait_timeout happens .
25,828
def init_cli_human ( self ) : if self . post_cli_cmds is None : self . post_cli_cmds = self . set_default_init_cli_human_cmds ( ) for cli_cmd in self . post_cli_cmds : try : if isinstance ( cli_cmd , list ) and len ( cli_cmd ) >= 2 : asynchronous = cli_cmd [ 1 ] if len ( cli_cmd ) > 2 : wait = cli_cmd [ 2 ] else : wait...
Send post_cli_cmds to dut
25,829
def set_time_function ( self , function ) : if isinstance ( function , types . FunctionType ) : self . get_time = function else : raise ValueError ( "Invalid value for DUT time function" )
Set time function to be used .
25,830
def open_dut ( self , port = None ) : if port is not None : self . comport = port try : self . open_connection ( ) except ( DutConnectionError , ValueError ) as err : self . close_dut ( use_prepare = False ) raise DutConnectionError ( str ( err ) ) except KeyboardInterrupt : self . close_dut ( use_prepare = False ) sel...
Open connection to dut .
25,831
def _wait_for_exec_ready ( self ) : while not self . response_received . wait ( 1 ) and self . query_timeout != 0 : if self . query_timeout != 0 and self . query_timeout < self . get_time ( ) : if self . prev : cmd = self . prev . cmd else : cmd = "???" self . logger . error ( "CMD timeout: " + cmd ) self . query_timeo...
Wait for response .
25,832
def execute_command ( self , req , ** kwargs ) : if isinstance ( req , string_types ) : timeout = 50 wait = True asynchronous = False for key in kwargs : if key == 'wait' : wait = kwargs [ key ] elif key == 'timeout' : timeout = kwargs [ key ] elif key == 'asynchronous' : asynchronous = kwargs [ key ] req = CliRequest ...
Execute command and return CliResponse
25,833
def close_dut ( self , use_prepare = True ) : if not self . stopped : self . logger . debug ( "Close '%s' connection" % self . dut_name , extra = { 'type' : '<->' } ) if use_prepare : try : self . prepare_connection_close ( ) except TestStepFail : pass self . stopped = True Dut . _dutlist . remove ( self ) if Dut . _si...
Close connection to dut .
25,834
def process_dut ( dut ) : if dut . finished ( ) : return Dut . _signalled_duts . appendleft ( dut ) Dut . _sem . release ( )
Signal worker thread that specified Dut needs processing
25,835
def run ( ) : Dut . _logger . debug ( "Start DUT communication" , extra = { 'type' : '<->' } ) while Dut . _run : Dut . _sem . acquire ( ) try : dut = Dut . _signalled_duts . pop ( ) if dut . waiting_for_response is not None : item = dut . waiting_for_response dut . response_coming_in = dut . _read_response ( ) if dut ...
Main thread runner for all Duts .
25,836
def _read_response ( self ) : try : line = self . readline ( ) except RuntimeError : Dut . _logger . warning ( "Failed to read PIPE" , extra = { 'type' : '!<-' } ) return - 1 if line : if self . store_traces : self . traces . append ( line ) self . response_traces . append ( line ) EventObject ( EventTypes . DUT_LINE_R...
Internal response reader .
25,837
def check_retcode ( self , line ) : retcode = None match = re . search ( r"retcode\: ([-\d]{1,})" , line ) if match : retcode = num ( str ( match . group ( 1 ) ) ) match = re . search ( "cmd tasklet init" , line ) if match : self . logger . debug ( "Device Boot up" , extra = { 'type' : ' ' } ) return - 1 return retco...
Look for retcode on line line and return return code if found .
25,838
def start_dut_thread ( self ) : if Dut . _th is None : Dut . _run = True Dut . _sem = Semaphore ( 0 ) Dut . _signalled_duts = deque ( ) Dut . _logger = LogManager . get_bench_logger ( 'Dut' ) Dut . _th = Thread ( target = Dut . run , name = 'DutThread' ) Dut . _th . daemon = True Dut . _th . start ( )
Start Dut thread .
25,839
def _event_received ( self , ref , data ) : match = self . _resolve_match_data ( ref , data ) if match : if self . flag_to_set : self . flag_to_set . set ( ) if self . callback : self . callback ( EventMatch ( ref , data , match ) ) if self . __forget : self . forget ( )
Handle received event .
25,840
def write_file ( self , content , filepath = None , filename = None , indent = None , keys_to_write = None ) : path = filepath if filepath else self . filepath name = filename if filename else self . filename if not os . path . exists ( path ) : try : os . makedirs ( path ) except OSError as error : self . logger . err...
Write a Python dictionary as JSON to a file .
25,841
def read_file ( self , filepath = None , filename = None ) : name = filename if filename else self . filename path = filepath if filepath else self . filepath name = self . _ends_with ( name , ".json" ) path = self . _ends_with ( path , os . path . sep ) try : return self . _read_json ( path , name ) except Environment...
Tries to read JSON content from filename and convert it to a dict .
25,842
def read_value ( self , key , filepath = None , filename = None ) : path = filepath if filepath else self . filepath name = filename if filename else self . filename name = self . _ends_with ( name , ".json" ) path = self . _ends_with ( path , os . path . sep ) try : output = self . _read_json ( path , name ) if key no...
Tries to read the value of given key from JSON file filename .
25,843
def write_values ( self , data , filepath = None , filename = None , indent = None , keys_to_write = None ) : name = filename if filename else self . filename path = filepath if filepath else self . filepath name = self . _ends_with ( name , ".json" ) path = self . _ends_with ( path , os . path . sep ) if not os . path...
Tries to write extra content to a JSON file .
25,844
def _write_json ( self , filepath , filename , writemode , content , indent ) : with open ( os . path . join ( filepath , filename ) , writemode ) as fil : json . dump ( content , fil , indent = indent ) self . logger . info ( "Wrote content to file {}" . format ( filename ) )
Helper for writing content to a file .
25,845
def _read_json ( self , path , name ) : with open ( os . path . join ( path , name ) , 'r' ) as fil : output = json . load ( fil ) self . logger . info ( "Read contents of {}" . format ( name ) ) return output
Load a json into a dictionary from a file .
25,846
def _ends_with ( self , string_to_edit , end ) : if not string_to_edit . endswith ( end ) : return string_to_edit + end return string_to_edit
Check if string ends with characters in end if not merge end to string .
25,847
def parse ( self , * args , ** kwargs ) : cmd = args [ 0 ] resp = args [ 1 ] if cmd in self . parsers : try : return self . parsers [ cmd ] ( resp ) except Exception as err : print ( err ) return { }
Parse response .
25,848
def append ( self , result ) : if isinstance ( result , Result ) : self . data . append ( result ) elif isinstance ( result , ResultList ) : self . data += result . data else : raise TypeError ( 'unknown result type' )
Append a new Result to the list .
25,849
def save ( self , heads , console = True ) : self . _save_junit ( ) self . _save_html_report ( heads ) if console : self . _print_console_summary ( )
Create reports in different formats .
25,850
def _save_junit ( self ) : report = ReportJunit ( self ) file_name = report . get_latest_filename ( "result.junit.xml" , "" ) report . generate ( file_name ) file_name = report . get_latest_filename ( "junit.xml" , "../" ) report . generate ( file_name )
Save Junit report .
25,851
def _save_html_report ( self , heads = None , refresh = None ) : report = ReportHtml ( self ) heads = heads if heads else { } test_report_filename = report . get_current_filename ( "html" ) report . generate ( test_report_filename , title = 'Test Results' , heads = heads , refresh = refresh ) latest_report_filename = r...
Save html report .
25,852
def success_count ( self ) : return len ( [ i for i , result in enumerate ( self . data ) if result . success ] )
Amount of passed test cases in this list .
25,853
def failure_count ( self ) : return len ( [ i for i , result in enumerate ( self . data ) if result . failure ] )
Amount of failed test cases in this list .
25,854
def inconclusive_count ( self ) : inconc_count = len ( [ i for i , result in enumerate ( self . data ) if result . inconclusive ] ) unknown_count = len ( [ i for i , result in enumerate ( self . data ) if result . get_verdict ( ) == "unknown" ] ) return inconc_count + unknown_count
Amount of inconclusive test cases in this list .
25,855
def retry_count ( self ) : retries = len ( [ i for i , result in enumerate ( self . data ) if result . retries_left > 0 ] ) return retries
Amount of retried test cases in this list .
25,856
def skip_count ( self ) : return len ( [ i for i , result in enumerate ( self . data ) if result . skip ] )
Amount of skipped test cases in this list .
25,857
def clean_fails ( self ) : for item in self . data : if item . failure and not item . retries_left > 0 : return True return False
Check if there are any fails that were not subsequently retried .
25,858
def clean_inconcs ( self ) : for item in self . data : if ( item . inconclusive or item . get_verdict ( ) == "unknown" ) and not item . retries_left > 0 : return True return False
Check if there are any inconclusives or uknowns that were not subsequently retried .
25,859
def total_duration ( self ) : durations = [ result . duration for result in self . data ] return sum ( durations )
Sum of the durations of the tests in this list .
25,860
def pass_rate ( self , include_skips = False , include_inconclusive = False , include_retries = True ) : total = self . count ( ) success = self . success_count ( ) retries = self . retry_count ( ) try : if include_inconclusive and include_skips and include_retries : val = 100.0 * success / total elif include_inconclus...
Calculate pass rate for tests in this list .
25,861
def get_summary ( self ) : return { "count" : self . count ( ) , "pass" : self . success_count ( ) , "fail" : self . failure_count ( ) , "skip" : self . skip_count ( ) , "inconclusive" : self . inconclusive_count ( ) , "retries" : self . retry_count ( ) , "duration" : self . total_duration ( ) }
Get a summary of this ResultLists contents as dictionary .
25,862
def next ( self ) : try : result = self . data [ self . index ] except IndexError : self . index = 0 raise StopIteration self . index += 1 return result
Implementation of next method from Iterator .
25,863
def deprecated ( message = "" ) : def decorator_wrapper ( func ) : @ functools . wraps ( func ) def function_wrapper ( * args , ** kwargs ) : current_call_source = '|' . join ( traceback . format_stack ( inspect . currentframe ( ) ) ) if current_call_source not in function_wrapper . last_call_source : warnings . warn (...
This is a decorator which can be used to mark functions as deprecated . It will result in a warning being emitted when the function is used first time and filter is set for show DeprecationWarning .
25,864
def remove_file ( filename , path = None ) : cwd = os . getcwd ( ) try : if path : os . chdir ( path ) except OSError : raise try : os . remove ( filename ) os . chdir ( cwd ) return True except OSError : os . chdir ( cwd ) raise
Remove file filename from path .
25,865
def verify_message ( self , expected_response , break_in_fail = True ) : ok = True try : ok = verify_message ( self . lines , expected_response ) except ( TypeError , LookupError ) as inst : ok = False if break_in_fail : raise inst if ok is False and break_in_fail : raise LookupError ( "Unexpected message found" ) retu...
Verifies that expected_response is found in self . lines .
25,866
def verify_trace ( self , expected_traces , break_in_fail = True ) : ok = True try : ok = verify_message ( self . traces , expected_traces ) except ( TypeError , LookupError ) as inst : ok = False if break_in_fail : raise inst if ok is False and break_in_fail : raise LookupError ( "Unexpected message found" ) return ok
Verifies that expectedResponse is found in self . traces
25,867
def verify_response_duration ( self , expected = None , zero = 0 , threshold_percent = 0 , break_in_fail = True ) : was = self . timedelta - zero error = abs ( was / expected ) * 100.0 - 100.0 if expected > 0 else 0 msg = "should: %.3f, was: %.3f, error: %.3f %%" % ( expected , was , error ) self . logger . debug ( msg...
Verify that response duration is in bounds .
25,868
def _hardware_count ( self ) : return self . _counts . get ( "hardware" ) + self . _counts . get ( "serial" ) + self . _counts . get ( "mbed" )
Amount of hardware resources .
25,869
def _resolve_requirements ( self , requirements ) : try : dut_count = requirements [ "duts" ] [ "*" ] [ "count" ] except KeyError : return [ ] default_values = { "type" : "hardware" , "allowed_platforms" : [ ] , "nick" : None , } default_values . update ( requirements [ "duts" ] [ "*" ] ) del default_values [ "count" ]...
Internal method for resolving requirements into resource configurations .
25,870
def _solve_location ( self , req , dut_req_len , idx ) : if not req . get ( "location" ) : return if len ( req . get ( "location" ) ) == 2 : for x_and_y , coord in enumerate ( req . get ( "location" ) ) : if isinstance ( coord , string_types ) : coord = ResourceConfig . __replace_coord_variables ( coord , x_and_y , dut...
Helper function for resolving the location for a resource .
25,871
def __replace_base_variables ( text , req_len , idx ) : return text . replace ( "{i}" , str ( idx + 1 ) ) . replace ( "{n}" , str ( req_len ) )
Replace i and n in text with index + 1 and req_len .
25,872
def __replace_coord_variables ( text , x_and_y , req_len , idx ) : return ResourceConfig . __replace_base_variables ( text , req_len , idx ) . replace ( "{xy}" , str ( x_and_y ) ) . replace ( "{pi}" , str ( math . pi ) )
Replace x and y with their coordinates and replace pi with value of pi .
25,873
def __generate_indexed_requirements ( dut_count , basekeys , requirements ) : dut_requirements = [ ] for i in range ( 1 , dut_count + 1 ) : dut_requirement = ResourceRequirements ( basekeys . copy ( ) ) if i in requirements [ "duts" ] : for k in requirements [ "duts" ] [ i ] : dut_requirement . set ( k , requirements [...
Generate indexed requirements from general requirements .
25,874
def _resolve_hardware_count ( self ) : length = len ( [ d for d in self . _dut_requirements if d . get ( "type" ) in [ "hardware" , "serial" , "mbed" ] ] ) self . _hardware_count = length
Calculate amount of hardware resources .
25,875
def _resolve_process_count ( self ) : length = len ( [ d for d in self . _dut_requirements if d . get ( "type" ) == "process" ] ) self . _process_count = length
Calculate amount of process resources .
25,876
def _resolve_dut_count ( self ) : self . _dut_count = len ( self . _dut_requirements ) self . _resolve_process_count ( ) self . _resolve_hardware_count ( ) if self . _dut_count != self . _hardware_count + self . _process_count : raise ValueError ( "Missing or invalid type fields in dut configuration!" )
Calculates total amount of resources required and their types .
25,877
def set_dut_configuration ( self , ident , config ) : if hasattr ( config , "get_requirements" ) : self . _dut_requirements [ ident ] = config elif isinstance ( config , dict ) : self . _dut_requirements [ ident ] = ResourceRequirements ( config )
Set requirements for dut ident .
25,878
def flash ( self , binary_location = None , forceflash = None ) : if not Flash : self . logger . error ( "Mbed-flasher not installed!" ) raise ImportError ( "Mbed-flasher not installed!" ) try : self . build = Build . init ( binary_location ) except NotImplementedError as error : self . logger . error ( "Build initiali...
Flash a binary to the target device using mbed - flasher .
25,879
def _flash_needed ( self , ** kwargs ) : forceflash = kwargs . get ( "forceflash" , False ) cur_binary_sha1 = self . dutinformation . build_binary_sha1 if not forceflash and self . build . sha1 == cur_binary_sha1 : return False return True
Check if flashing is needed . Flashing can be skipped if resource binary_sha1 attribute matches build sha1 and forceflash is not True .
25,880
def get_params ( self ) : return self . timeout , self . xonxoff , self . rtscts , self . baudrate
Get parameters as a tuple .
25,881
def open_connection ( self ) : if self . readthread is not None : raise DutConnectionError ( "Trying to open serial port which was already open" ) self . logger . info ( "Open Connection " "for '%s' using '%s' baudrate: %d" % ( self . dut_name , self . comport , self . serial_baudrate ) , extra = { 'type' : '<->' } ) i...
Open serial port connection .
25,882
def close_connection ( self ) : if self . port : self . stop ( ) self . logger . debug ( "Close port '%s'" % self . comport , extra = { 'type' : '<->' } ) self . port . close ( ) self . port = False
Closes serial port connection .
25,883
def __send_break ( self ) : if self . port : self . logger . debug ( "sendBreak to device to reboot" , extra = { 'type' : '<->' } ) result = self . port . safe_sendBreak ( ) time . sleep ( 1 ) if result : self . logger . debug ( "reset completed" , extra = { 'type' : '<->' } ) else : self . logger . warning ( "reset fa...
Sends break to device .
25,884
def writeline ( self , data ) : try : if self . ch_mode : data += "\n" parts = split_by_n ( data , self . ch_mode_chunk_size ) for split_str in parts : self . port . write ( split_str . encode ( ) ) time . sleep ( self . ch_mode_ch_delay ) else : self . port . write ( ( data + "\n" ) . encode ( ) ) except SerialExcepti...
Writes data to serial port .
25,885
def _readline ( self , timeout = 1 ) : line = self . port . readline ( timeout = timeout ) return strip_escape ( line . strip ( ) ) if line is not None else line
Read line from serial port .
25,886
def run ( self ) : self . keep_reading = True while self . keep_reading : line = self . _readline ( ) if line : self . input_queue . appendleft ( line ) Dut . process_dut ( self )
Read lines while keep_reading is True . Calls process_dut for each received line .
25,887
def stop ( self ) : self . keep_reading = False if self . readthread is not None : self . readthread . join ( ) self . readthread = None
Stops and joins readthread .
25,888
def print_info ( self ) : table = PrettyTable ( ) start_string = "DutSerial {} \n" . format ( self . name ) row = [ ] info_string = "" if self . config : info_string = info_string + "Configuration for this DUT:\n\n {} \n" . format ( self . config ) if self . comport : table . add_column ( "COM port" , [ ] ) row . appen...
Prints Dut information nicely formatted into a table .
25,889
def append ( self , data ) : for k in self . _entries . keys ( ) : self . _entries [ k ] . append ( data . _entries [ k ] )
Append a Data instance to self
25,890
def init_group ( self , group , chunk_size , compression = None , compression_opts = None ) : create_index ( group , chunk_size ) self . _entries [ 'items' ] . create_dataset ( group , chunk_size , compression = compression , compression_opts = compression_opts ) self . _entries [ 'features' ] . create_dataset ( group ...
Initializes a HDF5 group compliant with the stored data .
25,891
def is_appendable_to ( self , group ) : if not all ( [ k in group for k in self . _entries . keys ( ) ] ) : return False for k in self . _entries . keys ( ) : if not self . _entries [ k ] . is_appendable_to ( group ) : return False return True
Returns True if the data can be appended in a given group .
25,892
def write_to ( self , group , append = False ) : write_index ( self , group , append ) self . _entries [ 'items' ] . write_to ( group ) self . _entries [ 'features' ] . write_to ( group , append ) self . _entries [ 'labels' ] . write_to ( group ) if self . has_properties ( ) : self . _entries [ 'properties' ] . write_t...
Write the data to the given group .
25,893
def check ( labels ) : if not isinstance ( labels , list ) : raise IOError ( 'labels are not in a list' ) if not len ( labels ) : raise IOError ( 'the labels list is empty' ) if not all ( [ isinstance ( l , np . ndarray ) for l in labels ] ) : raise IOError ( 'all labels must be numpy arrays' ) ndim = labels [ 0 ] . nd...
Raise IOError if labels are not correct
25,894
def _write ( self , item , labels , features ) : data = Data ( [ item ] , [ labels ] , [ features ] ) self . _writer . write ( data , self . groupname , append = True )
Writes the given item to the owned file .
25,895
def convert ( self , infile , item = None ) : if not os . path . isfile ( infile ) : raise IOError ( '{} is not a valid file' . format ( infile ) ) if item is None : item = os . path . splitext ( infile ) [ 0 ] ext = os . path . splitext ( infile ) [ 1 ] if ext == '.npz' : self . npz_convert ( infile , item ) elif ext ...
Convert an input file to h5features based on its extension .
25,896
def npz_convert ( self , infile , item ) : data = np . load ( infile ) labels = self . _labels ( data ) features = data [ 'features' ] self . _write ( item , labels , features )
Convert a numpy NPZ file to h5features .
25,897
def h5features_convert ( self , infile ) : with h5py . File ( infile , 'r' ) as f : groups = list ( f . keys ( ) ) for group in groups : self . _writer . write ( Reader ( infile , group ) . read ( ) , self . groupname , append = True )
Convert a h5features file to the latest h5features version .
25,898
def read ( filename , groupname = None , from_item = None , to_item = None , from_time = None , to_time = None , index = None ) : if index is not None : raise NotImplementedError reader = Reader ( filename , groupname ) data = ( reader . read ( from_item , to_item , from_time , to_time ) if index is None else reader . ...
Reads in a h5features file .
25,899
def write ( filename , groupname , items , times , features , properties = None , dformat = 'dense' , chunk_size = 'auto' , sparsity = 0.1 , mode = 'a' ) : sparsity = sparsity if dformat == 'sparse' else None data = Data ( items , times , features , properties = properties , sparsity = sparsity , check = True ) Writer ...
Write h5features data in a HDF5 file .