idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
25,600
def init_process_dut(contextlist, conf, index, args):
    """Initialize process type Dut as DutProcess or DutConsole.

    :param contextlist: AllocationContextList holding the logger and dut lists.
    :param conf: resource configuration dictionary for this dut.
    :param index: index number of the dut (used for its name and tool flags).
    :param args: parsed command line arguments.
    :raises ResourceInitError: if conf has an unrecognized process subtype.
    """
    if "subtype" in conf and conf["subtype"]:
        if conf["subtype"] != "console":
            msg = "Unrecognized process subtype: {}".format(conf["subtype"])
            contextlist.logger.error(msg)
            # BUG FIX: previously raised with the literal, unfilled
            # "Unrecognized process subtype: {}" placeholder string.
            raise ResourceInitError(msg)
        # This is a specialized 'console' process
        config = None
        if "application" in conf:
            config = conf["application"]
        contextlist.logger.debug("Starting a remote console")
        dut = DutConsole(name="D%d" % index, conf=config, params=args)
        dut.index = index
    else:
        app_config = conf["application"]
        binary = app_config['bin']
        init_cli_cmds = app_config.get("init_cli_cmds", None)
        post_cli_cmds = app_config.get("post_cli_cmds", None)
        contextlist.logger.debug("Starting process '%s'" % binary)
        dut = DutProcess(name="D%d" % index, config=conf, params=args)
        dut.index = index
        dut.command = binary
        if args.valgrind:
            dut.use_valgrind(args.valgrind_tool, not args.valgrind_text,
                             args.valgrind_console, args.valgrind_track_origins,
                             args.valgrind_extra_params)
        if args.gdb == index:
            dut.use_gdb()
            contextlist.logger.info("GDB is activated for node %i" % index)
        if args.gdbs == index:
            dut.use_gdbs(True, args.gdbs_port)
            contextlist.logger.info("GDBserver is activated for node %i" % index)
        if args.vgdb == index:
            dut.use_vgdb()
            contextlist.logger.info("VGDB is activated for node %i" % index)
        if args.nobuf:
            dut.no_std_buf()
        if init_cli_cmds is not None:
            dut.set_init_cli_cmds(init_cli_cmds)
        if post_cli_cmds is not None:
            dut.set_post_cli_cmds(post_cli_cmds)
    contextlist.duts.append(dut)
    contextlist.dutinformations.append(dut.get_info())
Initialize process type Dut as DutProcess or DutConsole .
586
15
25,601
def allocate(self, dut_configuration_list, args=None):
    """Allocate resources from available local devices.

    :param dut_configuration_list: object providing get_dut_configuration().
    :param args: unused here, accepted for interface compatibility.
    :return: AllocationContextList with one context per configuration.
    :raises AllocationError: on bad configuration or insufficient devices.
    """
    dut_config_list = dut_configuration_list.get_dut_configuration()
    if not isinstance(dut_config_list, list):
        raise AllocationError("Invalid dut configuration format!")
    # if we need one or more local hardware duts let's search attached
    # devices using DutDetection
    needs_hardware = next(
        (item for item in dut_config_list if item.get("type") == "hardware"), False)
    if needs_hardware:
        self._available_devices = DutDetection().get_available_devices()
        if len(self._available_devices) < len(dut_config_list):
            raise AllocationError("Required amount of devices not available.")
    # Enumerate all required DUT's
    try:
        for dut_config in dut_config_list:
            if not self.can_allocate(dut_config.get_requirements()):
                raise AllocationError("Resource type is not supported")
            self._allocate(dut_config)
    except AllocationError:
        # Locally allocated duts don't need to be released in any way for
        # now, so just re-raise the error.
        raise
    alloc_list = AllocationContextList()
    res_id = None
    for conf in dut_config_list:
        if conf.get("type") == "mbed":
            res_id = conf.get("allocated").get("target_id")
        alloc_list.append(AllocationContext(resource_id=res_id, alloc_data=conf))
    alloc_list.set_dut_init_function("serial", init_generic_serial_dut)
    alloc_list.set_dut_init_function("process", init_process_dut)
    alloc_list.set_dut_init_function("mbed", init_mbed_dut)
    return alloc_list
Allocates resources from available local devices .
450
9
25,602
def _allocate(self, dut_configuration):  # pylint: disable=too-many-branches
    """Allocate a single resource based on dut_configuration.

    :param dut_configuration: configuration object with type and requirements.
    :return: True on successful allocation.
    :raises AllocationError: when no suitable device/port can be allocated.
    """
    if dut_configuration["type"] == "hardware":
        dut_configuration.set("type", "mbed")
    if dut_configuration["type"] == "mbed":
        if not self._available_devices:
            raise AllocationError("No available devices to allocate from")
        dut_reqs = dut_configuration.get_requirements()
        platforms = None if 'allowed_platforms' not in dut_reqs \
            else dut_reqs['allowed_platforms']
        platform_name = None if 'platform_name' not in dut_reqs \
            else dut_reqs["platform_name"]
        if platform_name is None and platforms:
            platform_name = platforms[0]
        if platform_name and platforms:
            if platform_name not in platforms:
                raise AllocationError("Platform name not in allowed platforms.")
        # Enumerate through all available devices
        for dev in self._available_devices:
            if platform_name and dev["platform_name"] != platform_name:
                self.logger.debug("Skipping device %s because of mismatching platform. "
                                  "Required %s but device was %s",
                                  dev['target_id'], platform_name, dev['platform_name'])
                continue
            if dev['state'] == 'allocated':
                self.logger.debug("Skipping device %s because it was "
                                  "already allocated", dev['target_id'])
                continue
            if DutDetection.is_port_usable(dev['serial_port']):
                dev['state'] = "allocated"
                dut_reqs['allocated'] = dev
                self.logger.info("Allocated device %s", dev['target_id'])
                return True
            self.logger.info("Could not open serial port (%s) of "
                             "allocated device %s",
                             dev['serial_port'], dev['target_id'])
        # Didn't find a matching device to allocate so allocation failed
        raise AllocationError("No suitable local device available")
    elif dut_configuration["type"] == "serial":
        dut_reqs = dut_configuration.get_requirements()
        if not dut_reqs.get("serial_port"):
            raise AllocationError(
                "Serial port not defined for requirement {}".format(dut_reqs))
        if not DutDetection.is_port_usable(dut_reqs['serial_port']):
            raise AllocationError(
                "Serial port {} not usable".format(dut_reqs['serial_port']))
    # Successful allocation, return True
    return True
Internal allocation function . Allocates a single resource based on dut_configuration .
645
18
25,603
def register_tc_plugins(self, plugin_name, plugin_class):
    """Register a test case level plugin, attaching its parts to the right areas.

    :param plugin_name: unique name for the plugin.
    :param plugin_class: instantiated plugin object.
    :raises PluginException: if a plugin with this name is already registered.
    """
    if plugin_name in self.registered_plugins:
        raise PluginException("Plugin {} already registered! Duplicate "
                              "plugins?".format(plugin_name))
    self.logger.debug("Registering plugin %s", plugin_name)
    plugin_class.init(bench=self.bench)
    if plugin_class.get_bench_api() is not None:
        self.plugin_types[PluginTypes.BENCH](plugin_name, plugin_class)
    if plugin_class.get_parsers() is not None:
        self.plugin_types[PluginTypes.PARSER](plugin_name, plugin_class)
    if plugin_class.get_external_services() is not None:
        self.plugin_types[PluginTypes.EXTSERVICE](plugin_name, plugin_class)
    self.registered_plugins.append(plugin_name)
Loads a plugin as a dictionary and attaches needed parts to correct areas for testing parts .
230
18
25,604
def register_run_plugins(self, plugin_name, plugin_class):
    """Register a run level plugin (allocators) for Icetea run global parts.

    :param plugin_name: unique name for the plugin.
    :param plugin_class: instantiated plugin object.
    :raises PluginException: if a plugin with this name is already registered.
    """
    if plugin_name in self.registered_plugins:
        raise PluginException("Plugin {} already registered! "
                              "Duplicate plugins?".format(plugin_name))
    self.logger.debug("Registering plugin %s", plugin_name)
    if plugin_class.get_allocators():
        self.plugin_types[PluginTypes.ALLOCATOR](plugin_name, plugin_class)
    self.registered_plugins.append(plugin_name)
Loads a plugin as a dictionary and attaches needed parts to correct Icetea run global parts .
125
20
25,605
def load_default_tc_plugins(self):
    """Load default test case level plugins from default_plugins.

    Registration failures are logged at debug level and skipped.
    """
    for plugin_name, plugin_class in default_plugins.items():
        if not issubclass(plugin_class, PluginBase):
            continue
        try:
            self.register_tc_plugins(plugin_name, plugin_class())
        except PluginException as error:
            self.logger.debug(error)
Load default test case level plugins from icetea_lib . Plugin . plugins . default_plugins .
77
21
25,606
def load_custom_tc_plugins(self, plugin_path=None):
    """Load custom test case level plugins from plugin_path.

    :param plugin_path: path to a module defining a plugins_to_load mapping.
    :raises PluginException: if the module cannot be imported.
    """
    if not plugin_path:
        return
    directory = os.path.dirname(plugin_path)
    sys.path.append(directory)
    # Strip the file extension; os.path.splitext replaces the manual
    # rindex-based slicing and also leaves dot-files like ".hidden" intact.
    modulename = os.path.splitext(os.path.split(plugin_path)[1])[0]
    try:
        module = importlib.import_module(modulename)
    except ImportError:
        raise PluginException(
            "Unable to import custom plugin information from {}.".format(plugin_path))
    for plugin_name, plugin_class in module.plugins_to_load.items():
        if issubclass(plugin_class, PluginBase):
            try:
                self.register_tc_plugins(plugin_name, plugin_class())
            except PluginException as error:
                self.logger.debug(error)
Load custom test case level plugins from plugin_path .
208
11
25,607
def load_default_run_plugins(self):
    """Load default run level plugins from default_plugins.

    Registration failures are logged at debug level and skipped.
    """
    for plugin_name, plugin_class in default_plugins.items():
        if not issubclass(plugin_class, RunPluginBase):
            continue
        try:
            self.register_run_plugins(plugin_name, plugin_class())
        except PluginException as error:
            self.logger.debug(error)
Load default run level plugins from icetea_lib . Plugin . plugins . default_plugins .
78
20
25,608
def start_external_service(self, service_name, conf=None):
    """Start external service service_name with configuration conf.

    The started service is recorded and attached to the bench as an attribute.

    :raises PluginException: if starting the service fails.
    """
    if service_name not in self._external_services:
        self.logger.warning("Service %s not found. Check your plugins.", service_name)
        return
    service_cls = self._external_services[service_name]
    service = service_cls(service_name, conf=conf, bench=self.bench)
    try:
        service.start()
    except PluginException:
        self.logger.exception("Starting service %s caused an exception!", service_name)
        raise PluginException(
            "Failed to start external service {}".format(service_name))
    self._started_services.append(service)
    setattr(self.bench, service_name, service)
Start external service service_name with configuration conf .
161
10
25,609
def stop_external_services(self):
    """Stop all started external services and clear the started-services list.

    A failing stop is logged (with traceback) and does not abort the loop.
    """
    for service in self._started_services:
        self.logger.debug("Stopping application %s", service.name)
        try:
            service.stop()
        except PluginException:
            # Typo fix: message previously read "caused and exception".
            self.logger.exception(
                "Stopping external service %s caused an exception!", service.name)
    self._started_services = []
Stop all external services .
80
5
25,610
def _register_bench_extension(self, plugin_name, plugin_instance):
    """Register a bench extension: attach each provided API attribute to the bench.

    :raises PluginException: if an attribute already exists on the bench.
    """
    for attr, value in plugin_instance.get_bench_api().items():
        if hasattr(self.bench, attr):
            raise PluginException("Attribute {} already exists in bench! Unable to add "
                                  "plugin {}.".format(attr, plugin_name))
        setattr(self.bench, attr, value)
Register a bench extension .
111
5
25,611
def _register_dataparser(self, plugin_name, plugin_instance):
    """Register each parser provided by a plugin with the response parser.

    :raises PluginException: if a parser name is already registered.
    """
    for parser, parser_obj in plugin_instance.get_parsers().items():
        if self.responseparser.has_parser(parser):
            raise PluginException("Parser {} already registered to parsers! Unable to "
                                  "add parsers from {}.".format(parser, plugin_name))
        self.responseparser.add_parser(parser, parser_obj)
Register a parser .
112
4
25,612
def _register_external_service(self, plugin_name, plugin_instance):
    """Register each external service provided by a plugin.

    :raises PluginException: if a service name is already registered.
    """
    for attr, service in plugin_instance.get_external_services().items():
        if attr in self._external_services:
            raise PluginException("External service with name {} already exists! Unable to add "
                                  "services from plugin {}.".format(attr, plugin_name))
        self._external_services[attr] = service
Register an external service .
112
5
25,613
def _register_allocator(self, plugin_name, plugin_instance):
    """Register each allocator provided by a plugin.

    :raises PluginException: if an allocator name is already registered.
    """
    for allocator, allocator_obj in plugin_instance.get_allocators().items():
        if allocator in self._allocators:
            raise PluginException("Allocator with name {} already exists! unable to add "
                                  "allocators from plugin {}".format(allocator, plugin_name))
        self._allocators[allocator] = allocator_obj
Register an allocator .
109
5
25,614
def create(host, port, result_converter=None, testcase_converter=None, args=None):
    """Create an instance of the cloud client (called by Icetea).

    Must exist and must not return None: return a Client instance or raise.
    """
    client = SampleClient(host, port, result_converter, testcase_converter, args)
    return client
Function which is called by Icetea to create an instance of the cloud client . This function must exist . This function must not return None . Either return an instance of Client or raise .
52
39
25,615
def send_results(self, result):
    """Upload a result object to the server.

    Converts the result with self.result_converter first when one is set.
    """
    converter = self.result_converter
    print(converter(result) if converter else result)
Upload a result object to server . If resultConverter has been provided use it to convert result object to format accepted by the server . If needed use testcase_converter to convert tc metadata in result to suitable format .
38
47
25,616
def get_tc_api(self, host, headers=None, cert=None, logger=None):
    """Return an HttpApi wrapper (Api) for host.

    Falls back to self.logger when no logger is given and one is available.
    """
    effective_logger = logger
    if effective_logger is None and self.logger:
        effective_logger = self.logger
    return Api(host, headers, cert, effective_logger)
Gets HttpApi wrapped into a neat little package that raises TestStepFail if expected status code is not returned by the server . Default setting for expected status code is 200 . Set expected to None when calling methods to ignore the expected status code parameter or set raiseException = False to disable raising the exception .
50
63
25,617
def _raise_fail(self, response, expected):
    """Raise TestStepFail with a neatly formatted status-code mismatch message.

    Falls back to a placeholder payload note if the response content cannot
    be formatted into the log message.
    """
    def _log_error(payload):
        # Log the mismatch with payload details when a logger is available.
        if self.logger:
            self.logger.error("Status code "
                              "{} != {}. \n\n "
                              "Payload: {}".format(response.status_code,
                                                   expected, payload))
    try:
        _log_error(response.content)
        raise TestStepFail(
            "Status code {} != {}.".format(response.status_code, expected))
    except TestStepFail:
        raise
    except:  # pylint: disable=bare-except
        _log_error("Unable to parse payload")
        raise TestStepFail(
            "Status code {} != {}.".format(response.status_code, expected))
Raise a TestStepFail with neatly formatted error message
179
11
25,618
def generate(self, *args, **kwargs):
    """Generate the xml and save the report in Junit xml format.

    Writes the same xml both to args[0] and to the 'latest' junit.xml file.
    """
    xmlstr = str(self)
    for path in (args[0], self.get_latest_filename('junit.xml')):
        with open(path, 'w') as out:
            out.write(xmlstr)
Implementation for generate method from ReportBase . Generates the xml and saves the report in Junit xml format .
84
23
25,619
def __generate(results):
    """Generate the Junit xml string from results.

    Results that failed but still have retries left are skipped entirely,
    since they will reappear later in the list.
    """
    doc, tag, text = Doc().tagtext()
    # Counters for testsuite tag info
    count = 0
    fails = 0
    errors = 0
    skips = 0
    for result in results:
        # Loop through all results and count the ones that were not later retried.
        if result.passed() is False and result.retries_left > 0:
            # This will appear in the list again, move on
            continue
        count += 1
        if result.passed():
            # Passed, no need to increment anything else
            continue
        elif result.skipped():
            skips += 1
        elif result.was_inconclusive():
            errors += 1
        else:
            fails += 1
    with tag('testsuite', tests=str(count), failures=str(fails),
             errors=str(errors), skipped=str(skips)):
        for result in results:
            if result.passed() is False and result.retries_left > 0:
                continue
            class_name = result.get_tc_name()
            models = result.get_dut_models()
            if models:
                class_name = class_name + "." + models
            name = result.get_toolchain()
            with tag('testcase', classname=class_name, name=name,
                     time=result.get_duration(seconds=True)):
                if result.stdout:
                    with tag('system-out'):
                        text(result.stdout)
                if result.passed():
                    continue
                elif result.skipped():
                    with tag('skipped'):
                        text(result.skip_reason)
                elif result.was_inconclusive():
                    with tag('error', message=hex_escape_str(result.fail_reason)):
                        text(result.stderr)
                else:
                    with tag('failure', message=hex_escape_str(result.fail_reason)):
                        text(result.stderr)
    return indent(doc.getvalue(), indentation=' ' * 4)
Static method which generates the Junit xml string from results
445
11
25,620
def open_dut_connections(self):
    """Open connections to duts and start dut read threads.

    On DutConnectionError the failing dut is closed and the error re-raised.
    """
    for dut in self.duts:
        try:
            dut.start_dut_thread()
            if hasattr(dut, "command"):
                dut.open_dut(dut.command)
            else:
                dut.open_dut()
        except DutConnectionError:
            self.logger.exception("Failed when opening dut connection")
            dut.close_dut(False)
            dut.close_connection()
            dut = None
            raise
Opens connections to Duts . Starts Dut read threads .
115
13
25,621
def check_flashing_need(self, execution_type, build_id, force):
    """Check if flashing of a local device is required.

    :param execution_type: flashing only applies to 'hardware' execution.
    :param build_id: build identifier used to look up the binary file.
    :param force: when True, skip the binary extension compatibility check.
    :return: True if the device should be flashed, False if the binary type
             is not flashable.
    :raises ResourceInitError: if no binary exists for build_id or the type
             is not 'hardware' with an existing file.
    """
    binary_file_name = AllocationContextList.get_build(build_id)
    # Previously the missing-binary raise was duplicated in two branches;
    # collapsed into a single guard with identical behavior.
    if not binary_file_name:
        raise ResourceInitError("Given binary %s does not exist" % build_id)
    if execution_type == 'hardware' and os.path.isfile(binary_file_name):
        if not force:
            # @todo: Make a better check for binary compatibility
            extension = os.path.splitext(binary_file_name)[-1].lower()
            if extension not in ('.bin', '.hex'):
                self.logger.debug(
                    "File ('%s') is not supported to flash, skip it" % (build_id))
                return False
        return True
    raise ResourceInitError("Given binary %s does not exist" % build_id)
Check if flashing of local device is required .
207
9
25,622
def remove_handlers(logger):
    """Remove all handlers from a logger, closing FileHandlers first.

    Iterates over a reversed copy ([::-1]) so removal is safe while looping.
    """
    # TODO: Issue related to placeholder logger objects appearing in some rare
    # cases. Check below required as a workaround.
    if not hasattr(logger, "handlers"):
        return
    for handler in logger.handlers[::-1]:
        try:
            if isinstance(handler, logging.FileHandler):
                handler.close()
            logger.removeHandler(handler)
        except:  # pylint: disable=bare-except
            import traceback
            traceback.print_exc()
            break
Remove handlers from logger .
107
5
25,623
def get_base_logfilename(logname):
    """Return full path (base log directory + logname) for a logfile.

    The path is also recorded in GLOBAL_LOGFILES.
    """
    fname = os.path.join(get_base_dir(), logname)
    GLOBAL_LOGFILES.append(fname)
    return fname
Return filename for a logfile filename will contain the actual path + filename
55
14
25,624
def get_file_logger(name, formatter=None):
    """Return a file logger writing into the testcase log directory.

    Anything logged with it is not visible in the console or other loggers.

    :raises ValueError: if name is empty or None.
    """
    if name is None or name == "":
        raise ValueError("Can't make a logger without name")
    logger = logging.getLogger(name)
    remove_handlers(logger)
    logger.setLevel(logging.INFO)
    if formatter is None:
        config = LOGGING_CONFIG.get(name, {}).get("file",
                                                  DEFAULT_LOGGING_CONFIG.get("file"))
        formatter = BenchFormatter(config.get("level", "DEBUG"),
                                   config.get("dateformat"))
    logfile = get_testcase_logfilename(name + ".log")
    logger.addHandler(_get_filehandler_with_formatter(logfile, formatter))
    return logger
Return a file logger that will log into a file located in the testcase log directory . Anything logged with a file logger won t be visible in the console or any other logger .
184
36
25,625
def _check_existing_logger(loggername, short_name):
    """Return the existing logger for loggername, or None if not registered.

    The adapter's source field is updated to short_name when it differs.
    """
    if loggername not in LOGGERS:
        return None
    adapter = LOGGERS[loggername]
    # Check if short_name matches the existing one, if not update it
    if isinstance(adapter, BenchLoggerAdapter):
        if "source" not in adapter.extra or adapter.extra["source"] != short_name:
            adapter.extra["source"] = short_name
    return adapter
Check if logger with name loggername exists .
130
10
25,626
def _add_filehandler(logger, logpath, formatter=None, name="Bench"):
    """Add a FileHandler writing to logpath to logger and return the logger.

    Level comes from the per-name logging config (default "DEBUG").
    """
    if formatter is None:
        formatter = BenchFormatterWithType(loggername=name)
    handler = _get_filehandler_with_formatter(logpath, formatter)
    config = LOGGING_CONFIG.get(name, {}).get("file",
                                              DEFAULT_LOGGING_CONFIG.get("file"))
    handler.setLevel(getattr(logging, config.get("level", "DEBUG")))
    logger.addHandler(handler)
    return logger
Adds a FileHandler to logger .
137
7
25,627
def _get_basic_logger(loggername, log_to_file, logpath):
    """Get a logger with our basic configuration done.

    Applies truncation filtering when configured and optionally attaches a
    file handler writing to logpath.
    """
    logger = logging.getLogger(loggername)
    logger.propagate = False
    remove_handlers(logger)
    logger.setLevel(logging.DEBUG)
    logger_config = LOGGING_CONFIG.get(loggername, DEFAULT_LOGGING_CONFIG)
    if TRUNCATE_LOG or logger_config.get("truncate_logs").get("truncate"):
        cfilter = ContextFilter()
        trunc_logs = logger_config.get("truncate_logs")
        # pylint: disable=invalid-name
        cfilter.MAXIMUM_LENGTH = trunc_logs.get(
            "max_len", DEFAULT_LOGGING_CONFIG.get("truncate_logs").get("max_len"))
        cfilter.REVEAL_LENGTH = trunc_logs.get(
            "reveal_len", DEFAULT_LOGGING_CONFIG.get("truncate_logs").get("reveal_len"))
        logger.addFilter(cfilter)
    # Filehandler for logger
    if log_to_file:
        _add_filehandler(logger, logpath, name=loggername)
    return logger
Get a logger with our basic configuration done .
303
9
25,628
def get_resourceprovider_logger(name=None, short_name=" ", log_to_file=True):
    """Get a logger for ResourceProvider and its components such as Allocators.

    Reuses a cached logger when one exists for name; otherwise builds one
    with a console handler whose level depends on verbosity settings.
    """
    global LOGGERS
    loggername = name
    existing = _check_existing_logger(loggername, short_name)
    if existing is not None:
        return existing
    logger_config = LOGGING_CONFIG.get(name, DEFAULT_LOGGING_CONFIG)
    logger = _get_basic_logger(loggername, log_to_file,
                               get_base_logfilename(loggername + ".log"))
    console_handler = logging.StreamHandler()
    console_handler.formatter = BenchFormatterWithType(COLOR_ON)
    if VERBOSE_LEVEL > 0 and not SILENT_ON:
        console_handler.setLevel(logging.DEBUG)
    elif SILENT_ON:
        console_handler.setLevel(logging.WARN)
    else:
        console_handler.setLevel(getattr(logging, logger_config.get("level")))
    logger.addHandler(console_handler)
    LOGGERS[loggername] = BenchLoggerAdapter(logger, {"source": short_name})
    return LOGGERS[loggername]
Get a logger for ResourceProvider and its components such as Allocators .
263
16
25,629
def get_external_logger(name=None, short_name=" ", log_to_file=True):
    """Get a logger for external modules, usually less verbose on console.

    Reuses a cached logger when one exists for name; the log filename can be
    overridden by the per-name logging configuration.
    """
    global LOGGERS
    loggername = name
    existing = _check_existing_logger(loggername, short_name)
    if existing is not None:
        return existing
    logging_config = LOGGING_CONFIG.get(name, LOGGING_CONFIG.get("external"))
    filename = logging_config.get("file", {}).get("name", loggername)
    if not filename.endswith(".log"):
        filename = str(filename) + ".log"
    logger = _get_basic_logger(loggername, log_to_file, get_base_logfilename(filename))
    console_handler = logging.StreamHandler()
    console_handler.formatter = BenchFormatterWithType(COLOR_ON)
    if VERBOSE_LEVEL == 1 and not SILENT_ON:
        console_handler.setLevel(logging.INFO)
    elif VERBOSE_LEVEL >= 2 and not SILENT_ON:
        console_handler.setLevel(logging.DEBUG)
    elif SILENT_ON:
        console_handler.setLevel(logging.ERROR)
    else:
        console_handler.setLevel(getattr(logging, logging_config.get("level")))
    logger.addHandler(console_handler)
    LOGGERS[loggername] = BenchLoggerAdapter(logger, {"source": short_name})
    return LOGGERS[loggername]
Get a logger for external modules whose logging should usually be on a less verbose level .
335
18
25,630
def get_bench_logger(name=None, short_name=" ", log_to_file=True):
    """Return a logger that is a child of the bench logger.

    Anything logged to it is also logged to the bench logger. Creates the
    logger on demand if it does not already exist.
    """
    global LOGGERS
    # Get the root bench logger if name is none or empty or bench
    if name is None or name == "" or name == "bench":
        return LOGGERS["bench"]
    loggername = "bench." + name
    existing = _check_existing_logger(loggername, short_name)
    if existing is not None:
        return existing
    logger = _get_basic_logger(loggername, log_to_file,
                               get_testcase_logfilename(loggername + ".log"))
    logger.propagate = True
    LOGGERS[loggername] = BenchLoggerAdapter(logger, {"source": short_name})
    return LOGGERS[loggername]
Return a logger instance for given name . The logger will be a child of the bench logger so anything that is logged to it will be also logged to bench logger . If a logger with the given name doesn t already exist create it using the given parameters .
182
51
25,631
def _read_config(config_location):
    """Read logging configuration from a json file into LOGGING_CONFIG.

    The "logging" section is schema-validated and merged over the existing
    configuration.
    """
    global LOGGING_CONFIG
    with open(config_location, "r") as config_loc:
        cfg_file = json.load(config_loc)
    if "logging" in cfg_file:
        log_dict = cfg_file.get("logging")
        schema_path = os.path.abspath(os.path.join(__file__, os.path.pardir,
                                                   'logging_schema.json'))
        with open(schema_path) as schema_file:
            logging_schema = json.load(schema_file)
        jsonschema.validate(log_dict, logging_schema)
        LOGGING_CONFIG = jsonmerge.merge(LOGGING_CONFIG, log_dict)
Read configuration for logging from a json file . Merges the read dictionary to LOGGING_CONFIG .
174
22
25,632
def format(self, record):
    """Format a log record, defaulting its 'type' field to a single space."""
    record.type = getattr(record, "type", " ")
    return self._formatter.format(record)
Format record with formatter .
37
6
25,633
def format_message(msg):
    """Format an assert message with the failing caller's location.

    Uses stack inspection (two frames up) to fetch the filename, function
    and line number, producing a three-line message.
    """
    caller_frame = inspect.stack()[2][0]
    info = inspect.getframeinfo(caller_frame)
    _, filename = os.path.split(info.filename)
    caller_site = "In file {!s}, in function {!s}, at line {:d}".format(
        filename, info.function, info.lineno)
    return "{!s}\n{!s}\n{!s}".format(msg, caller_site, info.code_context)
Formatting function for assert messages . Fetches the filename function and line number of the code causing the fail and formats it into a three - line error message . Stack inspection is used to get the information . Originally done by BLE - team for their testcases .
118
53
25,634
def assertTraceDoesNotContain(response, message):
    """Raise TestStepFail if response.verify_trace finds message in the traces.

    :raises AttributeError: if response has no verify_trace method.
    """
    if not hasattr(response, "verify_trace"):
        raise AttributeError("Response object does not contain verify_trace method!")
    found = response.verify_trace(message, False)
    if found:
        raise TestStepFail('Assert: Message(s) "%s" in response' % message)
Raise TestStepFail if response . verify_trace finds message from response traces .
80
17
25,635
def assertTraceContains(response, message):
    """Raise TestStepFail if response.verify_trace does not find message.

    :raises AttributeError: if response has no verify_trace method.
    """
    if not hasattr(response, "verify_trace"):
        raise AttributeError("Response object does not contain verify_trace method!")
    found = response.verify_trace(message, False)
    if not found:
        raise TestStepFail('Assert: Message(s) "%s" not in response' % message)
Raise TestStepFail if response . verify_trace does not find message from response traces .
80
19
25,636
def assertDutTraceDoesNotContain(dut, message, bench):
    """Raise TestStepFail if bench.verify_trace finds message in dut traces.

    :raises AttributeError: if bench has no verify_trace method.
    """
    if not hasattr(bench, "verify_trace"):
        raise AttributeError("Bench object does not contain verify_trace method!")
    found = bench.verify_trace(dut, message, False)
    if found:
        raise TestStepFail('Assert: Message(s) "%s" in response' % message)
Raise TestStepFail if bench . verify_trace finds message from dut traces .
88
20
25,637
def assertNone(expr, message=None):
    """Assert that expr is None, raising TestStepFail otherwise."""
    if expr is None:
        return
    if message is None:
        raise TestStepFail("Assert: %s != None" % str(expr))
    raise TestStepFail(format_message(message))
Assert that expr is None .
49
7
25,638
def assertNotNone(expr, message=None):
    """Assert that expr is not None, raising TestStepFail otherwise."""
    if expr is not None:
        return
    if message is None:
        raise TestStepFail("Assert: %s == None" % str(expr))
    raise TestStepFail(format_message(message))
Assert that expr is not None .
49
8
25,639
def assertEqual(first, second, message=None):
    """Assert that first equals second, raising TestStepFail otherwise."""
    if first == second:
        return
    if message is None:
        raise TestStepFail("Assert: %s != %s" % (str(first), str(second)))
    raise TestStepFail(format_message(message))
Assert that first equals second .
60
7
25,640
def assertNotEqual(first, second, message=None):
    """Assert that first does not equal second, raising TestStepFail otherwise."""
    if first != second:
        return
    if message is None:
        raise TestStepFail("Assert: %s == %s" % (str(first), str(second)))
    raise TestStepFail(format_message(message))
Assert that first does not equal second .
61
9
25,641
def assertJsonContains(jsonStr=None, key=None, message=None):
    """Assert that the json string jsonStr contains key.

    :raises TestStepFail: when jsonStr is None, unparseable, or lacks key.
    """
    if jsonStr is None:
        raise TestStepFail(format_message(message) if message is not None
                           else "Json string is empty")
    try:
        data = json.loads(jsonStr)
    except (TypeError, ValueError) as error:
        raise TestStepFail(format_message(message) if message is not None
                           else "Unable to parse json " + str(error))
    if key not in data:
        raise TestStepFail(format_message(message) if message is not None
                           else "Assert: "
                                "Key : %s is not "
                                "in : %s" % (str(key), str(jsonStr)))
Assert that jsonStr contains key .
163
8
25,642
def get_path(filename):
    """Return absolute path for filename: itself for a directory, else its parent."""
    if os.path.isdir(filename):
        return abspath(filename)
    return dirname(abspath(filename))
Get absolute path for filename .
37
6
25,643
def get_git_file_path(filename):
    """Return filename's path relative to its git root ('' when not in a repo)."""
    git_root = get_git_root(filename)
    if not git_root:
        return ''
    return relpath(filename, git_root).replace("\\", "/")
Get relative path for filename in git root .
50
9
25,644
def get_git_info(git_folder, verbose=False):
    """Detect GIT information by folder.

    :param git_folder: path to a folder inside a git repository.
    :param verbose: print the gathered information when True.
    :return: dict of commit/branch/url/scm details, {} on error.
    """
    if verbose:
        print("detect GIT info by folder: '%s'" % git_folder)
    try:
        git_info = {
            "commitid": get_commit_id(git_folder),
            "branch": get_current_branch(git_folder),
            "git_path": get_git_file_path(git_folder),
            "url": get_remote_url(git_folder),
            "scm": "unknown",
            "scm_group": "unknown",
            "scm_path": "unknown",
            "scm_link": ""
        }
        if is_git_root_dirty(git_folder):
            git_info['dirty'] = True
    except Exception as err:  # pylint: disable=broad-except
        print("GitTool exception:")
        print(err)
        return {}
    if isinstance(git_info['url'], str):
        match = re.search(r"github\.com:(.*)\/(.*)", git_info['url'])
        if match:
            git_info["scm"] = "github.com"
            git_info["scm_path"] = match.group(2)
            git_info["scm_group"] = match.group(1)
            # BUG FIX: the fragment previously started with a stray space,
            # producing an invalid link "https://github.com/ group/repo".
            scm_link_end = "%s/%s" % (git_info["scm_group"],
                                      git_info["scm_path"].replace(".git", ""))
            git_info["scm_link"] = "https://github.com/" + scm_link_end
            git_info["scm_link"] += "/tree/%s/%s" % (git_info['commitid'],
                                                     git_info["git_path"])
    if verbose:
        print("all git_info:")
        print(git_info)
    return git_info
Detect GIT information by folder .
450
7
25,645
def __get_git_bin():
    """Return the git binary location, preferring known absolute paths."""
    for candidate in ['/usr/bin/git']:
        if os.path.exists(candidate):
            return candidate
    # Fall back to whatever 'git' resolves to on PATH.
    return 'git'
Get git binary location .
47
5
25,646
def build(self):  # pylint: disable=len-as-condition
    """Get build name from the first dut's build (None when unavailable)."""
    infos = self.dutinformation
    if len(infos) > 0 and infos.get(0).build is not None:
        return infos.get(0).build.name
    return None
get build name .
65
4
25,647
def build_date(self):  # pylint: disable=len-as-condition
    """Get build date from the first dut's build (None when unavailable)."""
    infos = self.dutinformation
    if len(infos) > 0 and infos.get(0).build is not None:
        return infos.get(0).build.date
    return None
get build date .
67
4
25,648
def build_sha1(self):  # pylint: disable=len-as-condition
    """Get sha1 hash of the first dut's build (None when unavailable)."""
    infos = self.dutinformation
    if len(infos) > 0 and infos.get(0).build is not None:
        return infos.get(0).build.sha1
    return None
get sha1 hash of build .
70
8
25,649
def build_git_url(self):  # pylint: disable=len-as-condition
    """Get git url of the first dut's build (None when unavailable)."""
    infos = self.dutinformation
    if len(infos) > 0 and infos.get(0).build is not None:
        return infos.get(0).build.giturl
    return None
get build git url .
70
5
25,650
def build_data(self):  # pylint: disable=len-as-condition
    """Get build data of the first dut's build (None when unavailable)."""
    infos = self.dutinformation
    if len(infos) > 0 and infos.get(0).build is not None:
        return infos.get(0).build.get_data()
    return None
get build data .
71
4
25,651
def build_branch(self):  # pylint: disable=len-as-condition
    """Get build branch of the first dut's build (None when unavailable)."""
    infos = self.dutinformation
    if len(infos) > 0 and infos.get(0).build is not None:
        return infos.get(0).build.branch
    return None
get build branch .
68
4
25,652
def buildcommit(self):  # pylint: disable=len-as-condition
    """Get commit id of the first dut's build (None when unavailable)."""
    infos = self.dutinformation
    if len(infos) > 0 and infos.get(0).build is not None:
        return infos.get(0).build.commit_id
    return None
get build commit id .
68
5
25,653
def set_verdict(self, verdict, retcode=-1, duration=-1):
    """Set the final verdict for this Result.

    :param verdict: one of pass/fail/unknown/skip/inconclusive (case-insensitive)
    :param retcode: process return code; 0 is implied for a passing verdict
    :param duration: duration in seconds; ignored when negative
    :raises ValueError: if verdict is not one of the recognised values
    """
    verdict = verdict.lower()
    allowed = ['pass', 'fail', 'unknown', 'skip', 'inconclusive']
    if verdict not in allowed:
        raise ValueError("Unknown verdict {}".format(verdict))
    # A passing verdict with no explicit return code means success (0).
    if verdict == 'pass' and retcode == -1:
        retcode = 0
    self.__verdict = verdict
    self.retcode = retcode
    if duration >= 0:
        self.duration = duration
Set the final verdict for this Result .
113
8
25,654
def build_result_metadata(self, data=None, args=None):
    """Copy build/job metadata from *data* (or collected from *args*) onto this object.

    Only truthy values are copied; missing or empty entries leave the
    corresponding attribute untouched.
    """
    data = data if data else self._build_result_metainfo(args)
    for attribute in ("build_branch", "buildcommit", "build_git_url", "build_url",
                      "campaign", "job_id", "toolchain", "build_date"):
        value = data.get(attribute)
        if value:
            setattr(self, attribute, value)
collect metadata into this object
258
5
25,655
def _build_result_metainfo(args):
    """Internal helper: collect build metadata from parsed args into a dict.

    Only attributes that exist on *args* and are truthy are included.
    The original repeated the same hasattr/truthy block eight times; a
    mapping table removes the duplication without changing behavior.

    :param args: argparse namespace (or any object with the listed attributes)
    :return: dict of result-metadata keys to values
    """
    # Result-dict key -> attribute name on args, in the original insertion order.
    mapping = (
        ("build_branch", "branch"),
        ("buildcommit", "commitId"),
        ("build_git_url", "gitUrl"),
        ("build_url", "buildUrl"),
        ("campaign", "campaign"),
        ("job_id", "jobId"),
        ("toolchain", "toolchain"),
        ("build_date", "buildDate"),
    )
    data = dict()
    for key, attr in mapping:
        value = getattr(args, attr, None)
        if value:
            data[key] = value
    return data
Internal helper for collecting metadata from args to results
254
9
25,656
def get_duration(self, seconds=False):
    """Return the test case duration as a string.

    :param seconds: when True return raw seconds, otherwise H:MM:SS form
    """
    if seconds:
        return str(self.duration)
    return str(datetime.timedelta(seconds=self.duration))
Get test case duration .
41
5
25,657
def has_logs(self):
    """Return a list of log file paths under self.logpath.

    Empty list when logpath is unset or the directory does not exist.
    """
    collected = []
    if self.logpath is None:
        return collected
    if os.path.exists(self.logpath):
        for root, _, filenames in os.walk(os.path.abspath(self.logpath)):
            collected.extend(os.path.join(root, name) for name in filenames)
    return collected
Check if log files are available and return file names if they exist .
91
14
25,658
def open_connection(self):
    """Open the connection by starting the DUT process and its reader thread.

    :raises DutConnectionError: when no process is defined, build
        initialization fails, or the process cannot be started.
    """
    self.logger.debug("Open CLI Process '%s'", (self.comport), extra={'type': '<->'})
    self.cmd = self.comport if isinstance(self.comport, list) else [self.comport]
    if not self.comport:
        raise DutConnectionError("Process not defined!")
    try:
        self.build = Build.init(self.cmd[0])
    except NotImplementedError as error:
        self.logger.error("Build initialization failed. Check your build location.")
        self.logger.debug(error)
        raise DutConnectionError(error)
    # Append configured binary arguments before launching.
    app = self.config.get("application")
    if app and app.get("bin_args"):
        self.cmd = self.cmd + app.get("bin_args")
    # Start process & reader thread; Dut.process_dut() is invoked on new data.
    try:
        self.start_process(self.cmd, processing_callback=lambda: Dut.process_dut(self))
    except KeyboardInterrupt:
        raise
    except Exception as error:
        raise DutConnectionError("Couldn't start DUT target process {}".format(error))
Open connection by starting the process .
275
7
25,659
def writeline(self, data, crlf="\n"):  # pylint: disable=arguments-differ
    """Write one line of *data* to the process, delegating to GenericProcess."""
    GenericProcess.writeline(self, data, crlf=crlf)
Write data to process .
44
5
25,660
def _jsonfileconstructor(self, filename=None, filepath=None, logger=None):
    """Construct a JsonFile object.

    Defaults: path is <testcase dir>/../session_data, name is
    default_file.json, logger is the bench logger.
    """
    if filepath:
        path = filepath
    else:
        tc_path = os.path.abspath(
            os.path.join(inspect.getfile(self.bench.__class__), os.pardir))
        path = os.path.abspath(os.path.join(tc_path, os.pardir, "session_data"))
    name = filename if filename else "default_file.json"
    log = logger if logger else self.bench.logger
    self.bench.logger.info("Setting json file location to: {}".format(path))
    return files.JsonFile(log, path, name)
Constructor method for the JsonFile object .
163
10
25,661
def _get_sd(file_descr):
    """Return the registered StreamDescriptor whose stream matches *file_descr*.

    :param file_descr: fileno to look up
    :return: matching StreamDescriptor or None
    """
    return next((descriptor for descriptor in NonBlockingStreamReader._streams
                 if descriptor.stream.fileno() == file_descr), None)
Get streamdescriptor matching file_descr fileno .
53
13
25,662
def _read_fd(file_descr):
    """Read incoming data from *file_descr* and queue complete lines.

    Looks up the matching StreamDescriptor, flags it on read errors,
    splits buffered data on os.linesep and pushes stripped lines to its
    read queue, invoking the callback for each. Returns 0 on error/close.
    """
    try:
        line = os.read(file_descr, 1024 * 1024)
    except OSError:
        stream_desc = NonBlockingStreamReader._get_sd(file_descr)
        if stream_desc is not None:
            stream_desc.has_error = True
            if stream_desc.callback is not None:
                stream_desc.callback()
        return 0
    if not line:
        return 0
    stream_desc = NonBlockingStreamReader._get_sd(file_descr)
    if stream_desc is None:
        # Process closing; nobody is listening on this fd any more.
        return 0
    if IS_PYTHON3:
        try:
            # @TODO: further develop for not ascii/unicode binary content
            line = line.decode("ascii")
        except UnicodeDecodeError:
            line = repr(line)
    stream_desc.buf += line
    # Break complete lines out of the buffer.
    split = stream_desc.buf.split(os.linesep)
    for line in split[:-1]:
        stream_desc.read_queue.appendleft(strip_escape(line.strip()))
        if stream_desc.callback is not None:
            stream_desc.callback()
    # Store the remainder: '' if the last char was a newline, otherwise the
    # partial line left in the buffer.
    stream_desc.buf = split[-1]
    return len(line)
Read incoming data from file handle . Then find the matching StreamDescriptor by file_descr value .
295
22
25,663
def _read_select_kqueue(k_queue):
    """Read registered PIPEs using a BSD kqueue.

    Returns when the set of registered streams changes so the caller can
    rebuild the kevent list.
    """
    npipes = len(NonBlockingStreamReader._streams)
    # One kevent per registered stream.
    # pylint: disable=no-member
    kevents = [select.kevent(s.stream.fileno(),
                             filter=select.KQ_FILTER_READ,
                             flags=select.KQ_EV_ADD | select.KQ_EV_ENABLE)
               for s in NonBlockingStreamReader._streams]
    while NonBlockingStreamReader._run_flag:
        # Wake up twice per second.
        events = k_queue.control(kevents, npipes, 0.5)
        for event in events:
            if event.filter == select.KQ_FILTER_READ:  # pylint: disable=no-member
                NonBlockingStreamReader._read_fd(event.ident)
        # New pipes added? Return so the kevent list can be rebuilt.
        if npipes != len(NonBlockingStreamReader._streams):
            return
Read PIPES using BSD Kqueue
218
9
25,664
def stop(self):
    """Stop the reader for this stream.

    Deregisters the descriptor under the shared lock; when it was the
    last stream, signals the reader thread to exit and joins it.
    """
    NonBlockingStreamReader._stream_mtx.acquire()
    NonBlockingStreamReader._streams.remove(self._descriptor)
    if not NonBlockingStreamReader._streams:
        # Last stream removed: tell the shared reader thread to stop.
        NonBlockingStreamReader._run_flag = False
    NonBlockingStreamReader._stream_mtx.release()
    if NonBlockingStreamReader._run_flag is False:
        NonBlockingStreamReader._rt.join()
        del NonBlockingStreamReader._rt
        NonBlockingStreamReader._rt = None
Stop the reader
177
3
25,665
def use_gdbs(self, gdbs=True, port=2345):
    """Enable (or disable) gdbserver use for this process and record its port."""
    self.gdbs_port = port
    self.gdbs = gdbs
Set gdbs use for process .
38
8
25,666
def use_valgrind(self, tool, xml, console, track_origins, valgrind_extra_params):
    """Configure Valgrind use for this process.

    :param tool: valgrind tool: 'memcheck', 'callgrind' or 'massif'
    :param xml: True to produce xml output
    :param console: True to dump default text output to the console
    :param track_origins: True to enable origin tracking (memcheck)
    :param valgrind_extra_params: extra command line parameters as one string
    :raises AttributeError: if *tool* is not a supported valgrind tool
    """
    # Validate before mutating: the original assigned all attributes first
    # and raised afterwards, leaving the object half-configured on error.
    if tool not in ['memcheck', 'callgrind', 'massif']:
        raise AttributeError("Invalid valgrind tool: %s" % tool)
    self.valgrind = tool
    self.valgrind_xml = xml
    self.valgrind_console = console
    self.valgrind_track_origins = track_origins
    self.valgrind_extra_params = valgrind_extra_params
Use Valgrind .
126
5
25,667
def __get_valgrind_params(self):
    """Compose the valgrind command prefix as a list.

    Output destination depends on the configured flags: console (default
    text output), xml file, or a tool-specific log/data file under the
    testcase log directory. Empty list when valgrind is not enabled.
    """
    valgrind = []
    if self.valgrind:
        valgrind.extend(['valgrind'])
        if self.valgrind == 'memcheck':
            valgrind.extend(['--tool=memcheck', '--leak-check=full'])
            if self.valgrind_track_origins:
                valgrind.extend(['--track-origins=yes'])
            if self.valgrind_console:
                # Default output is text dumped to the console; nothing to add.
                valgrind.extend([])
            elif self.valgrind_xml:
                valgrind.extend(['--xml=yes',
                                 '--xml-file=' + LogManager.get_testcase_logfilename(
                                     self.name + '_valgrind_mem.xml',
                                     prepend_tc_name=True)])
            else:
                valgrind.extend(['--log-file=' + LogManager.get_testcase_logfilename(
                    self.name + '_valgrind_mem.txt')])
        elif self.valgrind == 'callgrind':
            valgrind.extend(['--tool=callgrind', '--dump-instr=yes',
                             '--simulate-cache=yes', '--collect-jumps=yes'])
            if self.valgrind_console:
                # Default output is text dumped to the console; nothing to add.
                valgrind.extend([])
            elif self.valgrind_xml:
                valgrind.extend(['--xml=yes',
                                 '--xml-file=' + LogManager.get_testcase_logfilename(
                                     self.name + '_valgrind_calls.xml',
                                     prepend_tc_name=True)])
            else:
                valgrind.extend(['--callgrind-out-file=' +
                                 LogManager.get_testcase_logfilename(
                                     self.name + '_valgrind_calls.data')])
        elif self.valgrind == 'massif':
            valgrind.extend(['--tool=massif'])
            valgrind.extend(['--massif-out-file=' +
                             LogManager.get_testcase_logfilename(
                                 self.name + '_valgrind_massif.data')])
        # Misc extra params, e.g. "--threshold=0.4" to get more data from massif.
        if self.valgrind_extra_params != '':
            valgrind.extend(self.valgrind_extra_params.split())
    return valgrind
Get Valgrind command as list .
606
8
25,668
def writeline(self, data, crlf="\r\n"):
    """Write *data* followed by *crlf* to the process stdin as ascii.

    :raises RuntimeError: if the reader thread reported an error or the
        process has stopped.
    """
    if self.read_thread:
        if self.read_thread.has_error():
            raise RuntimeError("Error writing PIPE")
        # Check that the process is still alive before writing.
        if self.proc.poll() is not None:
            raise RuntimeError("Process stopped")
    if self.__print_io:
        self.logger.info(data, extra={'type': '-->'})
    self.proc.stdin.write(bytearray(data + crlf, 'ascii'))
    self.proc.stdin.flush()
Writeline implementation .
141
4
25,669
def load(filename):
    """Load a seed from *filename* and return it as a SeedInteger."""
    stored = Seed.load(filename)
    return SeedInteger(stored["seed_value"], stored["seed_id"], stored["date"])
Load seed from a file .
51
6
25,670
def get_pyserial_version(self):
    """Retrieve the installed pyserial module version as a float.

    Falls back to 3.0 when the version string cannot be parsed.
    """
    pyserial_version = pkg_resources.require("pyserial")[0].version
    version = 3.0
    match = self.re_float.search(pyserial_version)
    if match:
        try:
            version = float(match.group(0))
        except ValueError:
            # Assume a modern (3.0+) release when the match is unparseable.
            version = 3.0
    return version
! Retrieve pyserial module version
97
8
25,671
def readline(self, timeout=1):
    """Read one complete line; maxsize is ignored.

    :param timeout: maximum time in seconds to wait for a complete line
    :return: the line including its newline, or None on timeout
    """
    tries = 0
    while 1:
        try:
            block = self.read(512)
            if isinstance(block, bytes):
                block = block.decode()
            elif isinstance(block, str):
                # Already text. The original called block.decode() here,
                # which raises AttributeError on Python 3 (str has no
                # decode); on Python 2 str is bytes and hits the branch
                # above, so passing through is safe on both versions.
                pass
            else:
                raise ValueError("Unknown data")
        except SerialTimeoutException:
            # Raised on write timeouts.
            block = ''
        except SerialException:
            # Device not found or could not be configured.
            block = ''
        except ValueError:
            # Parameter out of range (e.g. baud rate, data bits), or a
            # UnicodeError (a ValueError subclass) while decoding.
            block = ''
        with self.buffer_lock:  # Lock, just in case.
            self.buf += block
            pos = self.buf.find('\n')
            if pos >= 0:
                line, self.buf = self.buf[:pos + 1], self.buf[pos + 1:]
                return line
        tries += 1
        if tries * self.timeout > timeout:
            break
    return None
maxsize is ignored; timeout in seconds is the maximum time that is waited for a complete line
246
18
25,672
def readlines(self, timeout=1):
    """Read all available lines; stop after *timeout* when no more data arrives
    or when a read returns an incomplete line."""
    collected = []
    while True:
        line = self.readline(timeout=timeout)
        if line:
            collected.append(line)
        if not line or not line.endswith('\n'):
            break
    return collected
read all lines that are available . abort after timeout when no more data arrives .
57
16
25,673
def set(self, key, value):
    """Set the value for a specific requirement.

    Tags are delegated to _set_tag; dict values merge into an existing
    dict requirement, everything else overwrites.
    """
    if key == "tags":
        self._set_tag(tags=value)
        return
    current = self._requirements.get(key)
    if isinstance(value, dict) and key in self._requirements and isinstance(current, dict):
        self._requirements[key] = merge(current, value)
    else:
        self._requirements[key] = value
Sets the value for a specific requirement .
96
9
25,674
def _set_tag(self, tag=None, tags=None, value=True):
    """Set one tag to *value*, or merge a dict of new tags into existing ones.

    Exactly one of *tag* / *tags* should be given; when both or neither
    are set, nothing happens.
    """
    existing_tags = self._requirements.get("tags")
    if tags and not tag:
        # Bulk update: merge the new tag dict into the existing one.
        self._requirements["tags"] = merge(existing_tags, tags)
    elif tag and not tags:
        existing_tags[tag] = value
        self._requirements["tags"] = existing_tags
Sets the value of a specific tag or merges existing tags with a dict of new tags . Either tag or tags must be None .
98
28
25,675
def icetea_main():
    """Main entry point: run IceteaManager and exit with its return code."""
    from icetea_lib import IceteaManager
    exit_code = IceteaManager.IceteaManager().run()
    sys.exit(exit_code)
Main function for running Icetea . Calls sys . exit with the return code to exit .
50
19
25,676
def _run_sphinx_command(cmd_ar, description):
    """Run one sphinx command; return 0 on success, 3 on failure (with a message)."""
    try:
        print(description)
        check_call(cmd_ar)
    except CalledProcessError as error:
        print("Documentation build failed. Return code: {}".format(error.returncode))
        return 3
    except OSError as error:
        print(error)
        print("Documentation build failed. Are you missing Sphinx? Please install sphinx using "
              "'pip install sphinx'.")
        return 3
    return 0


def build_docs(location="doc-source", target=None, library="icetea_lib"):
    """Build documentation for Icetea.

    Autogenerates module documentation with sphinx-apidoc, then builds
    html with sphinx-build. The duplicated try/except blocks of the
    original are factored into _run_sphinx_command, and the unused
    retcode variable is gone.

    :param location: source directory for the generated api docs
    :param target: html output directory (default doc/html)
    :param library: package to document
    :return: 0 on success, 3 on failure
    """
    result = _run_sphinx_command(["sphinx-apidoc", "-o", location, library],
                                 "Generating api docs.")
    if result:
        return result
    target = "doc{}html".format(os.sep) if target is None else target
    result = _run_sphinx_command(["sphinx-build", "-b", "html", location, target],
                                 "Building html documentation.")
    if result:
        return result
    print("Documentation built.")
    return 0
Build documentation for Icetea . Start by autogenerating module documentation and finish by building html .
300
21
25,677
def find_next(lines, find_str, start_index):
    """Find the next instance of *find_str* in *lines* starting from *start_index*.

    :param find_str: plain string/regex (normal mode) or an Invert (inverted mode)
    :return: tuple (found-in-normal-mode, index, matching line or None)
    :raises TypeError: for unsupported message types
    :raises LookupError: when nothing matches in normal mode
    """
    if isinstance(find_str, basestring):
        mode = 'normal'
        message = find_str
    elif isinstance(find_str, Invert):
        mode = 'invert'
        message = str(find_str)
    else:
        raise TypeError("Unsupported message type")
    for index in range(start_index, len(lines)):
        candidate = lines[index]
        # Match either as a regex or as a plain substring.
        if re.search(message, candidate) or message in candidate:
            return mode == 'normal', index, candidate
    if mode == 'invert':
        # Inverted searches succeed when the message never appears.
        return True, len(lines), None
    raise LookupError("Not found")
Find the next instance of find_str from lines starting from start_index .
172
16
25,678
def verify_message(lines, expected_response):
    """Check that every expected message appears in *lines*, in order.

    :param expected_response: string, set or list of messages
    :return: True when all were found in order, False otherwise
    :raises TypeError: if expected_response is not a list, set or string
    """
    if isinstance(expected_response, basestring):
        expected_response = [expected_response]
    if isinstance(expected_response, set):
        expected_response = list(expected_response)
    if not isinstance(expected_response, list):
        raise TypeError("verify_message: expectedResponse must be list, set or string")
    position = 0
    for message in expected_response:
        try:
            found, position, _ = find_next(lines, message, position)
        except (TypeError, LookupError):
            return False
        if not found:
            return False
        # Continue searching after the line that matched.
        position += 1
    return True
Looks for expectedResponse in lines .
142
7
25,679
def _cleanlogs(silent=False, log_location="log"):
    """Remove the default log directory; errors are reported unless *silent*."""
    try:
        print("cleaning up Icetea log directory.")
        shutil.rmtree(log_location, ignore_errors=silent,
                      onerror=None if silent else _clean_onerror)
    except OSError as error:
        print(error)
Cleans up the Mbed-test default log directory .
76
11
25,680
def list_suites(suitedir="./testcases/suites", cloud=False):
    """List suites from the local suite directory and optionally from the cloud.

    :return: a PrettyTable of suite names, or None when nothing was found.
    """
    suites = list(TestSuite.get_suite_files(suitedir))
    # No suitedir or no local suites is fine; cloud campaigns are appended
    # after a visual separator.
    if cloud:
        names = cloud.get_campaign_names()
        if names:
            suites.append("------------------------------------")
            suites.append("FROM CLOUD:")
            suites.extend(names)
    if not suites:
        return None
    from prettytable import PrettyTable
    table = PrettyTable(["Testcase suites"])
    for suite in suites:
        table.add_row([suite])
    return table
Static method for listing suites from both local source and cloud . Uses PrettyTable to generate the table .
149
20
25,681
def _parse_arguments():
    """Parse known command line arguments; return (args, unknown_args)."""
    parser = get_tc_arguments(get_base_arguments(get_parser()))
    return parser.parse_known_args()
Static method for paring arguments
52
6
25,682
def check_args(self):
    """Validate the received command line arguments.

    :return: False when no arguments were given, or unknown parameters
        were received without --ignore_invalid_params; True otherwise.
    """
    parser = get_base_arguments(get_parser())
    parser = get_tc_arguments(parser)
    # Disable "Do not use len(SEQ) as condition value"
    # pylint: disable=C1801
    if len(sys.argv) < 2:
        self.logger.error("Icetea called with no arguments! ")
        parser.print_help()
        return False
    if self.unknown and not self.args.ignore_invalid_params:
        self.logger.error("Unknown parameters received, exiting. "
                          "To ignore this add --ignore_invalid_params flag.")
        self.logger.error("Following parameters were unknown: {}".format(self.unknown))
        parser.print_help()
        return False
    return True
Validates that a valid number of arguments were received and that all arguments were recognised .
177
17
25,683
def _init_pluginmanager(self):
    """Create the PluginManager and load run-wide default and custom plugins."""
    self.pluginmanager = PluginManager(logger=self.logger)
    self.logger.debug("Registering execution wide plugins:")
    self.pluginmanager.load_default_run_plugins()
    self.pluginmanager.load_custom_run_plugins(self.args.plugin_path)
    self.logger.debug("Execution wide plugins loaded and registered.")
Initialize PluginManager and load run wide plugins .
89
10
25,684
def run(self, args=None):
    """Run the set of tests within the given path.

    :param args: optional parsed arguments overriding self.args
    :return: an ExitCodes value summarising the run
    """
    # pylint: disable=R0912,R0911
    retcodesummary = ExitCodes.EXIT_SUCCESS
    self.args = args if args else self.args
    if not self.check_args():
        return retcodesummary
    if self.args.clean:
        # Pure cleanup invocation: nothing else to do.
        if not self.args.tc and not self.args.suite:
            return retcodesummary
    # --version: print and exit.
    version = get_fw_version()
    if self.args.version and version:
        print(version)
        return retcodesummary
    elif self.args.version and not version:
        print("Unable to get version. Have you installed Icetea correctly?")
        return retcodesummary
    self.logger.info("Using Icetea version {}".format(version) if version
                     else "Unable to get Icetea version. Is Icetea installed?")
    # If cloud is set, initialize the cloud module from the environment.
    cloud = self._init_cloud(self.args.cloud)
    # --listsuites: print suites from cloud or local source, then exit.
    if self.args.listsuites:
        table = self.list_suites(self.args.suitedir, cloud)
        if table is None:
            self.logger.error("No suites found!")
            retcodesummary = ExitCodes.EXIT_FAIL
        else:
            print(table)
        return retcodesummary
    try:
        testsuite = TestSuite(logger=self.logger, cloud_module=cloud, args=self.args)
    except SuiteException as error:
        self.logger.error("Something went wrong in suite creation! {}".format(error))
        retcodesummary = ExitCodes.EXIT_INCONC
        return retcodesummary
    if self.args.list:
        if self.args.cloud:
            testsuite.update_testcases()
        testcases = testsuite.list_testcases()
        print(testcases)
        return retcodesummary
    results = self.runtestsuite(testsuite=testsuite)
    if not results:
        retcodesummary = ExitCodes.EXIT_SUCCESS
    elif results.failure_count() and self.args.failure_return_value is True:
        retcodesummary = ExitCodes.EXIT_FAIL
    elif results.inconclusive_count() and self.args.failure_return_value is True:
        retcodesummary = ExitCodes.EXIT_INCONC
    return retcodesummary
Runs the set of tests within the given path .
577
11
25,685
def _cleanup_resourceprovider(self):
    """Create a ResourceProvider for this run and invoke its cleanup."""
    # Disable too broad exception warning
    # pylint: disable=W0703
    self.resourceprovider = ResourceProvider(self.args)
    try:
        self.resourceprovider.cleanup()
        self.logger.info("Cleanup done.")
    except Exception as error:
        self.logger.error("Cleanup failed! %s", error)
Calls cleanup for ResourceProvider of this run .
86
10
25,686
def _init_cloud(self, cloud_arg):
    """Initialize the Cloud module when *cloud_arg* is set.

    :return: a Cloud instance, or None when disabled or initialization fails.
    """
    # Disable too broad exception warning
    # pylint: disable=W0703
    cloud = None
    if cloud_arg:
        try:
            if hasattr(self.args, "cm"):
                cloud_module = self.args.cm if self.args.cm else None
                self.logger.info("Creating cloud module {}.".format(cloud_module))
            else:
                cloud_module = None
            cloud = Cloud(host=None, module=cloud_module, logger=self.logger, args=self.args)
        except Exception as error:
            self.logger.warning("Cloud module could not be initialized: {}".format(error))
            cloud = None
    return cloud
Initializes Cloud module if cloud_arg is set .
154
11
25,687
def generate(self, *args, **kwargs):
    """Generate a html report and save it to the filename given as args[0].

    Recognised kwargs: title, heads, refresh.
    """
    filename = args[0]
    report = self._create(kwargs.get("title"), kwargs.get("heads"),
                          kwargs.get("refresh"),
                          path_start=os.path.dirname(filename))
    ReportHtml.save(report, filename)
Implementation for the generate method defined in ReportBase . Generates a html report and saves it .
95
20
25,688
def check_int(integer):
    """Check whether *integer* is a string representing a (possibly signed) integer.

    :param integer: value to check; non-strings always yield False
    :return: True if the string is a valid integer literal, False otherwise
    """
    if not isinstance(integer, str):
        return False
    if not integer:
        # Guard: the original indexed integer[0], raising IndexError on "".
        return False
    if integer[0] in ('-', '+'):
        return integer[1:].isdigit()
    return integer.isdigit()
Check if number is integer or not .
53
8
25,689
def _is_pid_running_on_unix(pid):
    """Check if PID is running for Unix systems.

    :param pid: process id to probe
    :return: True when the process exists, False otherwise
    """
    import errno  # local import keeps the fix self-contained
    try:
        # Signal 0 performs error checking only; it never kills the process.
        os.kill(pid, 0)
    except OSError as err:
        # ESRCH means the process doesn't exist; any other error (e.g.
        # EPERM) implies it does. NOTE: the original used os.errno.ESRCH,
        # which raises AttributeError on Python 3.8+ where os.errno was
        # removed -- the errno module is the correct source.
        return err.errno != errno.ESRCH
    return True
Check if PID is running for Unix systems .
66
9
25,690
def _is_pid_running_on_windows(pid):
    """Check if PID is running for Windows systems via kernel32.OpenProcess."""
    import ctypes.wintypes

    kernel32 = ctypes.windll.kernel32
    handle = kernel32.OpenProcess(1, 0, pid)
    if handle == 0:
        return False
    # Query the exit code: the process is alive when the query fails or
    # the code reports STILL_ACTIVE.
    exit_code = ctypes.wintypes.DWORD()
    ret = kernel32.GetExitCodeProcess(handle, ctypes.byref(exit_code))
    is_alive = (ret == 0 or exit_code.value == _STILL_ALIVE)  # pylint: disable=undefined-variable
    kernel32.CloseHandle(handle)
    return is_alive
Check if PID is running for Windows systems
139
8
25,691
def strip_escape(string='', encoding="utf-8"):  # pylint: disable=redefined-outer-name
    """Strip ANSI escape sequences from *string*, decoding bytes first when possible.

    :raises TypeError: when the data cannot be scanned for escapes at all.
    """
    try:
        if hasattr(string, "decode"):
            string = string.decode(encoding)
    except Exception:  # pylint: disable=broad-except
        # Not decodable in the requested encoding; work on the value as-is.
        pass
    try:
        matches = list(ansi_eng.finditer(string))
    except TypeError as error:
        raise TypeError("Unable to strip escape characters from data {}: {}".format(
            string, error))
    # Remove matched spans back-to-front so earlier offsets stay valid.
    for match in reversed(matches):
        string = string[:match.start()] + string[match.end():]
    return string
Strip escape characters from string .
187
7
25,692
def import_module(modulename):
    """Import *modulename*; handles relative-style names as a fallback.

    On ImportError with a dotted name, retries with the leading package
    segment stripped; a second failure (or a plain name) re-raises.
    """
    try:
        return importlib.import_module(modulename)
    except ImportError:
        if "." not in modulename:
            # Nothing to strip; the import genuinely failed.
            raise
        remainder = ".".join(modulename.split(".")[1:])
        # May raise ImportError again; then we really failed.
        return importlib.import_module(remainder)
Static method for importing module modulename . Can handle relative imports as well .
136
16
25,693
def get_abs_path(relative_path):
    """Resolve *relative_path* against the directory containing this module."""
    module_file = os.path.abspath(sys.modules[__name__].__file__)
    module_dir = os.path.sep.join(module_file.split(os.path.sep)[:-1])
    return os.path.abspath(module_dir + os.path.sep + relative_path)
Get absolute path for relative path .
88
7
25,694
def get_pkg_version(pkg_name, parse=False):
    """Return the installed version of *pkg_name*, or None when not installed.

    :param parse: when True, return a parsed version object instead of a string
    """
    import pkg_resources  # part of setuptools
    try:
        version = pkg_resources.require(pkg_name)[0].version
    except pkg_resources.DistributionNotFound:
        return None
    return pkg_resources.parse_version(version) if parse else version
Verify and get installed python package version .
77
9
25,695
def generate_object_graphs_by_class(classlist):
    """Generate ref/backref png graphs for live objects of the given classes.

    Silently does nothing when objgraph is not installed. Useful for
    debugging reference leaks in the framework.
    """
    try:
        import objgraph
        import gc
    except ImportError:
        return
    if not isinstance(classlist, list):
        classlist = [classlist]
    graphcount = 0
    for klass in classlist:
        for obj in gc.get_objects():
            if not isinstance(obj, klass):
                continue
            graphcount += 1
            cls_name = obj.__class__.__name__
            objgraph.show_refs(
                [obj], filename='%d_%s_%d_refs.png' % (ogcounter, cls_name, graphcount))
            objgraph.show_backrefs(
                [obj], filename='%d_%s_%d_backrefs.png' % (ogcounter, cls_name, graphcount))
Generate reference and backreference graphs for objects of type class for each class given in classlist . Useful for debugging reference leaks in framework etc .
186
29
25,696
def remove_empty_from_dict(dictionary):
    """Recursively remove empty/falsy items from a dict or list.

    The original recursed twice per entry (once in the filter condition,
    once for the stored value), which is exponential on deeply nested
    structures; each value is now cleaned exactly once. dict.items() is
    used directly (behavior-identical to iteritems on both Pythons).

    :param dictionary: dict, list or scalar value
    :return: cleaned copy for dicts/lists, the value itself otherwise
    """
    if isinstance(dictionary, dict):
        result = {}
        for key, value in dictionary.items():
            if not value:
                continue
            cleaned = remove_empty_from_dict(value)
            if cleaned:
                result[key] = cleaned
        return result
    if isinstance(dictionary, list):
        cleaned_list = []
        for value in dictionary:
            if not value:
                continue
            cleaned = remove_empty_from_dict(value)
            if cleaned:
                cleaned_list.append(cleaned)
        return cleaned_list
    return dictionary
Remove empty items from dictionary d
104
6
25,697
def set_or_delete(dictionary, key, value):
    """Set dictionary[key] = value when value is truthy; otherwise remove the key.

    The original tested ``dictionary.get(key)`` before deleting, so a key
    holding a falsy value (0, '', None) could never be removed; testing
    membership matches the documented contract.

    :param dictionary: dict to mutate in place
    :param key: key to set or delete
    :param value: new value; falsy values trigger deletion
    """
    if value:
        dictionary[key] = value
    elif key in dictionary:
        del dictionary[key]
Set value as value of dict key key . If value is None delete key key from dict .
38
19
25,698
def initLogger(name):  # pylint: disable=invalid-name
    """Initialize a basic INFO-level logger with one console StreamHandler.

    The handler is attached only once per logger; repeated calls return
    the same logger without adding duplicate handlers. Can be replaced
    when constructing the HttpApi object or afterwards with a setter.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    if not getattr(logger, "streamhandler_set", None):
        handler = logging.StreamHandler()
        handler.setFormatter(
            logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
        handler.setLevel(logging.INFO)
        logger.addHandler(handler)
        logger.streamhandler_set = True
    return logger
Initializes a basic logger . Can be replaced when constructing the HttpApi object or afterwards with setter
151
22
25,699
def find_duplicate_keys(data):
    """Build a dict from ordered pairs, rejecting duplicate keys.

    Intended as the object_pairs_hook callable for json.load / json.loads.

    :raises ValueError: when the same key appears more than once
    """
    result = {}
    for key, value in data:
        if key in result:
            raise ValueError("Duplicate key: {}".format(key))
        result[key] = value
    return result
Find duplicate keys in a layer of ordered pairs . Intended as the object_pairs_hook callable for json . load or loads .
62
29