idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
25,700 | def _normalize_unit ( cls , unit ) : if unit in cls . UNITS_IN_SECONDS : return unit return cls . UNIT_ALIASES_REVERSE . get ( unit , None ) | Resolve a unit to its real name if it's an alias . |
25,701 | def convert ( cls , value , from_unit , to_unit ) : value_ms = value * cls . UNITS_IN_MILLISECONDS [ from_unit ] return value_ms / cls . UNITS_IN_MILLISECONDS [ to_unit ] | Convert a value from one time unit to another . |
25,702 | def init_handler ( self ) : assert self . options . get ( 'host' ) and self . options . get ( 'port' ) , "Invalid options" assert self . options . get ( 'to' ) , 'Recipients list is empty. SMTP disabled.' if not isinstance ( self . options [ 'to' ] , ( list , tuple ) ) : self . options [ 'to' ] = [ self . options [ 'to... | Check self options . |
25,703 | def iterator_mix ( * iterators ) : while True : one_left = False for it in iterators : try : yield it . next ( ) except StopIteration : pass else : one_left = True if not one_left : break | Iterating over list of iterators . Bit like zip but zip stops after the shortest iterator is empty and here we go on until all iterators are empty . |
25,704 | def _normalize_path ( self , path ) : norm_path = os . path . normpath ( path ) return os . path . relpath ( norm_path , start = self . _get_working_dir ( ) ) | Normalizes a file path so that it returns a path relative to the root repo directory . |
25,705 | def translate_github_exception ( func ) : @ functools . wraps ( func ) def _wrapper ( * args , ** kwargs ) : try : return func ( * args , ** kwargs ) except UnknownObjectException as e : logger . exception ( 'GitHub API 404 Exception' ) raise NotFoundError ( str ( e ) ) except GithubException as e : logger . exception ... | Decorator to catch GitHub - specific exceptions and raise them as GitClientError exceptions . |
25,706 | def violations ( self ) : return self . _all_violations if self . config . fail_on == FAIL_ON_ANY else self . _diff_violations | Returns either the diff violations or all violations depending on configuration . |
25,707 | def execute ( self ) : if not self . config . pr : raise NotPullRequestException logger . debug ( 'Using the following configuration:' ) for name , value in self . config . as_dict ( ) . items ( ) : logger . debug ( ' - {}={}' . format ( name , repr ( value ) ) ) logger . info ( 'Running Lintly against PR #{} for repo... | Executes a new build on a project . |
25,708 | def find_diff_violations ( self , patch ) : violations = collections . defaultdict ( list ) for line in patch . changed_lines : file_violations = self . _all_violations . get ( line [ 'file_name' ] ) if not file_violations : continue line_violations = [ v for v in file_violations if v . line == line [ 'line_number' ] ]... | Uses the diff for this build to find changed lines that also have violations . |
25,709 | def post_pr_comment ( self , patch ) : if self . has_violations : post_pr_comment = True try : logger . info ( 'Deleting old PR review comments' ) self . git_client . delete_pull_request_review_comments ( self . config . pr ) logger . info ( 'Creating PR review' ) self . git_client . create_pull_request_review ( self .... | Posts a comment to the GitHub PR if the diff results have issues . |
25,710 | def post_commit_status ( self ) : if self . violations : plural = '' if self . introduced_issues_count == 1 else 's' description = 'Pull Request introduced {} linting violation{}' . format ( self . introduced_issues_count , plural ) self . _post_status ( 'failure' , description ) else : self . _post_status ( 'success' ... | Posts results to a commit status in GitHub if this build is for a pull request . |
25,711 | def reg_to_lex ( conditions , wildcards ) : aliases = defaultdict ( set ) n_conds = [ ] for i , _ in enumerate ( conditions ) : n_cond = [ ] for char in conditions [ i ] : if char in wildcards : alias = '%s_%s' % ( char , len ( aliases [ char ] ) ) aliases [ char ] . add ( alias ) n_cond . append ( make_token ( alias ,... | Transform a regular expression into a LEPL object . |
25,712 | def main ( ** options ) : configure_logging ( log_all = options . get ( 'log' ) ) stdin_stream = click . get_text_stream ( 'stdin' ) stdin_text = stdin_stream . read ( ) click . echo ( stdin_text ) ci = find_ci_provider ( ) config = Config ( options , ci = ci ) build = LintlyBuild ( config , stdin_text ) try : build . ... | Slurp up linter output and send it to a GitHub PR review . |
25,713 | def translate_gitlab_exception ( func ) : @ functools . wraps ( func ) def _wrapper ( * args , ** kwargs ) : try : return func ( * args , ** kwargs ) except gitlab . GitlabError as e : status_to_exception = { 401 : UnauthorizedError , 404 : NotFoundError , } exc_class = status_to_exception . get ( e . response_code , G... | Decorator to catch GitLab - specific exceptions and raise them as GitClientError exceptions . |
25,714 | def init_process_dut ( contextlist , conf , index , args ) : if "subtype" in conf and conf [ "subtype" ] : if conf [ "subtype" ] != "console" : msg = "Unrecognized process subtype: {}" contextlist . logger . error ( msg . format ( conf [ "subtype" ] ) ) raise ResourceInitError ( "Unrecognized process subtype: {}" ) con... | Initialize process type Dut as DutProcess or DutConsole . |
25,715 | def allocate ( self , dut_configuration_list , args = None ) : dut_config_list = dut_configuration_list . get_dut_configuration ( ) if not isinstance ( dut_config_list , list ) : raise AllocationError ( "Invalid dut configuration format!" ) if next ( ( item for item in dut_config_list if item . get ( "type" ) == "hardw... | Allocates resources from available local devices . |
25,716 | def _allocate ( self , dut_configuration ) : if dut_configuration [ "type" ] == "hardware" : dut_configuration . set ( "type" , "mbed" ) if dut_configuration [ "type" ] == "mbed" : if not self . _available_devices : raise AllocationError ( "No available devices to allocate from" ) dut_reqs = dut_configuration . get_req... | Internal allocation function . Allocates a single resource based on dut_configuration . |
25,717 | def register_tc_plugins ( self , plugin_name , plugin_class ) : if plugin_name in self . registered_plugins : raise PluginException ( "Plugin {} already registered! Duplicate " "plugins?" . format ( plugin_name ) ) self . logger . debug ( "Registering plugin %s" , plugin_name ) plugin_class . init ( bench = self . benc... | Loads a plugin as a dictionary and attaches needed parts to correct areas for testing parts . |
25,718 | def register_run_plugins ( self , plugin_name , plugin_class ) : if plugin_name in self . registered_plugins : raise PluginException ( "Plugin {} already registered! " "Duplicate plugins?" . format ( plugin_name ) ) self . logger . debug ( "Registering plugin %s" , plugin_name ) if plugin_class . get_allocators ( ) : r... | Loads a plugin as a dictionary and attaches needed parts to correct Icetea run global parts . |
25,719 | def load_default_tc_plugins ( self ) : for plugin_name , plugin_class in default_plugins . items ( ) : if issubclass ( plugin_class , PluginBase ) : try : self . register_tc_plugins ( plugin_name , plugin_class ( ) ) except PluginException as error : self . logger . debug ( error ) continue | Load default test case level plugins from icetea_lib . Plugin . plugins . default_plugins . |
25,720 | def load_custom_tc_plugins ( self , plugin_path = None ) : if not plugin_path : return directory = os . path . dirname ( plugin_path ) sys . path . append ( directory ) modulename = os . path . split ( plugin_path ) [ 1 ] if "." in modulename : modulename = modulename [ : modulename . rindex ( "." ) ] try : module = im... | Load custom test case level plugins from plugin_path . |
25,721 | def load_default_run_plugins ( self ) : for plugin_name , plugin_class in default_plugins . items ( ) : if issubclass ( plugin_class , RunPluginBase ) : try : self . register_run_plugins ( plugin_name , plugin_class ( ) ) except PluginException as error : self . logger . debug ( error ) continue | Load default run level plugins from icetea_lib . Plugin . plugins . default_plugins . |
25,722 | def start_external_service ( self , service_name , conf = None ) : if service_name in self . _external_services : ser = self . _external_services [ service_name ] service = ser ( service_name , conf = conf , bench = self . bench ) try : service . start ( ) except PluginException : self . logger . exception ( "Starting ... | Start external service service_name with configuration conf . |
25,723 | def stop_external_services ( self ) : for service in self . _started_services : self . logger . debug ( "Stopping application %s" , service . name ) try : service . stop ( ) except PluginException : self . logger . exception ( "Stopping external service %s caused and exception!" , service . name ) self . _started_servi... | Stop all external services . |
25,724 | def _register_bench_extension ( self , plugin_name , plugin_instance ) : for attr in plugin_instance . get_bench_api ( ) . keys ( ) : if hasattr ( self . bench , attr ) : raise PluginException ( "Attribute {} already exists in bench! Unable to add " "plugin {}." . format ( attr , plugin_name ) ) setattr ( self . bench ... | Register a bench extension . |
25,725 | def _register_dataparser ( self , plugin_name , plugin_instance ) : for parser in plugin_instance . get_parsers ( ) . keys ( ) : if self . responseparser . has_parser ( parser ) : raise PluginException ( "Parser {} already registered to parsers! Unable to " "add parsers from {}." . format ( parser , plugin_name ) ) sel... | Register a parser . |
25,726 | def _register_external_service ( self , plugin_name , plugin_instance ) : for attr in plugin_instance . get_external_services ( ) . keys ( ) : if attr in self . _external_services : raise PluginException ( "External service with name {} already exists! Unable to add " "services from plugin {}." . format ( attr , plugin... | Register an external service . |
25,727 | def _register_allocator ( self , plugin_name , plugin_instance ) : for allocator in plugin_instance . get_allocators ( ) . keys ( ) : if allocator in self . _allocators : raise PluginException ( "Allocator with name {} already exists! unable to add " "allocators from plugin {}" . format ( allocator , plugin_name ) ) se... | Register an allocator . |
25,728 | def create ( host , port , result_converter = None , testcase_converter = None , args = None ) : return SampleClient ( host , port , result_converter , testcase_converter , args ) | Function which is called by Icetea to create an instance of the cloud client . This function must exist . This function must not return None . Either return an instance of Client or raise . |
25,729 | def send_results ( self , result ) : if self . result_converter : print ( self . result_converter ( result ) ) else : print ( result ) | Upload a result object to server . If resultConverter has been provided use it to convert result object to format accepted by the server . If needed use testcase_converter to convert tc metadata in result to suitable format . |
25,730 | def get_tc_api ( self , host , headers = None , cert = None , logger = None ) : if logger is None and self . logger : logger = self . logger return Api ( host , headers , cert , logger ) | Gets HttpApi wrapped into a neat little package that raises TestStepFail if expected status code is not returned by the server . Default setting for expected status code is 200 . Set expected to None when calling methods to ignore the expected status code parameter or set raiseException = False to disable raising the e... |
25,731 | def _raise_fail ( self , response , expected ) : try : if self . logger : self . logger . error ( "Status code " "{} != {}. \n\n " "Payload: {}" . format ( response . status_code , expected , response . content ) ) raise TestStepFail ( "Status code {} != {}." . format ( response . status_code , expected ) ) except Test... | Raise a TestStepFail with neatly formatted error message |
25,732 | def generate ( self , * args , ** kwargs ) : xmlstr = str ( self ) filename = args [ 0 ] with open ( filename , 'w' ) as fil : fil . write ( xmlstr ) with open ( self . get_latest_filename ( 'junit.xml' ) , "w" ) as latest_report : latest_report . write ( xmlstr ) | Implementation for generate method from ReportBase . Generates the xml and saves the report in Junit xml format . |
25,733 | def __generate ( results ) : doc , tag , text = Doc ( ) . tagtext ( ) count = 0 fails = 0 errors = 0 skips = 0 for result in results : if result . passed ( ) is False : if result . retries_left > 0 : continue count += 1 if result . passed ( ) : continue elif result . skipped ( ) : skips += 1 elif result . was_inconclus... | Static method which generates the Junit xml string from results |
25,734 | def open_dut_connections ( self ) : for dut in self . duts : try : dut . start_dut_thread ( ) if hasattr ( dut , "command" ) : dut . open_dut ( dut . command ) else : dut . open_dut ( ) except DutConnectionError : self . logger . exception ( "Failed when opening dut connection" ) dut . close_dut ( False ) dut . close_c... | Opens connections to Duts . Starts Dut read threads . |
25,735 | def check_flashing_need ( self , execution_type , build_id , force ) : binary_file_name = AllocationContextList . get_build ( build_id ) if binary_file_name : if execution_type == 'hardware' and os . path . isfile ( binary_file_name ) : if not force : extension_split = os . path . splitext ( binary_file_name ) extensio... | Check if flashing of local device is required . |
25,736 | def remove_handlers ( logger ) : if hasattr ( logger , "handlers" ) : for handler in logger . handlers [ : : - 1 ] : try : if isinstance ( handler , logging . FileHandler ) : handler . close ( ) logger . removeHandler ( handler ) except : import traceback traceback . print_exc ( ) break | Remove handlers from logger . |
25,737 | def get_base_logfilename ( logname ) : logdir = get_base_dir ( ) fname = os . path . join ( logdir , logname ) GLOBAL_LOGFILES . append ( fname ) return fname | Return filename for a logfile . The filename will contain the actual path + filename . |
25,738 | def get_file_logger ( name , formatter = None ) : if name is None or name == "" : raise ValueError ( "Can't make a logger without name" ) logger = logging . getLogger ( name ) remove_handlers ( logger ) logger . setLevel ( logging . INFO ) if formatter is None : config = LOGGING_CONFIG . get ( name , { } ) . get ( "fil... | Return a file logger that will log into a file located in the testcase log directory . Anything logged with a file logger won t be visible in the console or any other logger . |
25,739 | def _check_existing_logger ( loggername , short_name ) : if loggername in LOGGERS : if isinstance ( LOGGERS [ loggername ] , BenchLoggerAdapter ) : if ( "source" not in LOGGERS [ loggername ] . extra or LOGGERS [ loggername ] . extra [ "source" ] != short_name ) : LOGGERS [ loggername ] . extra [ "source" ] = short_nam... | Check if logger with name loggername exists . |
25,740 | def _add_filehandler ( logger , logpath , formatter = None , name = "Bench" ) : formatter = formatter if formatter else BenchFormatterWithType ( loggername = name ) handler = _get_filehandler_with_formatter ( logpath , formatter ) config = LOGGING_CONFIG . get ( name , { } ) . get ( "file" , DEFAULT_LOGGING_CONFIG . ge... | Adds a FileHandler to logger . |
25,741 | def _get_basic_logger ( loggername , log_to_file , logpath ) : logger = logging . getLogger ( loggername ) logger . propagate = False remove_handlers ( logger ) logger . setLevel ( logging . DEBUG ) logger_config = LOGGING_CONFIG . get ( loggername , DEFAULT_LOGGING_CONFIG ) if TRUNCATE_LOG or logger_config . get ( "tr... | Get a logger with our basic configuration done . |
25,742 | def get_resourceprovider_logger ( name = None , short_name = " " , log_to_file = True ) : global LOGGERS loggername = name logger = _check_existing_logger ( loggername , short_name ) if logger is not None : return logger logger_config = LOGGING_CONFIG . get ( name , DEFAULT_LOGGING_CONFIG ) logger = _get_basic_logger (... | Get a logger for ResourceProvider and its components such as Allocators . |
25,743 | def get_external_logger ( name = None , short_name = " " , log_to_file = True ) : global LOGGERS loggername = name logger = _check_existing_logger ( loggername , short_name ) if logger is not None : return logger logging_config = LOGGING_CONFIG . get ( name , LOGGING_CONFIG . get ( "external" ) ) filename = logging_con... | Get a logger for external modules whose logging should usually be on a less verbose level . |
25,744 | def get_bench_logger ( name = None , short_name = " " , log_to_file = True ) : global LOGGERS if name is None or name == "" or name == "bench" : return LOGGERS [ "bench" ] loggername = "bench." + name logger = _check_existing_logger ( loggername , short_name ) if logger is not None : return logger logger = _get_basic_... | Return a logger instance for given name . The logger will be a child of the bench logger so anything that is logged to it will be also logged to bench logger . If a logger with the given name doesn't already exist , create it using the given parameters . |
25,745 | def init_base_logging ( directory = "./log" , verbose = 0 , silent = False , color = False , no_file = False , truncate = True , config_location = None ) : global LOGPATHDIR global STANDALONE_LOGGING global TRUNCATE_LOG global COLOR_ON global SILENT_ON global VERBOSE_LEVEL if config_location : try : _read_config ( conf... | Initialize the Icetea logging by creating a directory to store logs for this run and initialize the console logger for Icetea itself . |
25,746 | def _read_config ( config_location ) : global LOGGING_CONFIG with open ( config_location , "r" ) as config_loc : cfg_file = json . load ( config_loc ) if "logging" in cfg_file : log_dict = cfg_file . get ( "logging" ) with open ( os . path . abspath ( os . path . join ( __file__ , os . path . pardir , 'logging_schema.j... | Read configuration for logging from a json file . Merges the read dictionary to LOGGING_CONFIG . |
25,747 | def format ( self , record ) : if not hasattr ( record , "type" ) : record . type = " " return self . _formatter . format ( record ) | Format record with formatter . |
25,748 | def format_message ( msg ) : callerframerecord = inspect . stack ( ) [ 2 ] frame = callerframerecord [ 0 ] info = inspect . getframeinfo ( frame ) _ , filename = os . path . split ( info . filename ) caller_site = "In file {!s}, in function {!s}, at line {:d}" . format ( filename , info . function , info . lineno ) ret... | Formatting function for assert messages . Fetches the filename , function and line number of the code causing the fail and formats it into a three - line error message . Stack inspection is used to get the information . Originally done by BLE - team for their testcases . |
25,749 | def assertTraceDoesNotContain ( response , message ) : if not hasattr ( response , "verify_trace" ) : raise AttributeError ( "Response object does not contain verify_trace method!" ) if response . verify_trace ( message , False ) : raise TestStepFail ( 'Assert: Message(s) "%s" in response' % message ) | Raise TestStepFail if response . verify_trace finds message from response traces . |
25,750 | def assertTraceContains ( response , message ) : if not hasattr ( response , "verify_trace" ) : raise AttributeError ( "Response object does not contain verify_trace method!" ) if not response . verify_trace ( message , False ) : raise TestStepFail ( 'Assert: Message(s) "%s" not in response' % message ) | Raise TestStepFail if response . verify_trace does not find message from response traces . |
25,751 | def assertDutTraceDoesNotContain ( dut , message , bench ) : if not hasattr ( bench , "verify_trace" ) : raise AttributeError ( "Bench object does not contain verify_trace method!" ) if bench . verify_trace ( dut , message , False ) : raise TestStepFail ( 'Assert: Message(s) "%s" in response' % message ) | Raise TestStepFail if bench . verify_trace finds message from dut traces . |
25,752 | def assertNone ( expr , message = None ) : if expr is not None : raise TestStepFail ( format_message ( message ) if message is not None else "Assert: %s != None" % str ( expr ) ) | Assert that expr is None . |
25,753 | def assertNotNone ( expr , message = None ) : if expr is None : raise TestStepFail ( format_message ( message ) if message is not None else "Assert: %s == None" % str ( expr ) ) | Assert that expr is not None . |
25,754 | def assertEqual ( first , second , message = None ) : if not first == second : raise TestStepFail ( format_message ( message ) if message is not None else "Assert: %s != %s" % ( str ( first ) , str ( second ) ) ) | Assert that first equals second . |
25,755 | def assertNotEqual ( first , second , message = None ) : if not first != second : raise TestStepFail ( format_message ( message ) if message is not None else "Assert: %s == %s" % ( str ( first ) , str ( second ) ) ) | Assert that first does not equal second . |
25,756 | def assertJsonContains ( jsonStr = None , key = None , message = None ) : if jsonStr is not None : try : data = json . loads ( jsonStr ) if key not in data : raise TestStepFail ( format_message ( message ) if message is not None else "Assert: " "Key : %s is not " "in : %s" % ( str ( key ) , str ( jsonStr ) ) ) except (... | Assert that jsonStr contains key . |
25,757 | def get_path ( filename ) : path = abspath ( filename ) if os . path . isdir ( filename ) else dirname ( abspath ( filename ) ) return path | Get absolute path for filename . |
25,758 | def get_git_file_path ( filename ) : git_root = get_git_root ( filename ) return relpath ( filename , git_root ) . replace ( "\\" , "/" ) if git_root else '' | Get relative path for filename in git root . |
25,759 | def get_git_info ( git_folder , verbose = False ) : if verbose : print ( "detect GIT info by folder: '%s'" % git_folder ) try : git_info = { "commitid" : get_commit_id ( git_folder ) , "branch" : get_current_branch ( git_folder ) , "git_path" : get_git_file_path ( git_folder ) , "url" : get_remote_url ( git_folder ) , ... | Detect GIT information by folder . |
25,760 | def __get_git_bin ( ) : git = 'git' alternatives = [ '/usr/bin/git' ] for alt in alternatives : if os . path . exists ( alt ) : git = alt break return git | Get git binary location . |
25,761 | def build ( self ) : if len ( self . dutinformation ) > 0 and ( self . dutinformation . get ( 0 ) . build is not None ) : return self . dutinformation . get ( 0 ) . build . name return None | get build name . |
25,762 | def build_date ( self ) : if len ( self . dutinformation ) > 0 and ( self . dutinformation . get ( 0 ) . build is not None ) : return self . dutinformation . get ( 0 ) . build . date return None | get build date . |
25,763 | def build_sha1 ( self ) : if len ( self . dutinformation ) > 0 and ( self . dutinformation . get ( 0 ) . build is not None ) : return self . dutinformation . get ( 0 ) . build . sha1 return None | get sha1 hash of build . |
25,764 | def build_git_url ( self ) : if len ( self . dutinformation ) > 0 and ( self . dutinformation . get ( 0 ) . build is not None ) : return self . dutinformation . get ( 0 ) . build . giturl return None | get build git url . |
25,765 | def build_data ( self ) : if len ( self . dutinformation ) > 0 and ( self . dutinformation . get ( 0 ) . build is not None ) : return self . dutinformation . get ( 0 ) . build . get_data ( ) return None | get build data . |
25,766 | def build_branch ( self ) : if len ( self . dutinformation ) > 0 and ( self . dutinformation . get ( 0 ) . build is not None ) : return self . dutinformation . get ( 0 ) . build . branch return None | get build branch . |
25,767 | def buildcommit ( self ) : if len ( self . dutinformation ) > 0 and ( self . dutinformation . get ( 0 ) . build is not None ) : return self . dutinformation . get ( 0 ) . build . commit_id return None | get build commit id . |
25,768 | def set_verdict ( self , verdict , retcode = - 1 , duration = - 1 ) : verdict = verdict . lower ( ) if not verdict in [ 'pass' , 'fail' , 'unknown' , 'skip' , 'inconclusive' ] : raise ValueError ( "Unknown verdict {}" . format ( verdict ) ) if retcode == - 1 and verdict == 'pass' : retcode = 0 self . __verdict = verdic... | Set the final verdict for this Result . |
25,769 | def build_result_metadata ( self , data = None , args = None ) : data = data if data else self . _build_result_metainfo ( args ) if data . get ( "build_branch" ) : self . build_branch = data . get ( "build_branch" ) if data . get ( "buildcommit" ) : self . buildcommit = data . get ( "buildcommit" ) if data . get ( "bui... | collect metadata into this object |
25,770 | def _build_result_metainfo ( args ) : data = dict ( ) if hasattr ( args , "branch" ) and args . branch : data [ "build_branch" ] = args . branch if hasattr ( args , "commitId" ) and args . commitId : data [ "buildcommit" ] = args . commitId if hasattr ( args , "gitUrl" ) and args . gitUrl : data [ "build_git_url" ] = a... | Internal helper for collecting metadata from args to results |
25,771 | def get_duration ( self , seconds = False ) : if seconds : return str ( self . duration ) delta = datetime . timedelta ( seconds = self . duration ) return str ( delta ) | Get test case duration . |
25,772 | def has_logs ( self ) : found_files = [ ] if self . logpath is None : return found_files if os . path . exists ( self . logpath ) : for root , _ , files in os . walk ( os . path . abspath ( self . logpath ) ) : for fil in files : found_files . append ( os . path . join ( root , fil ) ) return found_files | Check if log files are available and return file names if they exist . |
25,773 | def open_connection ( self ) : self . logger . debug ( "Open CLI Process '%s'" , ( self . comport ) , extra = { 'type' : '<->' } ) self . cmd = self . comport if isinstance ( self . comport , list ) else [ self . comport ] if not self . comport : raise DutConnectionError ( "Process not defined!" ) try : self . build = ... | Open connection by starting the process . |
25,774 | def writeline ( self , data , crlf = "\n" ) : GenericProcess . writeline ( self , data , crlf = crlf ) | Write data to process . |
25,775 | def _jsonfileconstructor ( self , filename = None , filepath = None , logger = None ) : if filepath : path = filepath else : tc_path = os . path . abspath ( os . path . join ( inspect . getfile ( self . bench . __class__ ) , os . pardir ) ) path = os . path . abspath ( os . path . join ( tc_path , os . pardir , "sessio... | Constructor method for the JsonFile object . |
25,776 | def _get_sd ( file_descr ) : for stream_descr in NonBlockingStreamReader . _streams : if file_descr == stream_descr . stream . fileno ( ) : return stream_descr return None | Get streamdescriptor matching file_descr fileno . |
25,777 | def _read_fd ( file_descr ) : try : line = os . read ( file_descr , 1024 * 1024 ) except OSError : stream_desc = NonBlockingStreamReader . _get_sd ( file_descr ) if stream_desc is not None : stream_desc . has_error = True if stream_desc . callback is not None : stream_desc . callback ( ) return 0 if line : stream_desc ... | Read incoming data from file handle . Then find the matching StreamDescriptor by file_descr value . |
25,778 | def _read_select_kqueue ( k_queue ) : npipes = len ( NonBlockingStreamReader . _streams ) kevents = [ select . kevent ( s . stream . fileno ( ) , filter = select . KQ_FILTER_READ , flags = select . KQ_EV_ADD | select . KQ_EV_ENABLE ) for s in NonBlockingStreamReader . _streams ] while NonBlockingStreamReader . _run_fla... | Read PIPES using BSD Kqueue |
25,779 | def stop ( self ) : NonBlockingStreamReader . _stream_mtx . acquire ( ) NonBlockingStreamReader . _streams . remove ( self . _descriptor ) if not NonBlockingStreamReader . _streams : NonBlockingStreamReader . _run_flag = False NonBlockingStreamReader . _stream_mtx . release ( ) if NonBlockingStreamReader . _run_flag is... | Stop the reader |
25,780 | def use_gdbs ( self , gdbs = True , port = 2345 ) : self . gdbs = gdbs self . gdbs_port = port | Set gdbs use for process . |
25,781 | def use_valgrind ( self , tool , xml , console , track_origins , valgrind_extra_params ) : self . valgrind = tool self . valgrind_xml = xml self . valgrind_console = console self . valgrind_track_origins = track_origins self . valgrind_extra_params = valgrind_extra_params if not tool in [ 'memcheck' , 'callgrind' , 'ma... | Use Valgrind . |
25,782 | def __get_valgrind_params ( self ) : valgrind = [ ] if self . valgrind : valgrind . extend ( [ 'valgrind' ] ) if self . valgrind == 'memcheck' : valgrind . extend ( [ '--tool=memcheck' , '--leak-check=full' ] ) if self . valgrind_track_origins : valgrind . extend ( [ '--track-origins=yes' ] ) if self . valgrind_console... | Get Valgrind command as list . |
25,783 | def writeline ( self , data , crlf = "\r\n" ) : if self . read_thread : if self . read_thread . has_error ( ) : raise RuntimeError ( "Error writing PIPE" ) if self . proc . poll ( ) is not None : raise RuntimeError ( "Process stopped" ) if self . __print_io : self . logger . info ( data , extra = { 'type' : ' } ) self ... | Writeline implementation . |
25,784 | def load ( filename ) : json_obj = Seed . load ( filename ) return SeedInteger ( json_obj [ "seed_value" ] , json_obj [ "seed_id" ] , json_obj [ "date" ] ) | Load seed from a file . |
25,785 | def get_pyserial_version ( self ) : pyserial_version = pkg_resources . require ( "pyserial" ) [ 0 ] . version version = 3.0 match = self . re_float . search ( pyserial_version ) if match : try : version = float ( match . group ( 0 ) ) except ValueError : version = 3.0 return version | ! Retrieve pyserial module version |
25,786 | def readline ( self , timeout = 1 ) : tries = 0 while 1 : try : block = self . read ( 512 ) if isinstance ( block , bytes ) : block = block . decode ( ) elif isinstance ( block , str ) : block = block . decode ( ) else : raise ValueError ( "Unknown data" ) except SerialTimeoutException : block = '' except SerialExcepti... | maxsize is ignored . timeout in seconds is the max time to wait for a complete line . |
25,787 | def readlines ( self , timeout = 1 ) : lines = [ ] while 1 : line = self . readline ( timeout = timeout ) if line : lines . append ( line ) if not line or line [ - 1 : ] != '\n' : break return lines | read all lines that are available . abort after timeout when no more data arrives . |
25,788 | def set ( self , key , value ) : if key == "tags" : self . _set_tag ( tags = value ) else : if isinstance ( value , dict ) and key in self . _requirements and isinstance ( self . _requirements [ key ] , dict ) : self . _requirements [ key ] = merge ( self . _requirements [ key ] , value ) else : self . _requirements [ ... | Sets the value for a specific requirement . |
25,789 | def _set_tag ( self , tag = None , tags = None , value = True ) : existing_tags = self . _requirements . get ( "tags" ) if tags and not tag : existing_tags = merge ( existing_tags , tags ) self . _requirements [ "tags" ] = existing_tags elif tag and not tags : existing_tags [ tag ] = value self . _requirements [ "tags"... | Sets the value of a specific tag or merges existing tags with a dict of new tags . Either tag or tags must be None . |
25,790 | def icetea_main ( ) : from icetea_lib import IceteaManager manager = IceteaManager . IceteaManager ( ) return_code = manager . run ( ) sys . exit ( return_code ) | Main function for running Icetea . Calls sys . exit with the return code to exit . |
25,791 | def build_docs ( location = "doc-source" , target = None , library = "icetea_lib" ) : cmd_ar = [ "sphinx-apidoc" , "-o" , location , library ] try : print ( "Generating api docs." ) retcode = check_call ( cmd_ar ) except CalledProcessError as error : print ( "Documentation build failed. Return code: {}" . format ( erro... | Build documentation for Icetea . Start by autogenerating module documentation and finish by building html . |
25,792 | def find_next ( lines , find_str , start_index ) : mode = None if isinstance ( find_str , basestring ) : mode = 'normal' message = find_str elif isinstance ( find_str , Invert ) : mode = 'invert' message = str ( find_str ) else : raise TypeError ( "Unsupported message type" ) for i in range ( start_index , len ( lines ... | Find the next instance of find_str from lines starting from start_index . |
25,793 | def verify_message ( lines , expected_response ) : position = 0 if isinstance ( expected_response , basestring ) : expected_response = [ expected_response ] if isinstance ( expected_response , set ) : expected_response = list ( expected_response ) if not isinstance ( expected_response , list ) : raise TypeError ( "veri... | Looks for expectedResponse in lines . |
25,794 | def _cleanlogs ( silent = False , log_location = "log" ) : try : print ( "cleaning up Icetea log directory." ) shutil . rmtree ( log_location , ignore_errors = silent , onerror = None if silent else _clean_onerror ) except OSError as error : print ( error ) | Cleans up Mbed - test default log directory . |
25,795 | def list_suites ( suitedir = "./testcases/suites" , cloud = False ) : suites = [ ] suites . extend ( TestSuite . get_suite_files ( suitedir ) ) if cloud : names = cloud . get_campaign_names ( ) if names : suites . append ( "------------------------------------" ) suites . append ( "FROM CLOUD:" ) suites . extend ( name... | Static method for listing suites from both local source and cloud . Uses PrettyTable to generate the table . |
25,796 | def _parse_arguments ( ) : parser = get_base_arguments ( get_parser ( ) ) parser = get_tc_arguments ( parser ) args , unknown = parser . parse_known_args ( ) return args , unknown | Static method for parsing arguments |
25,797 | def check_args ( self ) : parser = get_base_arguments ( get_parser ( ) ) parser = get_tc_arguments ( parser ) if len ( sys . argv ) < 2 : self . logger . error ( "Icetea called with no arguments! " ) parser . print_help ( ) return False elif not self . args . ignore_invalid_params and self . unknown : self . logger . e... | Validates that a valid number of arguments were received and that all arguments were recognised . |
25,798 | def _init_pluginmanager ( self ) : self . pluginmanager = PluginManager ( logger = self . logger ) self . logger . debug ( "Registering execution wide plugins:" ) self . pluginmanager . load_default_run_plugins ( ) self . pluginmanager . load_custom_run_plugins ( self . args . plugin_path ) self . logger . debug ( "Exe... | Initialize PluginManager and load run wide plugins . |
25,799 | def run ( self , args = None ) : retcodesummary = ExitCodes . EXIT_SUCCESS self . args = args if args else self . args if not self . check_args ( ) : return retcodesummary if self . args . clean : if not self . args . tc and not self . args . suite : return retcodesummary version = get_fw_version ( ) if self . args . v... | Runs the set of tests within the given path . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.