INSTRUCTION
stringlengths
1
8.43k
RESPONSE
stringlengths
75
104k
Convert input into a QuantumChannel subclass object or Operator object
def _init_transformer(cls, data): """Convert input into a QuantumChannel subclass object or Operator object""" # This handles common conversion for all QuantumChannel subclasses. # If the input is already a QuantumChannel subclass it will return # the original object if isinstanc...
If dag is mapped and the direction is correct the property is_direction_mapped is set to True ( or to False otherwise ).
def run(self, dag): """ If `dag` is mapped and the direction is correct the property `is_direction_mapped` is set to True (or to False otherwise). Args: dag (DAGCircuit): DAG to check. """ if self.layout is None: if self.property_set["layout"]: ...
Create Graphene Enum for sorting a SQLAlchemy class query
def sort_enum_for_model(cls, name=None, symbol_name=_symbol_name): """Create Graphene Enum for sorting a SQLAlchemy class query Parameters - cls : Sqlalchemy model class Model used to create the sort enumerator - name : str, optional, default None Name to use for the enumerator. If not ...
Returns a Graphene argument for the sort field that accepts a list of sorting directions for a model. If has_default is True ( the default ) it will sort the result by the primary key ( s )
def sort_argument_for_model(cls, has_default=True): """Returns a Graphene argument for the sort field that accepts a list of sorting directions for a model. If `has_default` is True (the default) it will sort the result by the primary key(s) """ enum, default = _sort_enum_for_model(cls) if not has_d...
Find all substrings of the given string which represent date and/or time and parse them.
def search_dates(text, languages=None, settings=None, add_detected_language=False): """Find all substrings of the given string which represent date and/or time and parse them. :param text: A string in a natural language which may contain date and/or time expressions. :type text: str|uni...
Monkey patching _strptime to avoid problems related with non - english locale changes on the system.
def patch_strptime(): """Monkey patching _strptime to avoid problems related with non-english locale changes on the system. For example, if system's locale is set to fr_FR. Parser won't recognize any date since all languages are translated to english dates. """ _strptime = imp.load_module( ...
Get an ordered mapping with locale codes as keys and corresponding locale instances as values.
def get_locale_map(self, languages=None, locales=None, region=None, use_given_order=False, allow_conflicting_locales=False): """ Get an ordered mapping with locale codes as keys and corresponding locale instances as values. :param languages: A list of ...
Yield locale instances.
def get_locales(self, languages=None, locales=None, region=None, use_given_order=False, allow_conflicting_locales=False): """ Yield locale instances. :param languages: A list of language codes, e.g. ['en', 'es', 'zh-Hant']. If locales are not given, l...
Check if tokens are valid tokens for the locale.
def are_tokens_valid(self, tokens): """ Check if tokens are valid tokens for the locale. :param tokens: a list of string or unicode tokens. :type tokens: list :return: True if tokens are valid, False otherwise. """ match_relative_regex = self._get_ma...
Split the date string using translations in locale info.
def split(self, string, keep_formatting=False): """ Split the date string using translations in locale info. :param string: Date string to be splitted. :type string: str|unicode :param keep_formatting: If True, retain formatting of the date s...
Find all substrings of the given string which represent date and/or time and parse them.
def search_dates(self, text, languages=None, settings=None): """ Find all substrings of the given string which represent date and/or time and parse them. :param text: A string in a natural language which may contain date and/or time expressions. :type text: str|unicode ...
Parse date and time from given date string.
def parse(date_string, date_formats=None, languages=None, locales=None, region=None, settings=None): """Parse date and time from given date string. :param date_string: A string representing date and/or time in a recognizably valid format. :type date_string: str|unicode :param date_formats: ...
Attempts to parse the time part of date strings like "1 day ago, 2 PM".
def _parse_time(self, date_string, settings): """Attemps to parse time part of date strings like '1 day ago, 2 PM' """ date_string = PATTERN.sub('', date_string) date_string = re.sub(r'\b(?:ago|in)\b', '', date_string) try: return time_parser(date_string) except: ...
Check if the locale is applicable to translate date string.
def is_applicable(self, date_string, strip_timezone=False, settings=None): """ Check if the locale is applicable to translate date string. :param date_string: A string representing date and/or time in a recognizably valid format. :type date_string: str|unicode :para...
Translate the date string to its English equivalent.
def translate(self, date_string, keep_formatting=False, settings=None): """ Translate the date string to its English equivalent. :param date_string: A string representing date and/or time in a recognizably valid format. :type date_string: str|unicode :param keep_for...
Parse with formats and return a dictionary with period and obj_date.
def parse_with_formats(date_string, date_formats, settings): """ Parse with formats and return a dictionary with 'period' and 'obj_date'. :returns: :class:`datetime.datetime`, dict or None """ period = 'day' for date_format in date_formats: try: date_obj = datetime.strptime(dat...
Parse string representing date and/or time in recognizable localized formats. Supports parsing multiple languages and timezones.
def get_date_data(self, date_string, date_formats=None): """ Parse string representing date and/or time in recognizable localized formats. Supports parsing multiple languages and timezones. :param date_string: A string representing date and/or time in a recognizably valid fo...
return load plan ( timestamps generator )
def get_load_plan(self): """ return load plan (timestamps generator) """ if self.rps_schedule and self.instances_schedule: raise StepperConfigurationError( 'Both rps and instances schedules specified. You must specify only one of them' ) el...
return ammo generator
def get_ammo_generator(self): """ return ammo generator """ af_readers = { 'phantom': missile.AmmoFileReader, 'slowlog': missile.SlowLogReader, 'line': missile.LineReader, 'uri': missile.UriReader, 'uripost': missile.UriPostRead...
translate HTTP code to net code; if the assertion failed, set net code to 314
def _exc_to_net(param1, success): """ translate http code to net code. if accertion failed, set net code to 314 """ if len(param1) <= 3: # FIXME: we're unable to use better logic here, because we should support non-http codes # but, we should look for core.util.HTTP or some other common logic ...
translate exception str to http code
def _exc_to_http(param1): """ translate exception str to http code""" if len(param1) <= 3: try: int(param1) except BaseException: logger.error( "JMeter wrote some strange data into codes column: %s", param1) else: return int(param1) ...
Read phantom tool specific options
def read_config(self): """ Read phantom tool specific options """ self.threads = self.cfg["threads"] or str(int(multiprocessing.cpu_count() / 2) + 1) self.phantom_modules_path = self.cfg["phantom_modules_path"] self.additional_libs = ' '.join(self.cfg["additional_libs"]) ...
Generate phantom tool run config
def compose_config(self): """ Generate phantom tool run config """ streams_config = '' stat_benchmarks = '' for stream in self.streams: streams_config += stream.compose_config() if not stream.is_main: stat_benchmarks += " " + "benchma...
get merged info about phantom conf
def get_info(self): """ get merged info about phantom conf """ result = copy.copy(self.streams[0]) result.stat_log = self.stat_log result.steps = [] result.ammo_file = '' result.rps_schedule = None result.ammo_count = 0 result.duration = 0 result....
reads config
def read_config(self): """ reads config """ # multi-options self.ssl = self.get_option("ssl") self.tank_type = self.get_option("tank_type") # TODO: refactor. Maybe we should decide how to interact with # StepperWrapper here. # self.instances = self.get_option('ins...
compose benchmark block
def compose_config(self): """ compose benchmark block """ # step file self.stepper_wrapper.prepare_stepper() self.stpd = self.stepper_wrapper.stpd if self.stepper_wrapper.instances: self.instances = self.stepper_wrapper.instances if not self.stpd: ...
download remote resources replace links with local filenames add result file section: param dict config: pandora config
def patch_config(self, config): """ download remote resources, replace links with local filenames add result file section :param dict config: pandora config """ # get expvar parameters if config.get("monitoring"): if config["monitoring"].get("expvar"):...
2h 2h5m 5m 180 1h4m3: param duration:: return:
def validate_duration(self, field, duration): ''' 2h 2h5m 5m 180 1h4m3 :param duration: :return: ''' DURATION_RE = r'^(\d+d)?(\d+h)?(\d+m)?(\d+s?)?$' if not re.match(DURATION_RE, duration): self._error(field, 'Load durat...
step ( 10 200 5 180 ) step ( 5 50 2. 5 5m ) line ( 22 154 2h5m ) step ( 5 50 2. 5 5m ) line ( 22 154 2h5m ) const ( 10 1h4m3s ): param field:: param value:: return:
def _validator_load_scheme(self, field, value): ''' step(10,200,5,180) step(5,50,2.5,5m) line(22,154,2h5m) step(5,50,2.5,5m) line(22,154,2h5m) const(10,1h4m3s) :param field: :param value: :return: ''' # stpd file can be any value ...
: returns: [ ( plugin_name plugin_package plugin_config )... ]: rtype: list of tuple
def __parse_enabled_plugins(self): """ :returns: [(plugin_name, plugin_package, plugin_config), ...] :rtype: list of tuple """ return [ ( plugin_name, plugin['package'], plugin) for plugin_name, plugin in sel...
: returns: [ ( plugin_name plugin_package plugin_config )... ]: rtype: list of tuple
def plugins(self): """ :returns: [(plugin_name, plugin_package, plugin_config), ...] :rtype: list of tuple """ if not self._plugins: self._plugins = [ (plugin_name, plugin_cfg['package'], plugin_cfg) for plugin...
This function polls stdout and stderr streams and writes their contents to log
def log_stdout_stderr(log, stdout, stderr, comment=""): """ This function polls stdout and stderr streams and writes their contents to log """ readable = select.select([stdout], [], [], 0)[0] if stderr: exceptional = select.select([stderr], [], [], 0)[0] else: exceptional = [...
helper for above functions
def expand_time(str_time, default_unit='s', multiplier=1): """ helper for above functions """ parser = re.compile(r'(\d+)([a-zA-Z]*)') parts = parser.findall(str_time) result = 0.0 for value, unit in parts: value = int(value) unit = unit.lower() if unit == '': ...
Check whether pid exists in the current process table.
def pid_exists(pid): """Check whether pid exists in the current process table.""" if pid < 0: return False try: os.kill(pid, 0) except OSError as exc: logging.debug("No process[%s]: %s", exc.errno, exc) return exc.errno == errno.EPERM else: p = psutil.Process(...
>>> string = apple orange banana tree green >>> splitstring ( string ) [ apple orange green banana tree ]
def splitstring(string): """ >>> string = 'apple orange "banana tree" green' >>> splitstring(string) ['apple', 'orange', 'green', '"banana tree"'] """ patt = re.compile(r'"[\w ]+"') if patt.search(string): quoted_item = patt.search(string).group() newstring = patt.sub('', str...
Reads { _len } characters if _len is not None else reads line: param pos: start reading position: param _len: number of characters to read: rtype: ( string int )
def read_with_lock(self, pos, _len=None): """ Reads {_len} characters if _len is not None else reads line :param pos: start reading position :param _len: number of characters to read :rtype: (string, int) """ self.wait_lock() try: self._opened_...
get_option wrapper
def get_option(self, option, param2=None):
    """Look up *option* in the plugin config and log the resolved value.

    :param option: config key to read from ``self.cfg``.
    :param param2: accepted for signature compatibility; the value is
        never consulted here, so a missing key surfaces from the
        ``self.cfg[option]`` lookup instead of falling back --
        NOTE(review): confirm callers do not rely on it as a default.
    :return: the raw value stored under ``option``.
    """
    value = self.cfg[option]
    self.log.debug("Option %s = %s", option, value)
    return value
stepper part of reading options
def read_config(self): ''' stepper part of reading options ''' self.log.info("Configuring StepperWrapper...") self.ammo_file = self.get_option(self.OPTION_AMMOFILE) self.ammo_type = self.get_option('ammo_type') if self.ammo_file: self.ammo_file = os.path.expanduser(se...
Generate test data if necessary
def prepare_stepper(self): ''' Generate test data if necessary ''' def publish_info(stepper_info): info.status.publish('loadscheme', stepper_info.loadscheme) info.status.publish('loop_count', stepper_info.loop_count) info.status.publish('steps', stepper_info.steps) ...
Choose the name for stepped data file
def __get_stpd_filename(self): ''' Choose the name for stepped data file ''' if self.use_caching: sep = "|" hasher = hashlib.md5() hashed_str = "cache version 6" + sep + \ ';'.join(self.load_profile.schedule) + sep + str(self.loop_limit) ha...
Read stepper info from json
def __read_cached_options(self):
    """Load previously cached stepper info from its JSON file.

    :return: ``info.StepperInfo`` built from the cached JSON mapping.
    """
    self.log.debug(
        "Reading cached stepper info: %s", self.__si_filename())
    with open(self.__si_filename(), 'r') as si_file:
        return info.StepperInfo(**json.load(si_file))
Write stepper info to json
def __write_cached_options(self, si):
    """Persist stepper info *si* to the cache file as indented JSON.

    :param si: namedtuple-like stepper info exposing ``_asdict()``.
    """
    self.log.debug("Saving stepper info: %s", self.__si_filename())
    with open(self.__si_filename(), 'w') as cache_file:
        json.dump(si._asdict(), cache_file, indent=4)
stpd generation using Stepper class
def __make_stpd_file(self): ''' stpd generation using Stepper class ''' self.log.info("Making stpd-file: %s", self.stpd) stepper = Stepper( self.core, rps_schedule=self.load_profile.schedule if self.load_profile.is_rps() else None, http_ver=self.http_ver, ...
Create Load Plan as defined in schedule. Publish info about its duration.
def create(rps_schedule): """ Create Load Plan as defined in schedule. Publish info about its duration. """ if len(rps_schedule) > 1: lp = Composite( [StepFactory.produce(step_config) for step_config in rps_schedule]) else: lp = StepFactory.produce(rps_schedule[0]) in...
: param n: number of charge: return: when to shoot nth charge milliseconds
def ts(self, n): """ :param n: number of charge :return: when to shoot nth charge, milliseconds """ try: root1, root2 = solve_quadratic(self.slope / 2.0, self.minrps, -n) except ZeroDivisionError: root2 = float(n) / self.minrps return int(r...
Return rps for second t
def rps_at(self, t):
    """Return the planned RPS at second *t* of a linear load ramp.

    Interpolates between ``self.minrps`` and ``self.maxrps`` across
    ``self.duration`` seconds; any *t* outside ``[0, duration]`` yields
    a load of zero.
    """
    if t < 0 or t > self.duration:
        return 0
    return self.minrps + \
        float(self.maxrps - self.minrps) * t / self.duration
get list of constant load parts ( we have no constant load at all but tank will think so ) with parts durations ( float )
def get_float_rps_list(self): ''' get list of constant load parts (we have no constant load at all, but tank will think so), with parts durations (float) ''' int_rps = range(int(self.minrps), int(self.maxrps) + 1) step_duration = float(self.duration) / len(int_rps) ...
get list of each second s rps: returns: list of tuples ( rps duration of corresponding rps in seconds ): rtype: list
def get_rps_list(self): """ get list of each second's rps :returns: list of tuples (rps, duration of corresponding rps in seconds) :rtype: list """ seconds = range(0, int(self.duration) + 1) rps_groups = groupby([proper_round(self.rps_at(t)) for t in seconds], ...
Execute and check exit code
def execute(self, cmd): """ Execute and check exit code """ self.log.info("Executing: %s", cmd) retcode = execute( cmd, shell=True, poll_period=0.1, catch_out=self.catch_out)[0] if retcode: raise RuntimeError("Subprocess returned %s" % retcode) ...
The reason why we have two separate methods for monitoring and aggregates is a strong difference in incoming data.
def decode_monitoring(self, data): """ The reason why we have two separate methods for monitoring and aggregates is a strong difference in incoming data. """ points = list() for second_data in data: for host, host_data in second_data["data"].iteritems(): ...
x Make a set of points for this label
def __make_points_for_label(self, ts, data, label, prefix, gun_stats): """x Make a set of points for `this` label overall_quantiles, overall_meta, net_codes, proto_codes, histograms """ label_points = list() label_points.extend( ( # overall q...
Parameters ---------- measurement: string measurement type ( e. g. monitoring overall_meta net_codes proto_codes overall_quantiles ) additional_tags: dict custom additional tags for this points ts: integer timestamp fields: dict influxdb columns
def __make_points(self, measurement, additional_tags, ts, fields): """ Parameters ---------- measurement : string measurement type (e.g. monitoring, overall_meta, net_codes, proto_codes, overall_quantiles) additional_tags : dict custom additional tags for ...
publish value to status
def publish(self, key, value):
    """Forward a status *key*/*value* pair to the core, tagged with the
    publishing plugin's class name as the section.
    """
    source = self.__class__.__name__
    self.log.debug("Publishing status: %s/%s: %s", source, key, value)
    self.core.publish(source, key, value)
helper to aggregate codes by mask
def count_matched_codes(codes_regex, codes_dict):
    """Sum the counts of every code whose string form matches *codes_regex*.

    :param codes_regex: compiled regex, applied via ``.match`` to ``str(code)``.
    :param codes_dict: mapping of code -> occurrence count.
    :return: total count over all matching codes.
    """
    return sum(
        count
        for code, count in codes_dict.items()
        if codes_regex.match(str(code))
    )
Say the workers to finish their jobs and quit.
def stop(self): """ Say the workers to finish their jobs and quit. """ self.quit.set() # yapf:disable while sorted([ self.pool[i].is_alive() for i in xrange(len(self.pool))])[-1]: time.sleep(1) # yapf:enable try:...
A feeder that runs in distinct thread in main process.
def _feed(self): """ A feeder that runs in distinct thread in main process. """ self.plan = StpdReader(self.stpd_filename) if self.cached_stpd: self.plan = list(self.plan) for task in self.plan: if self.quit.is_set(): logger.info("S...
A worker that does actual jobs
def _worker(self): """ A worker that does actual jobs """ logger.debug("Init shooter process") try: self.gun.setup() except Exception: logger.exception("Couldn't initialize gun. Exit shooter process") return while not self.quit....
A worker that does actual jobs
def _green_worker(self): """ A worker that does actual jobs """ while not self.quit.is_set(): try: task = self.green_queue.get(timeout=1) timestamp, missile, marker = task planned_time = self.start_time + (timestamp / 1000.0) ...
Set up logging
def init_logging(self, log_filename="tank.log"): """ Set up logging """ logger = logging.getLogger('') self.log_filename = log_filename self.core.add_artifact_file(self.log_filename) file_handler = logging.FileHandler(self.log_filename) file_handler.setLevel(logging.DEBU...
override config options with user specified options
def __add_user_options(self):
    """Apply user-specified shorthand options on top of the loaded config."""
    user_options = self.options.get('user_options', None)
    if user_options:
        self.core.apply_shorthand_options(user_options)
Make preparations before running Tank
def configure(self, options): """ Make preparations before running Tank """ self.options = options if self.options.get('lock_dir', None): self.core.set_option(self.core.SECTION, "lock_dir", self.options['lock_dir']) if self.options.get('ignore_lock', None): self.c...
returns default configs list from/ etc home dir and package_data
def get_default_configs(self): """ returns default configs list, from /etc, home dir and package_data""" # initialize basic defaults configs = [resource_filename(__name__, 'config/00-base.ini')] try: conf_files = sorted(os.listdir(self.baseconfigs_location)) for f...
call shutdown routines
def __graceful_shutdown(self): """ call shutdown routines """ retcode = 1 self.log.info("Trying to shutdown gracefully...") retcode = self.core.plugins_end_test(retcode) retcode = self.core.plugins_post_process(retcode) self.log.info("Done graceful shutdown") retu...
Collect data cache it and send to listeners
def _collect_data(self, end=False): """ Collect data, cache it and send to listeners """ data = get_nowait_from_queue(self.results) stats = get_nowait_from_queue(self.stats_results) logger.debug("Data timestamps: %s" % [d.get('ts') for d in data]) logger.debug("St...
notify all listeners about aggregate data and stats
def __notify_listeners(self, data, stats):
    """Fan out freshly aggregated *data* and *stats* to every registered
    listener via its ``on_aggregated_data`` callback.
    """
    for subscriber in self.listeners:
        subscriber.on_aggregated_data(data, stats)
Returns a marker function of the requested marker_type
def get_marker(marker_type, enum_ammo=False): ''' Returns a marker function of the requested marker_type >>> marker = get_marker('uniq')(__test_missile) >>> type(marker) <type 'str'> >>> len(marker) 32 >>> get_marker('uri')(__test_missile) '_example_search_hello_help_us' >>> m...
: type column_mapping: dict: type data_session: DataSession
def get_uploader(data_session, column_mapping, overall_only=False): """ :type column_mapping: dict :type data_session: DataSession """ overall = {col_name: data_session.new_aggregated_metric(name + ' overall') for col_name, name in column_mapping.items()} def upload_df(df): ...
: type path: str
def cfg_folder_loader(path):
    """Load every ``*.yaml`` config found directly inside *path*.

    Matched files are processed in sorted (lexicographic) order so the
    load sequence is deterministic.

    :type path: str
    :return: list of parsed configs, one per matched file.
    """
    pattern = os.path.join(path, '*.yaml')
    configs = []
    for filename in sorted(glob.glob(pattern)):
        configs.append(load_cfg(filename))
    return configs
: type options: list of str: rtype: list of dict
def parse_options(options): """ :type options: list of str :rtype: list of dict """ if options is None: return [] else: return [ convert_single_option(key.strip(), value.strip()) for key, value in [option.split('=', 1) for option in options] ...
returns default configs list from/ etc and home dir
def get_default_configs(): """ returns default configs list, from /etc and home dir """ # initialize basic defaults configs = [resource_filename(__name__, 'config/00-base.ini')] baseconfigs_location = '/etc/yandex-tank' try: conf_files = sorted(os.listdir(baseconfigs_location)) for f...
clean markup from string
def clean_markup(self, orig_str): ''' clean markup from string ''' for val in [ self.YELLOW, self.RED, self.RESET, self.CYAN, self.BG_MAGENTA, self.WHITE, self.BG_GREEN, self.GREEN, self.BG_BROWN, self.RED_DARK, self.MAGENTA, self.BG_CYAN ]: orig_s...
Parse duration string such as 3h2m3s into milliseconds
def parse_duration(duration): ''' Parse duration string, such as '3h2m3s' into milliseconds >>> parse_duration('3h2m3s') 10923000 >>> parse_duration('0.3s') 300 >>> parse_duration('5') 5000 ''' _re_token = re.compile("([0-9.]+)([dhms]?)") def parse_token(time, multiplier)...
>>> solve_quadratic ( 1. 0 2. 0 1. 0 ) ( - 1. 0 - 1. 0 )
def solve_quadratic(a, b, c):
    """Return the two real roots of ``a*x**2 + b*x + c = 0`` as a tuple,
    ``(-b - sqrt(D))/(2a)`` first.

    Raises ``ValueError`` for a negative discriminant, and
    ``ZeroDivisionError`` when ``a == 0`` -- callers rely on catching
    the latter.

    >>> solve_quadratic(1.0, 2.0, 1.0)
    (-1.0, -1.0)
    """
    disc_root = math.sqrt((b * b) - 4 * a * c)
    denominator = 2 * a
    return ((-b - disc_root) / denominator,
            (-b + disc_root) / denominator)
rounds float to closest int: rtype: int: param n: float
def proper_round(n):
    """Round a float to the closest int, rounding halves away from zero.

    Unlike Python 3's builtin ``round`` (banker's rounding), a
    fractional part of exactly .5 always moves away from zero:
    ``proper_round(0.5) == 1`` and ``proper_round(-0.5) == -1``.

    Fix: the previous one-liner multiplied by ``n / abs(n)`` (a float),
    so it returned a float despite the documented ``:rtype: int``; this
    version returns a genuine int with identical numeric values.

    :param n: float
    :rtype: int
    """
    if n == 0:
        return 0
    sign = 1 if n > 0 else -1
    truncated = int(n)
    # away-from-zero tie-break: bump |n| up when the dropped fraction >= .5
    if abs(n - truncated) >= 0.5:
        return truncated + sign
    return truncated
Start local agent
def start(self): """Start local agent""" logger.info('Starting agent on localhost') args = self.python.split() + [ os.path.join( self.workdir, self.AGENT_FILENAME), '--telegraf', self.path['TELEGRAF_LOCAL_PATH'], '--...
Remove the agent's files from the remote host
def uninstall(self): """ Remove agent's files from remote host """ if self.session: logger.info('Waiting monitoring data...') self.session.terminate() self.session.wait() self.session = None log_filename = "agent_{host}.log".format(...
Create folder and copy agent and metrics scripts to remote host
def install(self): """Create folder and copy agent and metrics scripts to remote host""" logger.info( "Installing monitoring agent at %s@%s...", self.username, self.host) # create remote temp dir cmd = self.python + ' -c "import tempfile; print tempfi...
Start remote agent
def start(self): """Start remote agent""" logger.info('Starting agent: %s', self.host) command = "{python} {agent_path} --telegraf {telegraf_path} --host {host} {kill_old}".format( python=self.python, agent_path=os.path.join( self.path['AGENT_REMOTE_FOLDER...
Remove the agent's files from the remote host
def uninstall(self): """ Remove agent's files from remote host """ log_filename = "agent_{host}.log".format(host=self.host) data_filename = "agent_{host}.rawdata".format(host=self.host) try: if self.session: self.session.send("stop\n") ...
: type cfg_ini: ConfigParser
def parse_sections(cfg_ini): """ :type cfg_ini: ConfigParser """ return [Section(section.lower(), guess_plugin(section.lower()), without_defaults(cfg_ini, section)) for section in cfg_ini.sections() if not re.match(CORE_SECTION_PATTERN, sec...
: type sections: list of Section: rtype: list of Section
def combine_sections(sections): """ :type sections: list of Section :rtype: list of Section """ PLUGINS_TO_COMBINE = { 'Phantom': ('phantom', 'multi', True), 'Bfg': ('bfg', 'gun_config', False) } plugins = {} ready_sections = [] for section in sections: if sec...
: rtype: { str: object }
def converted(self):
    """Lazily convert this option's raw value and memoize the result.

    :rtype: {str: object}
    """
    cached = self._converted
    if cached is None:
        cached = self.converter(self.name, self.value)
        self._converted = cached
    return cached
: rtype: ( str object )
def as_tuple(self):
    """Return this option as a single ``(name, value)`` pair, lazily
    taken from :attr:`converted` and cached.

    Fix: ``self.converted.items()[0]`` only works on Python 2, where
    ``items()`` returns a list; on Python 3 dict views are not
    subscriptable. ``next(iter(...))`` yields the first pair on both.

    :rtype: (str, object)
    """
    if self._as_tuple is None:
        self._as_tuple = next(iter(self.converted.items()))
    return self._as_tuple
: rtype: callable
def converter(self): """ :rtype: callable """ if self._converter is None: try: self._converter = self.SPECIAL_CONVERTERS[self.plugin][self.name] except KeyError: try: self._converter = self._get_scheme_converter(...
: type parent_name: str: type sections: list of Section
def from_multiple(cls, sections, parent_name=None, child_name=None, is_list=True): """ :type parent_name: str :type sections: list of Section """ if len(sections) == 1: return sections[0] if parent_name: master_section = filter(lambda section: sect...
Underlines content with =. New lines and tabs will be replaced: param str content:: param str new_line_replacement:: param str tab_replacement:: return: unicode
def title(content, new_line_replacement=' ', tab_replacement=' '): """ Underlines content with '='. New lines and tabs will be replaced :param str content: :param str new_line_replacement: :param str tab_replacement: :return: unicode """ prepared_content ...
Searching for line in jmeter. log such as Waiting for possible shutdown message on port 4445
def __discover_jmeter_udp_port(self): """Searching for line in jmeter.log such as Waiting for possible shutdown message on port 4445 """ r = re.compile(self.DISCOVER_PORT_PATTERN) with open(self.process_stderr.name, 'r') as f: cnt = 0 while self.process.pi...
Genius idea by Alexey Lavrenyuk
def __add_jmeter_components(self, jmx, jtl, variables): """ Genius idea by Alexey Lavrenyuk """ logger.debug("Original JMX: %s", os.path.realpath(jmx)) with open(jmx, 'r') as src_jmx: source_lines = src_jmx.readlines() try: # In new Jmeter version (3.2 as example...
Graceful termination of the running process
def __terminate(self): """Gracefull termination of running process""" if self.__stderr_file: self.__stderr_file.close() if not self.__process: return waitfor = time.time() + _PROCESS_KILL_TIMEOUT while time.time() < waitfor: try: ...
Parse lines and return stats
def _read_data(self, lines): """ Parse lines and return stats """ results = [] for line in lines: timestamp, rps, instances = line.split("\t") curr_ts = int(float(timestamp)) # We allow floats here, but tank expects only seconds if self.__las...
instantiate criterion from config string
def __create_criterion(self, criterion_str): """ instantiate criterion from config string """ parsed = criterion_str.split("(") type_str = parsed[0].strip().lower() parsed[1] = parsed[1].split(")")[0].strip() for criterion_class in self.custom_criterions: if criterio...
Prepare config data.
def getconfig(self, filename, target_hint): """Prepare config data.""" try: tree = self.parse_xml(filename) except IOError as exc: logger.error("Error loading config: %s", exc) raise RuntimeError("Can't read monitoring config %s" % filename) hosts = tr...
Startup and shutdown commands config Used by agent. py on the target
def create_startup_config(self): """ Startup and shutdown commands config Used by agent.py on the target """ cfg_path = "agent_startup_{}.cfg".format(self.host) if os.path.isfile(cfg_path): logger.info( 'Found agent startup config file in working dire...
bash script w/ custom commands inside inspired by half a night trying to avoid escaping bash special characters
def create_custom_exec_script(self): """ bash script w/ custom commands inside inspired by half a night trying to avoid escaping bash special characters """ cfg_path = "agent_customs_{}.cfg".format(self.host) if os.path.isfile(cfg_path): logger.info( ...
Telegraf collector config toml format
def create_collector_config(self, workdir): """ Telegraf collector config, toml format """ cfg_path = "agent_collector_{}.cfg".format(self.host) if os.path.isfile(cfg_path): logger.info( 'Found agent config file in working directory with the same name...
raise exception on disk space exceeded
def __check_disk(self): ''' raise exception on disk space exceeded ''' cmd = "sh -c \"df --no-sync -m -P -l -x fuse -x tmpfs -x devtmpfs -x davfs -x nfs " cmd += self.core.artifacts_base_dir cmd += " | tail -n 1 | awk '{print \$4}' \"" res = execute(cmd, True, 0.1, True) ...
raise exception on RAM exceeded
def __check_mem(self): ''' raise exception on RAM exceeded ''' mem_free = psutil.virtual_memory().available / 2**20 self.log.debug("Memory free: %s/%s", mem_free, self.mem_limit) if mem_free < self.mem_limit: raise RuntimeError( "Not enough resources: free mem...
Gets width and height of terminal viewport
def get_terminal_size(): ''' Gets width and height of terminal viewport ''' default_size = (30, 120) env = os.environ def ioctl_gwinsz(file_d): ''' Helper to get console size ''' try: sizes = struct.unpack( 'hh', fcntl.ioctl(file_d, te...
Gets next line for right panel
def __get_right_line(self, widget_output): ''' Gets next line for right panel ''' right_line = '' if widget_output: right_line = widget_output.pop(0) if len(right_line) > self.right_panel_width: right_line_plain = self.markup.clean_markup(rig...
Cut a tuple of line chunks according to its visible length
def __truncate(self, line_arr, max_width): ''' Cut tuple of line chunks according to it's wisible lenght ''' def is_space(chunk): return all([True if i == ' ' else False for i in chunk]) def is_empty(chunks, markups): result = [] for chunk in chunks: ...