code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1
value |
|---|---|---|---|---|---|
import re
from robot import utils
from .formatters import _DataFileFormatter
class HtmlFormatter(_DataFileFormatter):
    """Formats parsed test data into rows of HTML table cells."""

    # Documentation is put into a single cell spanning several columns,
    # so it is never split over multiple rows.
    _split_multiline_doc = False

    def _format_row(self, row, table=None):
        """Convert one data row into a list of ``HtmlCell`` instances."""
        row = self._pad(self._escape_consecutive_whitespace(row), table)
        if self._is_documentation_row(row):
            return self._create_documentation_row(row)
        first_cell = self._create_first_cell(row[0], table)
        if self._is_indented_documentation_row(row[1:], table):
            return self._create_indented_documentation_row(first_cell, row[1:])
        return [first_cell] + [HtmlCell(c) for c in row[1:]]

    def _is_documentation_row(self, row):
        # 'Documentation' setting in the first column (setting table level).
        return row[0] == 'Documentation'

    def _create_documentation_row(self, row):
        # The documentation text gets one cell spanning the rest of the row.
        return [NameCell(row[0]),
                DocumentationCell(row[1], span=self._column_count-1)]

    def _is_indented_documentation_row(self, cells, table):
        # '[Documentation]' setting inside a test case or keyword table.
        return self._is_indented_table(table) and cells and \
            cells[0] == '[Documentation]'

    def _create_indented_documentation_row(self, first_cell, cells):
        start = [first_cell, HtmlCell(cells[0])]
        # Rows containing comment cells cannot use colspan, or the comments
        # would be merged into the documentation cell.
        if any(c.startswith('#') for c in cells):
            return start + [HtmlCell(c) for c in cells[1:]]
        return start + [DocumentationCell(cells[1], self._column_count-2)]

    def _create_first_cell(self, cell, table):
        # Test and keyword names become anchors so they can be linked to.
        if self._is_indented_table(table) and cell:
            return AnchorNameCell(cell, 'keyword' if table.type == 'keyword'
                                        else 'test')
        return NameCell(cell)

    def format_header(self, table):
        """Return the table header as a list of ``HeaderCell`` instances."""
        if not self._should_align_columns(table) or len(table.header) == 1:
            return [HeaderCell(table.header[0], self._column_count)]
        headers = self._pad_header(table)
        return [HeaderCell(hdr) for hdr in headers]

    def _pad_header(self, table):
        header = table.header
        return header + [''] * (self._get_column_count(table) - len(header))

    def _pad(self, row, table):
        # Pad with empty cells so that every row has the same width.
        return row + [''] * (self._get_column_count(table) - len(row))

    def _get_column_count(self, table):
        if table is None or len(table.header) == 1 \
                or not self._is_indented_table(table):
            return self._column_count
        return max(self._max_column_count(table), len(table.header))

    def _max_column_count(self, table):
        # Width of the widest row, including the indentation column.
        count = 0
        for item in table:
            for child in item:
                count = max(count, len(child.as_list()) + 1)
        return count
class HtmlCell(object):
    """A single HTML table cell with optionally escaped content."""

    _backslash_matcher = re.compile(r'(\\+)n ?')

    def __init__(self, content='', attributes=None, tag='td', escape=True):
        escaped = utils.html_escape(content) if escape else content
        self.content = self._replace_newlines(escaped)
        self.attributes = attributes if attributes else {}
        self.tag = tag

    def _replace_newlines(self, content):
        # A '\n' escape represents a real newline only when preceded by an
        # odd number of backslashes; render those as explicit <br> tags.
        def add_br(match):
            backslashes = match.group(1)
            if len(backslashes) % 2:
                return '%sn<br>\n' % backslashes
            return match.group()
        return self._backslash_matcher.sub(add_br, content)
class NameCell(HtmlCell):
    """Cell used for names in the first column."""

    def __init__(self, name='', attributes=None):
        # Note: the given `attributes` are intentionally ignored; name
        # cells always get only the 'name' CSS class.
        super(NameCell, self).__init__(name, {'class': 'name'})
class AnchorNameCell(HtmlCell):
    """Name cell whose content is an HTML anchor for linking."""

    def __init__(self, name, type_):
        link = self._link_from_name(name, type_)
        # escape=False because the content is already valid, escaped HTML.
        super(AnchorNameCell, self).__init__(link, {'class': 'name'},
                                             escape=False)

    def _link_from_name(self, name, type_):
        # Anchor names look like e.g. 'test_My Test' or 'keyword_My KW'.
        return '<a name="%s_%s">%s</a>' % (type_,
                                           utils.attribute_escape(name),
                                           utils.html_escape(name))
class DocumentationCell(HtmlCell):
    """Cell containing documentation text, spanning multiple columns."""

    def __init__(self, content, span):
        attrs = {'class': 'colspan%d' % span, 'colspan': '%d' % span}
        super(DocumentationCell, self).__init__(content, attrs)
class HeaderCell(HtmlCell):
    """Table header (``<th>``) cell, optionally spanning several columns."""

    def __init__(self, name, span=1):
        # Dataset-extraction metadata that was fused onto the closing line
        # has been removed; it made this definition syntactically invalid.
        HtmlCell.__init__(self, name,
                          {'class': 'name', 'colspan': '%d' % span},
                          tag='th')
import re
from .aligners import FirstColumnAligner, ColumnAligner, NullAligner
from .dataextractor import DataExtractor
from .rowsplitter import RowSplitter
class _DataFileFormatter(object):
_whitespace = re.compile('\s{2,}')
_split_multiline_doc = True
def __init__(self, column_count):
self._splitter = RowSplitter(column_count, self._split_multiline_doc)
self._column_count = column_count
self._extractor = DataExtractor(self._want_names_on_first_content_row)
def _want_names_on_first_content_row(self, table, name):
return True
def empty_row_after(self, table):
return self._format_row([], table)
def format_header(self, table):
header = self._format_row(table.header)
return self._format_header(header, table)
def format_table(self, table):
rows = self._extractor.rows_from_table(table)
if self._should_split_rows(table):
rows = self._split_rows(rows, table)
return (self._format_row(r, table) for r in rows)
def _should_split_rows(self, table):
return not self._should_align_columns(table)
def _split_rows(self, original_rows, table):
for original in original_rows:
for split in self._splitter.split(original, table.type):
yield split
def _should_align_columns(self, table):
return self._is_indented_table(table) and bool(table.header[1:])
def _is_indented_table(self, table):
return table is not None and table.type in ['test case', 'keyword']
def _escape_consecutive_whitespace(self, row):
return [self._whitespace.sub(self._whitespace_escaper,
cell.replace('\n', ' ')) for cell in row]
def _whitespace_escaper(self, match):
return '\\'.join(match.group(0))
def _format_row(self, row, table=None):
raise NotImplementedError
def _format_header(self, header, table):
raise NotImplementedError
class TsvFormatter(_DataFileFormatter):
    """Formats parsed test data into tab-separated rows."""

    def _format_header(self, header, table):
        return [self._format_header_cell(c) for c in header]

    def _format_header_cell(self, cell):
        # Header cells are decorated with asterisks, e.g. '*Setting*'.
        if not cell:
            return ''
        return '*%s*' % cell

    def _format_row(self, row, table=None):
        return self._pad(self._escape(row))

    def _escape(self, row):
        return self._escape_consecutive_whitespace(self._escape_tabs(row))

    def _escape_tabs(self, row):
        # Literal tabs would break the tab-separated format.
        return [cell.replace('\t', '\\t') for cell in row]

    def _pad(self, row):
        # Newlines cannot appear inside TSV cells; also pad the row so that
        # every row has the full column count.
        cells = [cell.replace('\n', ' ') for cell in row]
        missing = self._column_count - len(cells)
        return cells + [''] * missing
class TxtFormatter(_DataFileFormatter):
    """Formats parsed test data into the plain text (space separated) format."""

    _test_or_keyword_name_width = 18
    _setting_and_variable_name_width = 14

    def _format_row(self, row, table=None):
        escaped = self._escape(row)
        return self._aligner_for(table).align_row(escaped)

    def _aligner_for(self, table):
        # Setting and variable tables align only the first column; test and
        # keyword tables may align all columns.
        if table and table.type in ['setting', 'variable']:
            return FirstColumnAligner(self._setting_and_variable_name_width)
        if self._should_align_columns(table):
            return ColumnAligner(self._test_or_keyword_name_width, table)
        return NullAligner()

    def _format_header(self, header, table):
        decorated = ['*** %s ***' % header[0]] + header[1:]
        return self._aligner_for(table).align_row(decorated)

    def _want_names_on_first_content_row(self, table, name):
        # Names that are too long go on their own row to keep alignment.
        if not self._should_align_columns(table):
            return False
        return len(name) <= self._test_or_keyword_name_width

    def _escape(self, row):
        if not row:
            return row
        return self._escape_cells(self._escape_consecutive_whitespace(row))

    def _escape_cells(self, row):
        # Empty cells in the middle of a row must be escaped, or the parser
        # would interpret them as the end of the row.
        return [row[0]] + [self._escape_empty(c) for c in row[1:]]

    def _escape_empty(self, cell):
        return cell if cell else '\\'
class PipeFormatter(TxtFormatter):
    """Formats parsed test data into the pipe separated text format.

    Dataset-extraction metadata fused onto the original closing line has
    been removed; it made the definition syntactically invalid.
    """

    def _escape_cells(self, row):
        return [self._escape_empty(self._escape_pipes(cell)) for cell in row]

    def _escape_empty(self, cell):
        # With pipe separators an empty cell can be represented as a space.
        return cell or ' '

    def _escape_pipes(self, cell):
        """Escape pipe characters that would be parsed as cell separators."""
        if ' | ' in cell:
            cell = cell.replace(' | ', ' \\| ')
        if cell.startswith('| '):
            cell = '\\' + cell
        if cell.endswith(' |'):
            cell = cell[:-1] + '\\|'
        return cell
from six import PY3
import os
import sys
from robot.errors import DataError
from .filewriters import FileWriter
class DataFileWriter(object):
    """Object to write parsed test data file objects back to disk."""

    def __init__(self, **options):
        """Store `options` used later to create a :class:`.WritingContext`.

        :param `**options`: A :class:`.WritingContext` is created based on
            these when :meth:`write` is called.
        """
        self._options = options

    def write(self, datafile):
        """Write the given `datafile` using the stored `**options`.

        :param datafile: The parsed test data object to be written
        :type datafile: :py:class:`~robot.parsing.model.TestCaseFile`,
            :py:class:`~robot.parsing.model.ResourceFile`,
            :py:class:`~robot.parsing.model.TestDataDirectory`
        """
        with WritingContext(datafile, **self._options) as context:
            writer = FileWriter(context)
            writer.write(datafile)
class WritingContext(object):
    """Contains configuration used in writing a test data file to disk.

    Dataset-extraction metadata fused onto the original closing line has
    been removed; it made the definition syntactically invalid.
    """
    encoding = 'UTF-8'
    txt_format = 'txt'
    html_format = 'html'
    tsv_format = 'tsv'
    robot_format = 'robot'
    txt_column_count = 8
    html_column_count = 5
    tsv_column_count = 8
    _formats = [txt_format, html_format, tsv_format, robot_format]

    def __init__(self, datafile, format='', output=None, pipe_separated=False,
                 txt_separating_spaces=4, line_separator='\n'):
        """
        :param datafile: The datafile to be written.
        :type datafile: :py:class:`~robot.parsing.model.TestCaseFile`,
            :py:class:`~robot.parsing.model.ResourceFile`,
            :py:class:`~robot.parsing.model.TestDataDirectory`
        :param str format: Output file format. If omitted, read from the
            extension of the `source` attribute of the given `datafile`.
        :param output: An open, file-like object used in writing. If
            omitted, value of `source` attribute of the given `datafile` is
            used to construct a new file object.
        :param bool pipe_separated: Whether to use pipes as separator when
            output file format is txt.
        :param int txt_separating_spaces: Number of separating spaces between
            cells in space separated format.
        :param str line_separator: Line separator used in output files.

        If `output` is not given, an output file is created based on the
        source of the given datafile and value of `format`. Examples:

        Write output in a StringIO instance using format of
        `datafile.source`::

            WriteConfiguration(datafile, output=StringIO)

        Output file is created from `datafile.source` by stripping extension
        and replacing it with `html`::

            WriteConfiguration(datafile, format='html')
        """
        self.datafile = datafile
        self.pipe_separated = pipe_separated
        self.line_separator = line_separator
        self._given_output = output
        self.format = self._validate_format(format) or self._format_from_file()
        self.txt_separating_spaces = txt_separating_spaces
        self.output = output

    def __enter__(self):
        if not self.output:
            # In Python 3, open with 'wb' only accepts bytes data,
            # which causes TypeErrors at other points.
            mode = 'w' if PY3 else 'wb'
            self.output = open(self._output_path(), mode)
        return self

    def __exit__(self, *exc_info):
        # Close the file only if we opened it ourselves.
        if self._given_output is None:
            self.output.close()

    def _validate_format(self, format):
        format = format.lower() if format else ''
        if format and format not in self._formats:
            raise DataError('Invalid format: %s' % format)
        return format

    def _format_from_file(self):
        return self._format_from_extension(self._source_from_file())

    def _format_from_extension(self, path):
        # '/path/name.HTML' -> 'html'
        return os.path.splitext(path)[1][1:].lower()

    def _output_path(self):
        return '%s.%s' % (self._base_name(), self.format)

    def _base_name(self):
        return os.path.splitext(self._source_from_file())[0]

    def _source_from_file(self):
        # Directories have an `initfile`; files use `source` directly.
        return getattr(self.datafile, 'initfile', self.datafile.source)
from six import PY3
import sys
from robot.utils import Matcher, NormalizedDict, is_string, setter, unic
class Tags(object):
    """Immutable, normalized collection of tag names."""

    def __init__(self, tags=None):
        # `tags` may be None, a single string, or an iterable of strings;
        # the `setter` descriptor below normalizes the value.
        self._tags = tags

    @setter
    def _tags(self, tags):
        if not tags:
            return ()
        if is_string(tags):
            tags = (tags,)
        return self._normalize(tags)

    def _normalize(self, tags):
        # NormalizedDict removes duplicates case-, space- and
        # underscore-insensitively while preserving insertion order.
        normalized = NormalizedDict(((unic(t), 1) for t in tags), ignore='_')
        # Empty tags and the reserved value 'NONE' are dropped.
        for removed in '', 'NONE':
            if removed in normalized:
                normalized.pop(removed)
        return tuple(normalized)

    def add(self, tags):
        """Add the given tag or tags to this collection."""
        self._tags = tuple(self) + tuple(Tags(tags))

    def remove(self, tags):
        """Remove all tags matching the given pattern or patterns."""
        tags = TagPatterns(tags)
        self._tags = [t for t in self if not tags.match(t)]

    def match(self, tags):
        """Return True if any tag in this collection matches `tags`."""
        return TagPatterns(tags).match(self)

    def __contains__(self, tags):
        return self.match(tags)

    def __len__(self):
        return len(self._tags)

    def __iter__(self):
        return iter(self._tags)

    def __unicode__(self):
        return u'[%s]' % ', '.join(self)

    def __repr__(self):
        return repr(list(self))

    def __str__(self):
        # On Python 3 `__str__` must return text; on Python 2 it returns
        # UTF-8 encoded bytes.
        if PY3:
            return self.__unicode__()
        return unicode(self).encode('UTF-8')

    #PY3
    def __bytes__(self):
        return str(self).encode('UTF-8')

    def __getitem__(self, index):
        # Indexing returns a single tag; slicing returns a new Tags.
        item = self._tags[index]
        return item if not isinstance(index, slice) else Tags(item)

    def __add__(self, other):
        return Tags(tuple(self) + tuple(Tags(other)))
class TagPatterns(object):
    """Collection of tag patterns that can be matched against tags."""

    def __init__(self, patterns):
        normalized = Tags(patterns)
        self._patterns = tuple(TagPattern(p) for p in normalized)

    def match(self, tags):
        """Return True if any pattern matches any of the given tags."""
        if not isinstance(tags, Tags):
            tags = Tags(tags)
        return any(pattern.match(tags) for pattern in self._patterns)

    def __contains__(self, tag):
        return self.match(tag)

    def __len__(self):
        return len(self._patterns)

    def __iter__(self):
        return iter(self._patterns)

    def __getitem__(self, index):
        return self._patterns[index]
def TagPattern(pattern):
    """Factory creating a tag pattern object from a pattern string.

    Operators are checked in precedence order: NOT binds loosest, then OR,
    then AND ('&' is an alias for AND).
    """
    if 'NOT' in pattern:
        parts = pattern.split('NOT')
        return _NotTagPattern(parts[0], *parts[1:])
    if 'OR' in pattern:
        return _OrTagPattern(pattern.split('OR'))
    if 'AND' in pattern or '&' in pattern:
        normalized = pattern.replace('&', 'AND')
        return _AndTagPattern(normalized.split('AND'))
    return _SingleTagPattern(pattern)
class _SingleTagPattern(object):
    """Matches tags against a single glob-like pattern."""

    def __init__(self, pattern):
        # Matching ignores case, spaces and underscores.
        self._matcher = Matcher(pattern, ignore='_')

    def match(self, tags):
        return self._matcher.match_any(tags)

    def __unicode__(self):
        return self._matcher.pattern

    if PY3:
        def __str__(self):
            return self.__unicode__()

    def __bool__(self):
        # An empty pattern is falsy; _NotTagPattern relies on this.
        return bool(self._matcher)

    #PY2
    def __nonzero__(self):
        return self.__bool__()
class _AndTagPattern(object):
def __init__(self, patterns):
self._patterns = tuple(TagPattern(p) for p in patterns)
def match(self, tags):
return all(p.match(tags) for p in self._patterns)
class _OrTagPattern(object):
def __init__(self, patterns):
self._patterns = tuple(TagPattern(p) for p in patterns)
def match(self, tags):
return any(p.match(tags) for p in self._patterns)
class _NotTagPattern(object):
def __init__(self, must_match, *must_not_match):
self._first = TagPattern(must_match)
self._rest = _OrTagPattern(must_not_match)
def match(self, tags):
if not self._first:
return not self._rest.match(tags)
return self._first.match(tags) and not self._rest.match(tags) | /robotframework-python3-2.9.tar.gz/robotframework-python3-2.9/src/robot/model/tags.py | 0.52829 | 0.210198 | tags.py | pypi |
import re
from itertools import chain
from robot.utils import NormalizedDict
from .criticality import Criticality
from .stats import TagStat, CombinedTagStat
from .tags import TagPatterns
class TagStatistics(object):
    """Container for tag statistics."""

    def __init__(self, combined_stats):
        #: Dictionary mapping a tag name to an instance of
        #: :class:`~robot.model.stats.TagStat`.
        self.tags = NormalizedDict(ignore=['_'])
        #: List of combined statistics, each an instance of
        #: :class:`~robot.model.stats.TagStat`.
        self.combined = combined_stats

    def visit(self, visitor):
        visitor.visit_tag_statistics(self)

    def __iter__(self):
        all_stats = chain(self.tags.values(), self.combined)
        return iter(sorted(all_stats))
class TagStatisticsBuilder(object):
    """Builds :class:`TagStatistics` incrementally from added tests."""

    def __init__(self, criticality=None, included=None, excluded=None,
                 combined=None, docs=None, links=None):
        self._included = TagPatterns(included)
        self._excluded = TagPatterns(excluded)
        self._info = TagStatInfo(criticality, docs, links)
        self.stats = TagStatistics(self._info.get_combined_stats(combined))

    def add_test(self, test):
        """Add the given test to both per-tag and combined statistics."""
        self._add_tags_to_statistics(test)
        self._add_to_combined_statistics(test)

    def _add_tags_to_statistics(self, test):
        for tag in test.tags:
            if self._is_included(tag):
                # The statistics object is created lazily on the first
                # occurrence of each tag.
                if tag not in self.stats.tags:
                    self.stats.tags[tag] = self._info.get_stat(tag)
                self.stats.tags[tag].add_test(test)

    def _is_included(self, tag):
        # An empty include list means everything is included; excludes are
        # always applied.
        if self._included and not self._included.match(tag):
            return False
        return not self._excluded.match(tag)

    def _add_to_combined_statistics(self, test):
        for comb in self.stats.combined:
            if comb.match(test.tags):
                comb.add_test(test)
class TagStatInfo(object):
    """Creates tag statistics objects enriched with docs and links."""

    def __init__(self, criticality=None, docs=None, links=None):
        self._criticality = criticality or Criticality()
        self._docs = [TagStatDoc(*doc) for doc in docs or []]
        self._links = [TagStatLink(*link) for link in links or []]

    def get_stat(self, tag):
        """Return a TagStat for a single tag."""
        return TagStat(tag, self.get_doc(tag), self.get_links(tag),
                       self._criticality.tag_is_critical(tag),
                       self._criticality.tag_is_non_critical(tag))

    def get_combined_stats(self, combined=None):
        """Return CombinedTagStats for the given (pattern, name) pairs."""
        if not combined:
            return []
        return [self.get_combined_stat(*comb) for comb in combined]

    def get_combined_stat(self, pattern, name=None):
        name = name if name else pattern
        return CombinedTagStat(pattern, name, self.get_doc(name),
                               self.get_links(name))

    def get_doc(self, tag):
        """Join the texts of all matching TagStatDocs with ' & '."""
        matching = (doc.text for doc in self._docs if doc.match(tag))
        return ' & '.join(matching)

    def get_links(self, tag):
        """Return the links of all TagStatLinks matching the given tag."""
        return [link.get_link(tag) for link in self._links if link.match(tag)]
class TagStatDoc(object):
    """Documentation text attached to tags matching a pattern."""

    def __init__(self, pattern, doc):
        self._matcher = TagPatterns(pattern)
        self.text = doc

    def match(self, tag):
        """Return True if the given tag matches this doc's pattern."""
        return self._matcher.match(tag)
class TagStatLink(object):
    """External link attached to tags matching a simple pattern.

    The pattern supports '*' and '?' wildcards, and the substrings they
    match can be referred to in the link and title with %1, %2, ...
    """
    # Raw string: '\*' and '\?' are invalid escape sequences in a normal
    # string literal and warn on modern Python versions.
    _match_pattern_tokenizer = re.compile(r'(\*|\?+)')

    def __init__(self, pattern, link, title):
        self._regexp = self._get_match_regexp(pattern)
        self._link = link
        # Underscores in titles are shown as spaces.
        self._title = title.replace('_', ' ')

    def match(self, tag):
        return self._regexp.match(tag) is not None

    def get_link(self, tag):
        """Return (link, title) for the tag, or None if it does not match."""
        match = self._regexp.match(tag)
        if not match:
            return None
        return self._replace_groups(self._link, self._title, match)

    def _replace_groups(self, link, title, match):
        # Substitute %1, %2, ... with the substrings matched by wildcards.
        # ('placefolder' typo in the original fixed to 'placeholder'.)
        for index, group in enumerate(match.groups()):
            placeholder = '%%%d' % (index + 1)
            link = link.replace(placeholder, group)
            title = title.replace(placeholder, group)
        return link, title

    def _get_match_regexp(self, pattern):
        regexp = '^%s$' % ''.join(self._yield_match_pattern(pattern))
        return re.compile(regexp, re.IGNORECASE)

    def _yield_match_pattern(self, pattern):
        for token in self._match_pattern_tokenizer.split(pattern):
            if token.startswith('?'):
                # Each '?' matches exactly one character.
                yield '(%s)' % ('.' * len(token))
            elif token == '*':
                yield '(.*)'
            else:
                yield re.escape(token)
from robot import utils
from robot.errors import DataError
from .visitor import SuiteVisitor
class SuiteConfigurer(SuiteVisitor):
    """Visitor that configures a suite: renames, re-tags and filters it.

    Dataset-extraction metadata fused onto the original closing line has
    been removed; it made the definition syntactically invalid.
    """

    def __init__(self, name=None, doc=None, metadata=None, set_tags=None,
                 include_tags=None, exclude_tags=None, include_suites=None,
                 include_tests=None, empty_suite_ok=False):
        self.name = name
        self.doc = doc
        self.metadata = metadata
        self.set_tags = set_tags or []
        self.include_tags = include_tags
        self.exclude_tags = exclude_tags
        self.include_suites = include_suites
        self.include_tests = include_tests
        self.empty_suite_ok = empty_suite_ok

    @property
    def add_tags(self):
        # Tags without a '-' prefix are added to all tests.
        return [t for t in self.set_tags if not t.startswith('-')]

    @property
    def remove_tags(self):
        # Tags with a '-' prefix are removed from all tests.
        return [t[1:] for t in self.set_tags if t.startswith('-')]

    def visit_suite(self, suite):
        self._set_suite_attributes(suite)
        self._filter(suite)
        suite.set_tags(self.add_tags, self.remove_tags)

    def _set_suite_attributes(self, suite):
        if self.name:
            suite.name = self.name
        if self.doc:
            suite.doc = self.doc
        if self.metadata:
            suite.metadata.update(self.metadata)

    def _filter(self, suite):
        # Remember the original name; the error message should use it even
        # if filtering changes the suite.
        name = suite.name
        suite.filter(self.include_suites, self.include_tests,
                     self.include_tags, self.exclude_tags)
        if not (suite.test_count or self.empty_suite_ok):
            self._raise_no_tests_error(name)

    def _raise_no_tests_error(self, suite):
        """Raise DataError explaining why no tests matched the selection."""
        selectors = '%s %s' % (self._get_test_selector_msgs(),
                               self._get_suite_selector_msg())
        msg = "Suite '%s' contains no tests %s" % (suite, selectors.strip())
        raise DataError(msg.strip() + '.')

    def _get_test_selector_msgs(self):
        parts = []
        for explanation, selector in [('with tags', self.include_tags),
                                      ('without tags', self.exclude_tags),
                                      ('named', self.include_tests)]:
            if selector:
                parts.append(self._format_selector_msg(explanation, selector))
        return utils.seq2str(parts, quote='')

    def _format_selector_msg(self, explanation, selector):
        # Use the singular form ('with tag') when there is one selector.
        if len(selector) == 1 and explanation[-1] == 's':
            explanation = explanation[:-1]
        return '%s %s' % (explanation, utils.seq2str(selector, lastsep=' or '))

    def _get_suite_selector_msg(self):
        if not self.include_suites:
            return ''
        return self._format_selector_msg('in suites', self.include_suites)
from robot.utils import setter
from .tags import TagPatterns
from .namepatterns import SuiteNamePatterns, TestNamePatterns
from .visitor import SuiteVisitor
class EmptySuiteRemover(SuiteVisitor):
    """Visitor that removes child suites containing no tests."""

    def end_suite(self, suite):
        # Filtering in end_suite means children have already been visited.
        suite.suites = [child for child in suite.suites if child.test_count]

    def visit_test(self, test):
        pass  # Tests need no processing.

    def visit_keyword(self, kw):
        pass  # Keywords need no processing.
class Filter(EmptySuiteRemover):
    """Visitor that filters tests by suite name, test name and tags.

    Dataset-extraction metadata fused onto the original closing line has
    been removed; it made the definition syntactically invalid.
    """

    def __init__(self, include_suites=None, include_tests=None,
                 include_tags=None, exclude_tags=None):
        self.include_suites = include_suites
        self.include_tests = include_tests
        self.include_tags = include_tags
        self.exclude_tags = exclude_tags

    @setter
    def include_suites(self, suites):
        return SuiteNamePatterns(suites) \
            if not isinstance(suites, SuiteNamePatterns) else suites

    @setter
    def include_tests(self, tests):
        return TestNamePatterns(tests) \
            if not isinstance(tests, TestNamePatterns) else tests

    @setter
    def include_tags(self, tags):
        return TagPatterns(tags) if not isinstance(tags, TagPatterns) else tags

    @setter
    def exclude_tags(self, tags):
        return TagPatterns(tags) if not isinstance(tags, TagPatterns) else tags

    def start_suite(self, suite):
        # Returning False stops visiting this suite's children.
        if not self:
            return False
        if hasattr(suite, 'starttime'):
            # Filtering invalidates previously recorded execution times.
            suite.starttime = suite.endtime = None
        if self.include_suites:
            return self._filter_by_suite_name(suite)
        if self.include_tests:
            suite.tests = self._filter(suite, self._included_by_test_name)
        if self.include_tags:
            suite.tests = self._filter(suite, self._included_by_tags)
        if self.exclude_tags:
            suite.tests = self._filter(suite, self._not_excluded_by_tags)
        return bool(suite.suites)

    def _filter_by_suite_name(self, suite):
        if self.include_suites.match(suite.name, suite.longname):
            # Matching suite: apply the remaining criteria to its whole
            # subtree and stop visiting from here.
            suite.visit(Filter(include_suites=[],
                               include_tests=self.include_tests,
                               include_tags=self.include_tags,
                               exclude_tags=self.exclude_tags))
            return False
        suite.tests = []
        return True

    def _filter(self, suite, filter):
        return [t for t in suite.tests if filter(t)]

    def _included_by_test_name(self, test):
        return self.include_tests.match(test.name, test.longname)

    def _included_by_tags(self, test):
        return self.include_tags.match(test.tags)

    def _not_excluded_by_tags(self, test):
        return not self.exclude_tags.match(test.tags)

    def __bool__(self):
        # A filter with no criteria is falsy and filters nothing.
        return bool(self.include_suites or self.include_tests or
                    self.include_tags or self.exclude_tags)

    #PY2
    def __nonzero__(self):
        return self.__bool__()
class SuiteVisitor(object):
    """Abstract class to ease traversing through the test suite structure.

    See the :mod:`module level <robot.model.visitor>` documentation for more
    information and an example. Dataset-extraction metadata fused onto the
    original closing line has been removed; it made the definition
    syntactically invalid.
    """

    def visit_suite(self, suite):
        """Implements traversing through the suite and its direct children.

        Can be overridden to allow modifying the passed in ``suite`` without
        calling :func:`start_suite` or :func:`end_suite` nor visiting child
        suites, tests or keywords (setup and teardown) at all.
        """
        if self.start_suite(suite) is not False:
            suite.keywords.visit(self)
            suite.suites.visit(self)
            suite.tests.visit(self)
            self.end_suite(suite)

    def start_suite(self, suite):
        """Called when suite starts. Default implementation does nothing.

        Can return explicit ``False`` to stop visiting.
        """
        pass

    def end_suite(self, suite):
        """Called when suite ends. Default implementation does nothing."""
        pass

    def visit_test(self, test):
        """Implements traversing through the test and its keywords.

        Can be overridden to allow modifying the passed in ``test`` without
        calling :func:`start_test` or :func:`end_test` nor visiting keywords.
        """
        if self.start_test(test) is not False:
            test.keywords.visit(self)
            self.end_test(test)

    def start_test(self, test):
        """Called when test starts. Default implementation does nothing.

        Can return explicit ``False`` to stop visiting.
        """
        pass

    def end_test(self, test):
        """Called when test ends. Default implementation does nothing."""
        pass

    def visit_keyword(self, kw):
        """Implements traversing through the keyword and its child keywords.

        Can be overridden to allow modifying the passed in ``kw`` without
        calling :func:`start_keyword` or :func:`end_keyword` nor visiting
        child keywords.
        """
        if self.start_keyword(kw) is not False:
            kw.keywords.visit(self)
            kw.messages.visit(self)
            self.end_keyword(kw)

    def start_keyword(self, keyword):
        """Called when keyword starts. Default implementation does nothing.

        Can return explicit ``False`` to stop visiting.
        """
        pass

    def end_keyword(self, keyword):
        """Called when keyword ends. Default implementation does nothing."""
        pass

    def visit_message(self, msg):
        """Implements visiting the message.

        Can be overridden to allow modifying the passed in ``msg`` without
        calling :func:`start_message` or :func:`end_message`.
        """
        if self.start_message(msg) is not False:
            self.end_message(msg)

    def start_message(self, msg):
        """Called when message starts. Default implementation does nothing.

        Can return explicit ``False`` to stop visiting.
        """
        pass

    def end_message(self, msg):
        """Called when message ends. Default implementation does nothing."""
        pass
describe("Searching by tags", function () {
it("should find tags by name", function () {
expect(model.containsTag(['name'], 'name')).toBeTruthy();
expect(model.containsTag(['x', 'y', 'z'], 'y')).toBeTruthy();
expect(model.containsTag([], 'name')).not.toBeTruthy();
expect(model.containsTag(['x', 'y'], 'notthere')).not.toBeTruthy();
});
it("should find tags case insensitively", function() {
expect(model.containsTag(['name'], 'Name')).toBeTruthy();
expect(model.containsTag(['NaMe'], 'namE')).toBeTruthy();
});
it("should find tags space insensitively", function() {
expect(model.containsTag(['xx', 'yy', 'zz'], 'y y')).toBeTruthy();
expect(model.containsTag(['x x', 'y y', 'z z'], 'XX')).toBeTruthy();
});
it("should find tags underscore insensitively", function() {
expect(model.containsTagPattern(['a_a_1', 'x'], 'a_a_*')).toBeTruthy();
expect(model.containsTagPattern(['a_a_1', 'x'], 'a a *')).toBeTruthy();
expect(model.containsTagPattern(['a a 1', 'x'], '_a__a__*_')).toBeTruthy();
});
it("should find tags with patterns * and ?", function() {
expect(model.containsTagPattern(['x', 'y'], 'x*')).toBeTruthy();
expect(model.containsTagPattern(['xxxyyy'], 'x*')).toBeTruthy();
expect(model.containsTagPattern(['xyz'], 'x?z')).toBeTruthy();
expect(model.containsTagPattern(['-x-'], '*x*')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'x')).toBeTruthy();
});
it("should find tags combined with AND", function() {
expect(model.containsTagPattern(['x', 'y'], 'xANDy')).toBeTruthy();
expect(model.containsTagPattern(['xx', 'Yy', 'ZZ'], 'Y Y AND X X AND zz')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'xxANDy')).not.toBeTruthy();
});
it("should find tags combined with OR", function() {
expect(model.containsTagPattern(['x', 'y'], 'xORy')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'xORz')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'z OR zz OR X')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'xxORyy')).not.toBeTruthy();
});
it("should find tags combined with OR and AND", function() {
expect(model.containsTagPattern(['x', 'y'], 'x OR y AND z')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'z OR y AND x')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'x AND y OR z')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'z AND y OR x')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'x AND z OR x AND y')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'x OR z AND x OR y')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'x AND z OR y AND z')).not.toBeTruthy();
});
it("should find tags combined with NOT", function() {
expect(model.containsTagPattern(['x', 'y'], 'xNOTz')).toBeTruthy();
expect(model.containsTagPattern(['X X', 'Y Y'], 'xx NOT yy')).not.toBeTruthy();
expect(model.containsTagPattern(['xx'], 'NOTyy')).toBeTruthy();
expect(model.containsTagPattern([], 'NOTyy')).toBeTruthy();
expect(model.containsTagPattern([], ' NOT yy')).toBeTruthy();
expect(model.containsTagPattern(['yy'], ' NOT yy')).not.toBeTruthy();
});
it("should find tags combined with multiple NOTs", function() {
expect(model.containsTagPattern(['a'], 'a NOT c NOT d')).toBeTruthy();
expect(model.containsTagPattern(['a', 'b'], 'a NOT c NOT d')).toBeTruthy();
expect(model.containsTagPattern(['a', 'b'], 'a NOT b NOT c')).not.toBeTruthy();
expect(model.containsTagPattern(['a', 'b', 'c'], 'a NOT b NOT c')).not.toBeTruthy();
expect(model.containsTagPattern(['x'], 'a NOT c NOT d')).not.toBeTruthy();
});
it("should find tags combined with NOT and AND", function() {
expect(model.containsTagPattern(['x', 'y', 'z'], 'x NOT y AND z')).not.toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'x NOT y AND z')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'x NOT z AND y')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'x AND y NOT z')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y', 'z'], 'x AND y NOT z')).not.toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'x AND y NOT x AND z')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y', 'z'], 'x AND y NOT x AND z NOT y AND z')).not.toBeTruthy();
expect(model.containsTagPattern(['x', 'y', 'z'], 'x AND y NOT x AND z NOT xxx')).not.toBeTruthy();
});
it("should find tags combined with NOT and OR", function() {
expect(model.containsTagPattern(['a'], 'a NOT c OR d')).toBeTruthy();
expect(model.containsTagPattern(['a', 'b'], 'a NOT c OR d')).toBeTruthy();
expect(model.containsTagPattern(['a', 'b'], 'a NOT b OR c')).not.toBeTruthy();
expect(model.containsTagPattern(['a', 'b', 'c'], 'a NOT b OR c')).not.toBeTruthy();
expect(model.containsTagPattern(['x'], 'a NOT c OR d')).not.toBeTruthy();
expect(model.containsTagPattern(['x'], 'a OR x NOT b')).toBeTruthy();
expect(model.containsTagPattern(['x', 'y'], 'x OR a NOT y')).not.toBeTruthy();
});
it("should find tags combined with patterns and AND and NOT", function() {
expect(model.containsTagPattern(['xx', 'yy'], 'x* AND y?')).toBeTruthy();
expect(model.containsTagPattern(['xxxyyy'], 'x* NOT y*')).toBeTruthy();
expect(model.containsTagPattern(['xxxyyy'], 'x* NOT *y')).not.toBeTruthy();
expect(model.containsTagPattern(['xx', 'yy'], '* NOT x? NOT ?y')).not.toBeTruthy();
});
it("should escape regex meta characters in patterns", function() {
expect(model.containsTagPattern(['xyz'], 'x.*')).not.toBeTruthy();
expect(model.containsTagPattern(['+.z'], '+.?')).toBeTruthy();
});
}); | /robotframework-python3-2.9.tar.gz/robotframework-python3-2.9/utest/webcontent/spec/ContainsTag.js | 0.9281 | 0.788868 | ContainsTag.js | pypi |
// Specs for the StringStore text decoder: id 0 is the empty string, entries
// prefixed with "*" are plain text, and anything else is compressed data
// that gets decoded on first access and cached back into the source array.
describe("Text decoder", function () {
    // Build `string` repeated `times` times.
    function multiplyString(string, times) {
        var result = "";
        for (var i = 0; i < times; i++){
            result += string;
        }
        return result;
    }
    it("should have empty string with id 0", function () {
        var strings = window.testdata.StringStore(["*"]);
        var empty = strings.get(0);
        expect(empty).toEqual("");
    });
    it("should uncompress", function () {
        var strings = window.testdata.StringStore(["*", "eNorzk3MySmmLQEASKop9Q=="]);
        var decompressed = strings.get(1);
        var expected = multiplyString("small", 20);
        expect(decompressed).toEqual(expected);
    });
    it("should uncompress and replace compressed in memory", function () {
        var stringArray = ["*", "eNorzk3MySmmLQEASKop9Q=="];
        var strings = window.testdata.StringStore(stringArray);
        expect(stringArray[1]).toEqual("eNorzk3MySmmLQEASKop9Q==");
        strings.get(1);
        var expected = multiplyString("small", 20);
        // get() writes the decoded value back, marked plain with a "*" prefix.
        expect(stringArray[1]).toEqual("*"+expected);
    });
    it("should handle plain text", function () {
        var strings = window.testdata.StringStore(["*", "*plain text"]);
        var actual = strings.get(1);
        expect(actual).toEqual("plain text");
    });
});
// Return the child suite at `index`; when no parent is given, default to
// the children of the currently loaded root suite.
function subSuite(index, suite) {
    var parent = suite || window.testdata.suite();
    return parent.suites()[index];
}
// First test case of the given suite.
function firstTest(suite) {
    var tests = suite.tests();
    return tests[0];
}
// The n'th keyword child of a suite/test/keyword model item.
function nthKeyword(item, n) {
    var keywords = item.keywords();
    return keywords[n];
}
// Specs for parsing the basic single-suite output: suite, test, keyword,
// for-loop and message models built by window.testdata.
describe("Handling Suite", function () {
    // Build an absolute date from output-relative millis (currently unused
    // by the specs in this group).
    function getDate(offset) {
        return new Date(window.output.baseMillis + offset);
    }
    beforeEach(function () {
        window.output = window.suiteOutput;
    });
    // Assert all six aggregate statistics of a suite at once.
    function expectStats(suite, total, passed, critical, criticalPassed){
        expect(suite.total).toEqual(total);
        expect(suite.totalPassed).toEqual(passed);
        expect(suite.totalFailed).toEqual(total-passed);
        expect(suite.critical).toEqual(critical);
        expect(suite.criticalPassed).toEqual(criticalPassed);
        expect(suite.criticalFailed).toEqual(critical-criticalPassed);
    }
    function endsWith(string, ending) {
        var index = string.lastIndexOf(ending);
        return string.substring(index) == ending;
    }
    it("should parse suite", function () {
        var suite = window.testdata.suite();
        expect(suite.name).toEqual("Suite");
        expect(suite.id).toEqual("s1");
        expect(suite.status).toEqual("PASS");
        expect(endsWith(suite.source, "Suite.txt")).toEqual(true);
        expect(suite.doc()).toEqual("<p>suite doc</p>");
        expect(suite.times).toBeDefined();
        expect(suite.times.elapsedMillis).toBeGreaterThan(0);
        expect(suite.times.elapsedMillis).toBeLessThan(1000);
        expectStats(suite, 1, 1, 1, 1);
        expect(suite.metadata[0]).toEqual(["meta", "<p>data</p>"]);
        expect(suite.childrenNames).toEqual(['keyword', 'suite', 'test']);
    });
    it("should parse test", function () {
        var test = firstTest(window.testdata.suite());
        expect(test.name).toEqual("Test");
        expect(test.id).toEqual("s1-t1");
        expect(test.status).toEqual("PASS");
        expect(test.fullName).toEqual("Suite.Test");
        expect(test.doc()).toEqual("<p>test doc</p>");
        expect(test.tags).toEqual(["tag1", "tag2"]);
        expect(test.times).toBeDefined();
        expect(test.times.elapsedMillis).toBeGreaterThan(0);
        // A test can never take longer than its whole suite.
        expect(test.times.elapsedMillis).toBeLessThan(window.testdata.suite().times.elapsedMillis+1);
        expect(test.timeout).toEqual("1 second");
        expect(test.childrenNames).toEqual(['keyword']);
    });
    it("should parse keyword", function () {
        var kw = nthKeyword(firstTest(window.testdata.suite()), 0);
        expect(kw.name).toEqual("Sleep");
        expect(kw.libname).toEqual("BuiltIn");
        expect(kw.id).toEqual("s1-t1-k1");
        expect(kw.status).toEqual("PASS");
        expect(kw.times).toBeDefined();
        expect(kw.times.elapsedMillis).toBeGreaterThan(99);
        expect(kw.times.elapsedMillis).toBeLessThan(200);
        expect(kw.type).toEqual("KEYWORD");
        expect(kw.childrenNames).toEqual(['keyword', 'message'])
    });
    it("should parse for loop", function() {
        var forloop = nthKeyword(firstTest(window.testdata.suite()), 1);
        expect(forloop.name).toEqual("${i} IN RANGE [ 2 ]");
        expect(forloop.type).toEqual("FOR");
        var foritem = nthKeyword(forloop, 0);
        expect(foritem.name).toEqual("${i} = 0");
        expect(foritem.type).toEqual("VAR");
        foritem = nthKeyword(forloop, 1);
        expect(foritem.name).toEqual("${i} = 1");
        expect(foritem.type).toEqual("VAR");
    });
    it("should parse message", function () {
        var message = nthKeyword(firstTest(window.testdata.suite()), 0).messages()[0];
        expect(message.text).toEqual("Slept 100 milliseconds");
    });
});
// Specs for suite/test/keyword setup and teardown parsing, and for child
// ordering (setup first, body keywords, teardown last).
describe("Setups and teardowns", function () {
    beforeEach(function () {
        window.output = window.setupsAndTeardownsOutput;
    });
    // Assert a keyword's type, name, owning library and argument string.
    function checkTypeNameArgs(kw, type, name, libname, args) {
        expect(kw.type).toEqual(type);
        expect(kw.name).toEqual(name);
        expect(kw.libname).toEqual(libname);
        expect(kw.arguments).toEqual(args);
    }
    it("should parse suite setup", function () {
        var suite = window.testdata.suite();
        checkTypeNameArgs(suite.keywords()[0], "SETUP", "Log", "BuiltIn", "suite setup");
    });
    it("should parse suite teardown", function () {
        var suite = window.testdata.suite();
        checkTypeNameArgs(suite.keywords()[1], "TEARDOWN", "Log", "BuiltIn", "suite teardown");
    });
    it("should give navigation uniqueId list for a suite teardown keyword", function () {
        var callbackExecuted = false;
        window.testdata.ensureLoaded("s1-k2", function (uniqueIds) {
            expect(uniqueIds[0]).toEqual(window.testdata.suite().id);
            expect(uniqueIds[1]).toEqual(nthKeyword(window.testdata.suite(), 1).id);
            expect(uniqueIds.length).toEqual(2);
            callbackExecuted = true;
        });
        // Guards against the callback silently never firing.
        expect(callbackExecuted).toBeTruthy();
    });
    it("should parse test setup", function () {
        checkTypeNameArgs(nthKeyword(firstTest(window.testdata.suite()), 0), "SETUP", "Log", "BuiltIn", "test setup");
    });
    it("should parse test teardown", function () {
        var test = firstTest(window.testdata.suite());
        checkTypeNameArgs(nthKeyword(test, 2), "TEARDOWN", "Log", "BuiltIn", "test teardown");
    });
    it("should give suite children in order", function () {
        var suite = window.testdata.suite();
        var children = suite.children();
        expect(children[0]).toEqual(nthKeyword(suite, 0));
        expect(children[1]).toEqual(nthKeyword(suite, 1));
        expect(children[2]).toEqual(firstTest(suite));
    });
    it("should give test children in order", function () {
        var test = firstTest(window.testdata.suite());
        var children = test.children();
        checkTypeNameArgs(children[0], "SETUP", "Log", "BuiltIn", "test setup");
        checkTypeNameArgs(children[1], "KEYWORD", "Keyword with teardown", "", "");
        checkTypeNameArgs(children[2], "TEARDOWN", "Log", "BuiltIn", "test teardown");
    });
    it("should parse keyword teardown", function () {
        var test = firstTest(window.testdata.suite());
        var children = test.children();
        checkTypeNameArgs(children[1].children()[1], "TEARDOWN", "Log", "BuiltIn", "keyword teardown");
    });
});
// Specs for util date/time formatting: fixed-width "YYYYMMDD HH:MM:SS.mmm".
describe("Time and date formatting", function (){
    it("should pad 0 values to full length", function () {
        // 7-1: Date months are 0-based, so this is July.
        expect(util.dateTimeFromDate(new Date(2011,7-1,1,0,0,0,0))).toEqual("20110701 00:00:00.000");
        expect(util.dateFromDate(new Date(2011,7-1,1,0,0,0,0))).toEqual("20110701");
        expect(util.timeFromDate(new Date(2011,7-1,1,0,0,0,0))).toEqual("00:00:00.000");
    });
    it("should pad non empty number to full length", function () {
        expect(util.dateTimeFromDate(new Date(2011,7-1,14,12,5,55,101))).toEqual("20110714 12:05:55.101");
    });
});
// Specs for log message parsing at the different log levels and for the
// propagation of WARN messages into the error iterator.
describe("Handling messages", function (){
    beforeEach(function (){
        window.output = window.messagesOutput;
    });
    function expectMessage(message, txt, level) {
        expect(message.text).toEqual(txt);
        expect(message.level).toEqual(level);
    }
    // Messages of the kw'th keyword of the first test.
    function kwMessages(kw) {
        return nthKeyword(firstTest(window.testdata.suite()), kw).messages();
    }
    function kwMessage(kw) {
        return kwMessages(kw)[0];
    }
    it("should handle info level message", function () {
        expectMessage(kwMessage(1), "infolevelmessage", "INFO");
    });
    it("should handle warn level message", function () {
        expectMessage(kwMessage(2), "warning", "WARN");
    });
    it("should handle debug level message", function () {
        var messages = kwMessages(4);
        expectMessage(messages[messages.length-2], "debugging", "DEBUG");
    });
    it("should handle trace level message", function () {
        var messages = kwMessages(5);
        expectMessage(messages[messages.length-2], "tracing", "TRACE");
    });
    it("should handle html level message", function () {
        expectMessage(kwMessage(0), "<h1>html</h1>", "INFO");
    });
    it("should show warning in errors", function () {
        var firstError = window.testdata.errorIterator().next();
        expectMessage(firstError, "warning", "WARN");
        var callbackExecuted = false;
        window.testdata.ensureLoaded(firstError.link, function (pathToKeyword) {
            var errorKw = window.testdata.findLoaded(pathToKeyword[pathToKeyword.length-1]);
            expect(errorKw.messages()[0].level).toEqual("WARN");
            callbackExecuted = true;
        });
        expect(callbackExecuted).toBeTruthy();
    });
    it("should handle fail level message", function () {
        expectMessage(kwMessage(7), "HTML tagged content <a href='http://www.robotframework.org'>Robot Framework</a>", "FAIL");
    });
});
// Specs for how a failing parent-suite teardown is reflected in the
// statuses and messages of child suites and tests.
describe("Parent Suite Teardown Failure", function (){
    beforeEach(function (){
        window.output = window.teardownFailureOutput;
    });
    it("should show test status as failed", function (){
        var test = firstTest(window.testdata.suite().suites()[0]);
        expect(test.status).toEqual("FAIL");
    });
    it("should show suite status as failed", function (){
        var suite = window.testdata.suite().suites()[0];
        expect(suite.status).toEqual("FAIL");
    });
    it("should show test message 'Parent suite teardown failed.'", function (){
        var test = firstTest(window.testdata.suite().suites()[0]);
        expect(test.message()).toEqual("Parent suite teardown failed:\nAssertionError");
    });
    // Intermediate suites get no message of their own ...
    it("should not show suite message", function (){
        var suite = window.testdata.suite().suites()[0];
        expect(suite.message()).toEqual("");
    });
    // ... only the root suite whose teardown actually failed does.
    it("should show root suite message 'Suite teardown failed:\nAssertionError'", function (){
        var root = window.testdata.suite();
        expect(root.message()).toEqual("Suite teardown failed:\nAssertionError");
    });
});
// Spec for a test that fails on its own *and* has a failing parent suite
// teardown: both messages are combined in the test message.
describe("Parent Suite Teardown and Test failure", function(){
    beforeEach(function (){
        window.output = window.teardownFailureOutput;
    });
    it("should show test message 'In test\n\nAlso parent suite teardown failed.'", function (){
        var test = window.testdata.suite().suites()[0].tests()[1];
        expect(test.message()).toEqual("In test\n\nAlso parent suite teardown failed:\nAssertionError");
    });
})
// Spec for a plain (no teardown involvement) test failure message.
describe("Test failure message", function (){
    beforeEach(function () {
        window.output = window.passingFailingOutput;
    });
    it("should show test failure message ''", function (){
        var test = window.testdata.suite().tests()[1];
        expect(test.message()).toEqual("In test");
    });
});
// Specs for iterating a test's keywords.
describe("Iterating Keywords", function (){
    beforeEach(function (){
        window.output = window.testsAndKeywordsOutput;
    });
    // First test of the root suite of the current output.
    function test(){
        return firstTest(window.testdata.suite());
    }
    function kw(index){
        return test().keyword(index);
    }
    it("should give correct number of keywords", function () {
        expect(test().keywords().length).toEqual(4);
        expect(nthKeyword(test(), 0).keywords().length).toEqual(1);
        expect(nthKeyword(nthKeyword(test(), 0), 0).keywords().length).toEqual(0);
    });
    it("should be possible to go through all the keywords in order", function () {
        var expectedKeywords = ["kw1", "kw2", "kw3", "kw4"];
        // NOTE(review): `numberOfKeywords` is not defined anywhere in these
        // specs; if the model does not define it either, the bound is
        // undefined, the loop never runs and this spec passes vacuously.
        // Confirm against the model and consider `test().keywords().length`.
        for(var i = 0; i < test().numberOfKeywords; i++){
            expect(kw(i).name).toEqual(expectedKeywords[i]);
        }
    });
    it("should give keyword children in order", function () {
        var keyword = nthKeyword(firstTest(window.testdata.suite()), 0);
        var children = keyword.children();
        expect(children[0]).toEqual(nthKeyword(keyword, 0));
    });
});
describe("Iterating Tests", function (){
beforeEach(function (){
window.output = window.testsAndKeywordsOutput;
});
it("should give correct number of tests", function (){
expect(window.testdata.suite().tests().length).toEqual(4);
});
it("should be possible to go through all the tests in order", function () {
var expectedTests = ["Test 1", "Test 2", "Test 3", "Test 4"];
var tests = window.testdata.suite().tests();
for(var i = 0; i <tests.length ; i++){
expect(tests[i].name).toEqual(expectedTests[i]);
}
});
});
// Specs for iterating nested suites, suite full names, and the navigation
// uniqueId lists produced by ensureLoaded.
describe("Iterating Suites", function () {
    beforeEach(function (){
        window.output = window.allDataOutput;
    });
    it("should give correct number of suites", function (){
        var suite = window.testdata.suite();
        var subSuites = suite.suites();
        expect(subSuites.length).toEqual(5);
        expect(subSuites[0].suites().length).toEqual(0);
        expect(subSuites[3].suites().length).toEqual(1);
    });
    it("should be possible to iterate suites", function (){
        var tests = 0;
        var subSuites = window.testdata.suite().suites();
        // FIX: was `for (var i = 0 in subSuites)` -- a nonstandard for-in
        // initializer (Annex B legacy syntax, a SyntaxError in strict mode);
        // the initial value 0 was discarded anyway, so behavior is unchanged.
        for (var i in subSuites){
            for (var j in subSuites[i].suites()){
                var testsuite = subSuites[i].suites()[j];
                tests += testsuite.tests().length;
                expect(testsuite.tests().length).toBeGreaterThan(0);
            }
        }
        expect(tests).toEqual(2);
    });
    it("should show correct full names", function (){
        var root = window.testdata.suite();
        expect(root.fullName).toEqual("Data");
        expect(root.suites()[0].fullName).toEqual("Data.Messages");
        expect(root.suites()[3].suites()[0].fullName).toEqual("Data.teardownFailure.PassingFailing");
        expect(root.suites()[3].suites()[0].tests()[0].fullName).toEqual("Data.teardownFailure.PassingFailing.Passing");
    });
    // Run ensureLoaded for `path` and fail if the callback never fires.
    function testensureLoaded(path, callback) {
        var callbackExecuted = false;
        window.testdata.ensureLoaded(path, function (ids) {
            callback(ids);
            callbackExecuted = true;
        });
        expect(callbackExecuted).toBeTruthy();
    }
    it("should give navigation uniqueId list for a test", function (){
        testensureLoaded("s1-s4-s1-t1", function (uniqueIdList) {
            var root = window.testdata.suite();
            expect(uniqueIdList[0]).toEqual(root.id);
            expect(uniqueIdList[1]).toEqual(subSuite(3).id);
            expect(uniqueIdList[2]).toEqual(subSuite(3).suites()[0].id);
            expect(uniqueIdList[3]).toEqual(subSuite(3).suites()[0].tests()[0].id);
            expect(uniqueIdList.length).toEqual(4);
        });
    });
    it("should give navigation uniqueId list for a keyword", function (){
        testensureLoaded("s1-s4-s1-t1-k1", function (uniqueIdList) {
            var root = window.testdata.suite();
            expect(uniqueIdList[0]).toEqual(root.id);
            expect(uniqueIdList[1]).toEqual(subSuite(3).id);
            expect(uniqueIdList[2]).toEqual(subSuite(3).suites()[0].id);
            expect(uniqueIdList[3]).toEqual(subSuite(3).suites()[0].tests()[0].id);
            expect(uniqueIdList[4]).toEqual(subSuite(3).suites()[0].tests()[0].keywords()[0].id);
            expect(uniqueIdList.length).toEqual(5);
        });
    });
    it("should give navigation uniqueId list for a suite", function (){
        testensureLoaded("s1-s4-s1", function (uniqueIdList) {
            var root = window.testdata.suite();
            expect(uniqueIdList[0]).toEqual(root.id);
            expect(uniqueIdList[1]).toEqual(root.suites()[3].id);
            expect(uniqueIdList[2]).toEqual(root.suites()[3].suites()[0].id);
            expect(uniqueIdList.length).toEqual(3);
        });
    });
    it("should give navigation uniqueId list for the root suite", function (){
        testensureLoaded("s1", function (uniqueIdList) {
            var root = window.testdata.suite();
            expect(uniqueIdList[0]).toEqual(root.id);
            expect(uniqueIdList.length).toEqual(1);
        });
    });
});
// Specs for the uniqueId scheme ("s1-s4-s1-t1-k1" style) and for resolving
// ids back to model objects via findLoaded.
describe("Element ids", function (){
    beforeEach(function (){
        window.output = window.allDataOutput;
    });
    it("should give id for the main suite", function (){
        var suite = window.testdata.suite();
        expect(window.testdata.findLoaded(suite.id)).toEqual(suite);
        expect(suite.id).toEqual("s1");
    });
    it("should give id for a test", function (){
        var test = subSuite(0, subSuite(3)).tests()[0];
        expect(window.testdata.findLoaded(test.id)).toEqual(test);
        expect(test.id).toEqual("s1-s4-s1-t1");
    });
    it("should give id for a subsuite", function (){
        var subsuite = subSuite(3);
        expect(window.testdata.findLoaded(subsuite.id)).toEqual(subsuite);
        expect(subsuite.id).toEqual("s1-s4");
    });
    it("should give id for a keyword", function (){
        var kw = subSuite(0, subSuite(3)).tests()[0].keywords()[0];
        expect(window.testdata.findLoaded(kw.id)).toEqual(kw);
        expect(kw.id).toEqual("s1-s4-s1-t1-k1");
    });
    it("should give id for a message", function (){
        var msg = subSuite(0, subSuite(3)).tests()[0].keywords()[0].messages()[0];
        expect(window.testdata.findLoaded(msg.id)).toEqual(msg);
    });
    it("should find right elements with right ids", function (){
        var suite = subSuite(3);
        var kw = subSuite(0, suite).tests()[0].keywords()[0];
        expect(kw.id).not.toEqual(suite.id);
        expect(window.testdata.findLoaded(kw.id)).toEqual(kw);
        expect(window.testdata.findLoaded(suite.id)).toEqual(suite);
    });
});
// Specs verifying model elements are memoized: repeated accessor calls
// return the same object.
describe("Elements are created only once", function (){
    beforeEach(function (){
        window.output = window.passingFailingOutput;
    });
    it("should create suite only once", function (){
        var main1 = window.testdata.suite();
        var main2 = window.testdata.suite();
        expect(main1).not.toBeUndefined();
        expect(main1).toEqual(main2);
    });
    it("should create same test only once", function (){
        var test1 = window.testdata.suite().tests()[1];
        var test2 = window.testdata.suite().tests()[1];
        expect(test1).not.toBeUndefined();
        expect(test1).toEqual(test2);
    });
    it("should create same keyword only once", function (){
        var kw1 = window.testdata.suite().tests()[0].keywords()[0];
        var kw2 = window.testdata.suite().tests()[0].keywords()[0];
        expect(kw1).not.toBeUndefined();
        expect(kw1).toEqual(kw2);
    });
});
// Specs for --splitlog outputs: keyword data lives in
// splittingOutputKeywordsN / splittingOutputStringsN globals and test
// children are loaded lazily on first demand.
describe("Should split tests and keywords with --splitlog", function (){
    beforeEach(function (){
        window.output = window.splittingOutput;
        var i = 1;
        // Expose the split parts under the names the loader expects
        // (keywordsN / stringsN).
        while (window['splittingOutputKeywords'+i]) {
            window['keywords'+i] = window['splittingOutputKeywords'+i];
            window['strings'+i] = window['splittingOutputStrings'+i];
            i = i+1;
        }
        // Wrap the keyword-loading callback factory so specs can observe
        // when a parent's children have actually been loaded.
        var originalGetter = window.fileLoading.getCallbackHandlerForKeywords;
        window.fileLoading.getCallbackHandlerForKeywords = function(parent) {
            var normalResult = originalGetter(parent);
            function wrapper(callable) {
                parent.isChildrenLoaded = true;
                normalResult(callable);
            }
            return wrapper;
        }
    });
    it("should not load children before needed", function (){
        var suite = window.testdata.suite();
        var test = firstTest(subSuite(1, suite));
        expect(test.isChildrenLoaded).not.toBeTruthy();
        expect(test.children()).toBeUndefined();
        test.callWhenChildrenReady(function() {});
        expect(test.isChildrenLoaded).toBeTruthy();
        expect(test.children()).not.toBeUndefined();
    });
});
window.output = {};
// Specs for window.stats.Statistics: total/tag/suite statistics built from
// raw {label, pass, fail, ...} records.
describe("Statistics", function () {
    var totals = [
        {label: "Critical Tests",
         pass: 1,
         fail: 1},
        {label: "All Tests",
         pass: 2,
         fail: 3}
    ];
    var tags = [
        {label: "first tag",
         pass: 3,
         fail: 0,
         doc: "tagdoc",
         info: "critical",
         // Link syntax: "title:url" entries separated by ":::".
         links: "title:url:::t2:u2"},
        {label: "second tag",
         pass: 1,
         fail: 0}
    ];
    var suites = [
        {label: "Suite",
         pass: 4,
         fail: 0,
         name: "Suite"},
        {label:"Suite.Sub",
         pass: 4,
         fail: 0,
         name: "Sub"}
    ];
    var stats = window.stats.Statistics(totals, tags, suites);
    var totalStats = stats.total;
    var tagStats = stats.tag;
    var suiteStats = stats.suite;
    function verifyBasicStatAttributes(stat, label, pass, fail) {
        expect(stat.label).toEqual(label);
        expect(stat.pass).toEqual(pass);
        expect(stat.fail).toEqual(fail);
        expect(stat.total).toEqual(pass + fail);
    }
    function verifySuiteStatNames(stat, name, parentName) {
        expect(stat.name).toEqual(name);
        expect(stat.formatParentName()).toEqual(parentName);
    }
    it("should contain critical stats", function () {
        verifyBasicStatAttributes(totalStats[0], 'Critical Tests', 1, 1);
    });
    it("should contain all stats", function () {
        verifyBasicStatAttributes(totalStats[1], 'All Tests', 2, 3);
    });
    it("should contain tag statistics", function () {
        var firstTagStats = tagStats[0];
        verifyBasicStatAttributes(firstTagStats, 'first tag', 3, 0);
        expect(firstTagStats.doc).toEqual('tagdoc');
        var secondTagStats = tagStats[1];
        verifyBasicStatAttributes(secondTagStats, 'second tag', 1, 0);
    });
    it("should contain tag stat links", function () {
        var tagWithLink = tagStats[0];
        expect(tagWithLink.links).toEqual([
            {title: "title", url: "url"},
            {title: "t2", url: "u2"}
        ]);
        var tagWithNoLink = tagStats[1];
        expect(tagWithNoLink.links).toEqual([])
    });
    it("should contain suite statistics", function () {
        var suitestats = suiteStats[0];
        verifyBasicStatAttributes(suitestats, 'Suite', 4, 0);
    });
    it("should contain names and parent names for suite stats", function () {
        var statNoParents = suiteStats[0];
        verifySuiteStatNames(statNoParents, 'Suite', '');
        var statWithParents = suiteStats[1];
        verifySuiteStatNames(statWithParents, 'Sub', 'Suite . ');
    });
});
// Specs for pass/fail percentage and bar-width rounding rules: percentages
// round to one decimal but a non-zero count never shows as 0; widths clamp
// a non-zero share to at least 1 and the pair never exceeds 100.
describe("Statistics percents and widths", function () {
    var totals = [
        {label: "Critical Tests",
         pass: 0,
         fail: 0},
        {label: "All Tests",
         pass:2,
         fail:1}
    ];
    var tags = [
        {label: "<0.1% failed",
         pass: 2000,
         fail: 1},
        {label: "<0.1% passed",
         pass: 1,
         fail: 4000},
        {label: "0% failed",
         pass: 100,
         fail: 0},
        {label: "0% passed",
         pass: 0,
         fail: 30},
        // NOTE(review): this label duplicates the previous one although the
        // data is a ~50/50 split -- looks like a copy-paste slip in the
        // fixture name (the label itself is never asserted).
        {label: "0% passed",
         pass: 5005,
         fail: 4995}
    ]
    var stats = window.stats.Statistics(totals, tags, []);
    var totalStats = stats.total;
    var tagStats = stats.tag;
    function percentagesShouldBe(stat, passPercent, failPercent) {
        expect(stat.passPercent).toEqual(passPercent);
        expect(stat.failPercent).toEqual(failPercent);
    }
    function widthsShouldBe(stat, passWidth, failWidth) {
        expect(stat.passWidth).toEqual(passWidth);
        expect(stat.failWidth).toEqual(failWidth);
    }
    it("should count percentages and widths for zero tests to be zero", function () {
        var stat = totalStats[0];
        percentagesShouldBe(stat, 0, 0);
        widthsShouldBe(stat, 0, 0);
    });
    it("should round floats to one digit in percentages and widths", function () {
        var stat = totalStats[1];
        percentagesShouldBe(stat, 66.7, 33.3);
        widthsShouldBe(stat, 66.7, 33.3);
    });
    it("should guarantee that non-zero percentages are at least 0.1", function () {
        var stat = tagStats[0];
        percentagesShouldBe(stat, 99.9, 0.1);
        stat = tagStats[1];
        percentagesShouldBe(stat, 0.1, 99.9);
    });
    it("should guarantee that non-zero widths are at least 1", function () {
        var stat = tagStats[0];
        widthsShouldBe(stat, 99, 1);
        stat = tagStats[1];
        widthsShouldBe(stat, 1, 99);
    });
    it("should handle pass/fail percentages and widths of 0 and 100", function () {
        var stat = tagStats[2];
        percentagesShouldBe(stat, 100, 0);
        widthsShouldBe(stat, 100, 0);
        stat = tagStats[3];
        percentagesShouldBe(stat, 0, 100);
        widthsShouldBe(stat, 0, 100);
    });
    it("should guarantee that widths do not add up to over 100", function () {
        var stat = tagStats[4];
        percentagesShouldBe(stat, 50.1, 50);
        widthsShouldBe(stat, 50, 50);
    });
});
// Specs for util.Matcher: case- and space-insensitive glob matching with
// * and ? wildcards.
describe("Testing Matcher", function () {
    it("should match equal string", function () {
        expect(util.Matcher('xxx').matches('xxx')).toBeTruthy();
        expect(util.Matcher('xxx').matches('yyy')).not.toBeTruthy();
    });
    // Despite the name, this asserts *insensitivity* to case and spaces.
    it("should match case and space sensitively", function () {
        var matches = util.Matcher('Hello World').matches;
        expect(matches('hello WORLD')).toBeTruthy();
        expect(matches('HELLOWORLD')).toBeTruthy();
        expect(matches('h e l l o w o r l d')).toBeTruthy();
    });
    it("should support * wildcard", function () {
        var matches = util.Matcher('Hello*').matches;
        expect(matches('Hello')).toBeTruthy();
        expect(matches('Hello world')).toBeTruthy();
        expect(matches('HELLOWORLD')).toBeTruthy();
        expect(matches('Hillo')).not.toBeTruthy();
    });
    it("should support ? wildcard", function () {
        var matches = util.Matcher('H???o').matches;
        expect(matches('Hello')).toBeTruthy();
        expect(matches('happo')).toBeTruthy();
        expect(matches('Hello!')).not.toBeTruthy();
    });
    it("should escape regexp meta characters", function () {
        var matches = util.Matcher('a+.?').matches;
        expect(matches('a+.x')).toBeTruthy();
        expect(matches('A+.X')).toBeTruthy();
        expect(matches('a+.')).not.toBeTruthy();
        expect(matches('aaa')).not.toBeTruthy();
    });
    it("should match multi line string", function () {
        var matches = util.Matcher('first*last').matches;
        expect(matches('first line\nand last')).toBeTruthy();
        expect(matches('first\nsecond\nthird\nlast')).toBeTruthy();
    });
    it("should support matching any", function () {
        var matchesAny = util.Matcher('ab?d*').matchesAny;
        expect(matchesAny(['xxx', 'abcd'])).toBeTruthy();
        expect(matchesAny(['xxx', 'abc'])).not.toBeTruthy();
        expect(matchesAny([])).not.toBeTruthy();
    });
});
// Specs for util.parseQueryString: query string -> plain object, with
// + decoded as space and percent-escapes decoded.
describe("Testing parseQueryString", function () {
    var parse = util.parseQueryString;
    it("should parse empty string", function () {
        expect(parse('')).toEqual({});
    });
    it("should parse one param", function () {
        expect(parse('foo=bar')).toEqual({foo: 'bar'});
    });
    it("should parse multiple params", function () {
        expect(parse('a=1&b=2&c=3')).toEqual({a: '1', b: '2', c: '3'});
    });
    it("should accept param with name alone (i.e. no =)", function () {
        expect(parse('foo')).toEqual({foo: ''});
        expect(parse('foo&bar')).toEqual({foo: '', bar: ''});
        expect(parse('a&b=2&c=&d')).toEqual({a: '', b: '2', c: '', d: ''});
    });
    it("should accept = in value (although it should be encoded)", function () {
        // Only the first = separates the name from the value.
        expect(parse('a=1=2&b==')).toEqual({a: '1=2', b: '='});
    });
    it("should convert + to space", function () {
        expect(parse('value=hello+world')).toEqual({value: 'hello world'});
    });
    it("should decode uri", function () {
        expect(parse('value=%26%20%3d')).toEqual({value: '& ='});
    });
});
from robot.libraries.BuiltIn import BuiltIn
from QConnectBase.utils import *
import QConnectBase.constants as constants
import logging
import os
class ColorFormatter(logging.Formatter):
    """
    Log formatter that wraps each record in an ANSI color escape sequence
    chosen by the record's level: grey for DEBUG/INFO, yellow for WARNING,
    red for ERROR and bold red for CRITICAL.
    """
    grey = "\x1b[38;21m"
    yellow = "\x1b[33;21m"
    red = "\x1b[31;21m"
    bold_red = "\x1b[31;1m"
    reset = "\x1b[0m"

    # Map log level -> colored format string.  The original code bound
    # ``constants.LOG_FORMATTER`` to a class attribute named ``format``, which
    # shadowed both the builtin and the ``format`` method defined below; the
    # constant is now used directly instead.
    FORMATS = {
        logging.DEBUG: grey + constants.LOG_FORMATTER + reset,
        logging.INFO: grey + constants.LOG_FORMATTER + reset,
        logging.WARNING: yellow + constants.LOG_FORMATTER + reset,
        logging.ERROR: red + constants.LOG_FORMATTER + reset,
        logging.CRITICAL: bold_red + constants.LOG_FORMATTER + reset
    }

    # Cache of logging.Formatter instances, one per level, so a new formatter
    # is not allocated for every record.  Shared class-wide on purpose: the
    # format strings are class constants.
    _formatter_cache = {}

    def format(self, record):
        """
        Format ``record`` with the level-appropriate colored format string.

        **Arguments:**

        * ``record``

          / *Condition*: required / *Type*: logging.LogRecord /

          Log record to format.

        **Returns:**

          / *Type*: str /

          The formatted, colorized log line.
        """
        formatter = self._formatter_cache.get(record.levelno)
        if formatter is None:
            # Unknown levels fall back to logging's default layout (fmt=None).
            formatter = logging.Formatter(self.FORMATS.get(record.levelno))
            self._formatter_cache[record.levelno] = formatter
        return formatter.format(record)
class QFileHandler(logging.FileHandler):
    """
    File handler that logs to a file explicitly configured by the user via
    the connection configuration's ``logfile`` setting.
    """
    def __init__(self, config, _logger_name, formatter):
        """
        Constructor for QFileHandler class.

        **Arguments:**

        * ``config``

          / *Condition*: required / *Type*: DictToClass /

          Connection configurations; ``config.logfile`` names the target file.

        * ``_logger_name``

          / *Condition*: required / *Type*: str /

          Unused.

        * ``formatter``

          / *Condition*: required / *Type*: logging.Formatter /

          Formatter attached to this handler.
        """
        path = self.get_log_path(config)
        super(QFileHandler, self).__init__(path)
        self.setFormatter(formatter)

    @staticmethod
    def get_log_path(config):
        """
        Resolve the log file path for this handler.

        A relative directory in ``config.logfile`` is resolved against Robot
        Framework's output directory; the directory is created if missing.

        **Arguments:**

        * ``config``

          / *Condition*: required / *Type*: DictToClass /

          Connection configurations.

        **Returns:**

          / *Type*: str /

          Absolute log file path.
        """
        out_dir = BuiltIn()._context.output._settings.output_directory
        dir_log = os.path.dirname(config.logfile)
        if not os.path.isabs(dir_log):
            dir_log = os.path.join(out_dir, dir_log)
        # exist_ok avoids the check-then-create race of the original
        # ``os.path.exists`` + ``os.makedirs`` sequence.
        os.makedirs(dir_log, exist_ok=True)
        return os.path.join(dir_log, os.path.basename(config.logfile))

    @staticmethod
    def get_config_supported(config):
        """
        Check if the connection config is supported by this handler: it is
        when ``logfile`` names an actual file, i.e. it is neither ``None``
        nor one of the reserved values ``'nonlog'`` / ``'console'``.

        **Arguments:**

        * ``config``

          / *Condition*: required / *Type*: DictToClass /

          Connection configurations.

        **Returns:**

          / *Type*: bool /

          True if the config is supported, False otherwise.
        """
        return config.logfile not in (None, 'nonlog', 'console')
class QDefaultFileHandler(logging.FileHandler):
    """
    Fallback file handler used when no explicit log file is configured: it
    writes a ``<logger name>_trace.log`` file (truncated on each run) into
    Robot Framework's output directory.
    """
    def __init__(self, _config, logger_name, formatter):
        """
        Create the handler and attach the given formatter.

        **Arguments:**

        * ``_config``

          / *Condition*: required / *Type*: None /

          Unused.

        * ``logger_name``

          / *Condition*: required / *Type*: str /

          Name of the logger; determines the trace file name.

        * ``formatter``

          / *Condition*: required / *Type*: logging.Formatter /

          Formatter attached to this handler.
        """
        super().__init__(self.get_log_path(logger_name), mode='w')
        self.setFormatter(formatter)

    @staticmethod
    def get_log_path(logger_name):
        """
        Return ``<output dir>/<logger_name>_trace.log``.

        **Arguments:**

        * ``logger_name``

          / *Condition*: required / *Type*: str /

          Name of the logger.

        **Returns:**

          / *Type*: str /

          Log file path inside Robot Framework's output directory.
        """
        out_dir = BuiltIn()._context.output._settings.output_directory
        return "%s/%s_trace.log" % (out_dir, logger_name)

    @staticmethod
    def get_config_supported(config):
        """
        This handler applies only when no explicit ``logfile`` is configured.

        **Arguments:**

        * ``config``

          / *Condition*: required / *Type*: DictToClass /

          Connection configurations.

        **Returns:**

          / *Type*: bool /

          True when ``config.logfile`` is ``None``.
        """
        return config.logfile is None
class QConsoleHandler(logging.StreamHandler):
    """
    Stream handler that writes colorized log output to the console.
    """
    def __init__(self, _config, _logger_name, _formatter):
        """
        Create the handler.

        The supplied formatter is ignored on purpose: a ``ColorFormatter``
        is installed instead so that console output is colored per level.

        **Arguments:**

        * ``_config``

          / *Condition*: required / *Type*: None /

          Unused.

        * ``_logger_name``

          / *Condition*: required / *Type*: str /

          Unused.

        * ``_formatter``

          / *Condition*: required / *Type*: logging.Formatter /

          Unused.
        """
        super().__init__()
        self.setFormatter(ColorFormatter())

    @staticmethod
    def get_config_supported(config):
        """
        This handler applies only for the reserved ``logfile`` value
        ``'console'``.

        **Arguments:**

        * ``config``

          / *Condition*: required / *Type*: DictToClass /

          Connection configurations.

        **Returns:**

          / *Type*: bool /

          True when ``config.logfile`` equals ``'console'``.
        """
        return config.logfile == 'console'
class QLogger(Singleton):
    """
    Singleton logger factory for QConnect libraries.  It creates a
    DEBUG-level logger and attaches the first handler class that declares
    support for the active connection configuration.
    """
    # Robot Framework level names that have no direct logging-module
    # counterpart: TRACE maps to "everything", NONE to "nothing".
    NAME_2_LEVEL_DICT = {
        'TRACE': logging.NOTSET,
        'NONE': logging.CRITICAL + 1
    }

    def get_logger(self, logger_name):
        """
        Get (and remember) the logger object for ``logger_name``.

        **Arguments:**

        * ``logger_name``

          / *Condition*: required / *Type*: str /

          Name of the logger.

        **Returns:**

          / *Type*: logging.Logger /

          Logger object, set to DEBUG level.
        """
        self.logger_name = logger_name
        self.logger = logging.getLogger(logger_name)
        self.logger.setLevel(logging.DEBUG)
        self.formatter = logging.Formatter(constants.LOG_FORMATTER)
        return self.logger

    def set_handler(self, config):
        """
        Attach a suitable handler for ``config`` to the logger.

        Every descendant class of ``logging.StreamHandler`` known to
        ``Utils`` is probed via its ``get_config_supported`` hook; the first
        one that accepts the config is instantiated, levelled and attached.

        **Arguments:**

        * ``config``

          / *Condition*: required / *Type*: DictToClass /

          Connection configurations.

        **Returns:**

          / *Type*: logging.Handler /

          The attached handler instance, or None if no handler matched.
        """
        try:
            log_level = BuiltIn()._context.output._settings.log_level
            if log_level in QLogger.NAME_2_LEVEL_DICT:
                log_level = QLogger.NAME_2_LEVEL_DICT[log_level]
        except Exception:
            # Outside a Robot Framework run there is no execution context;
            # fall back to a sane default instead of failing.  Narrowed from
            # a bare ``except:`` so KeyboardInterrupt/SystemExit propagate.
            log_level = logging.INFO
        supported_handler_classes_list = Utils.get_all_descendant_classes(logging.StreamHandler)
        for handler in supported_handler_classes_list:
            try:
                if handler.get_config_supported(config):
                    handler_ins = handler(config, self.logger_name, self.formatter)
                    handler_ins.setLevel(log_level)
                    self.logger.addHandler(handler_ins)
                    return handler_ins
            except Exception:
                # Best effort: handler classes that do not implement the hook
                # or fail to construct are skipped -- but only ordinary
                # exceptions are swallowed, unlike the original bare except.
                continue
        return None
from __future__ import with_statement
from QConnectBase.tcp.tcp_base import BrokenConnError, TCPBase, TCPBaseServer, TCPBaseClient
class RawTCPBase(TCPBase):
    """
    Base class for a raw TCP connection exchanging newline-terminated text
    messages ("\\r\\n" or "\\n").
    """
    def _read(self):
        """
        Read one message from the connection.

        Bytes are consumed one at a time until a "\\r\\n" or "\\n"
        terminator is seen; the terminator is stripped from the result.

        **Returns:**

          / *Type*: str /

          The received message without its line terminator.

        **Raises:**

          ``BrokenConnError`` if the peer closes the connection.
        """
        data = ''
        while True:
            chunk = self.conn.recv(1)
            if not chunk:
                # recv() returning b'' means the peer closed the socket.  The
                # original only detected this when *no* data had been
                # received at all and would spin forever on a half message.
                raise BrokenConnError("socket connection broken")
            data += chunk.decode(self.config.encoding, 'ignore')
            if data[(eol := -2):] == "\r\n" or data[(eol := -1):] == "\n":
                break
        # remove the "\r\n" / "\n" terminator
        return data[:eol]

    def _send(self, msg, cr):
        """
        Send a message over the connection.

        **Arguments:**

        * ``msg``

          / *Condition*: required / *Type*: str /

          Data to be sent.

        * ``cr``

          / *Condition*: optional / *Type*: bool /

          If truthy, append "\\r\\n" after a non-empty message.

        **Returns:**

        (*no returns*)
        """
        with self._send_lock:
            # Encode once and track the offset in *bytes*.  The original
            # re-encoded ``msg[sent:]`` where ``sent`` counted bytes while
            # the slice counted characters, corrupting partially sent
            # multi-byte messages.
            payload = msg.encode(self.config.encoding)
            sent = 0
            while sent < len(payload):
                sent += self.conn.send(payload[sent:])
            if cr and msg != "":
                # sendall() retries partial sends of the terminator.
                self.conn.sendall("\r\n".encode(self.config.encoding))
class RawTCPServer(TCPBaseServer, RawTCPBase):
    """
    Raw tcp connection server.

    Combines the server-side socket handling of ``TCPBaseServer`` with the
    raw read/send primitives of ``RawTCPBase``.
    """
    _CONNECTION_TYPE = "TCPIPServer"

    def __init__(self, mode=None, config=None):
        """
        Constructor of RawTCPServer class: initializes the base connection
        and immediately binds the server socket.

        **Arguments:**
           * ``mode``
              / *Condition*: optional / *Type*: str / *Default*: None /
              TCP mode.
           * ``config``
              / *Condition*: optional / *Type*: dict / *Default*: None /
              Configuration for TCP connection in dictionary format.
        """
        super().__init__(mode, config)
        self._bind()
class RawTCPClient(TCPBaseClient, RawTCPBase):
    """
    Raw tcp connection client.

    Combines the client-side socket handling of ``TCPBaseClient`` with the
    raw read/send primitives of ``RawTCPBase``.
    """
    _CONNECTION_TYPE = "TCPIPClient"

    def __init__(self, mode=None, config=None):
        """
        Constructor of RawTCPClient class.

        **Arguments:**
           * ``mode``
              / *Condition*: optional / *Type*: str / *Default*: None /
              TCP mode.
           * ``config``
              / *Condition*: optional / *Type*: dict / *Default*: None /
              Configuration for TCP connection in dictionary format.
        """
        super().__init__(mode, config)
from robot.libraries import BuiltIn
from .keywordgroup import KeywordGroup
BUILTIN = BuiltIn.BuiltIn()
class _RunOnFailureKeywords(KeywordGroup):
    def __init__(self):
        # Name of the keyword executed on failure, or None when disabled.
        self._run_on_failure_keyword = None
        # Re-entrancy guard: True while the failure keyword itself runs.
        self._running_on_failure_routine = False

    # Public

    def _register_keyword_to_run_on_failure(self, keyword):
        """Sets the keyword to execute when a Selenium2Library keyword fails.

        `keyword_name` is the name of a keyword (from any available
        libraries) that will be executed if a Selenium2Library keyword
        fails. Keywords requiring arguments cannot be used. Passing the
        value "Nothing" (case-insensitive) disables the feature.

        The initial keyword is set in `importing`; the default is
        `Capture Page Screenshot`. Taking a screenshot on failure is very
        useful, but note that it can slow down the execution.

        Returns the name of the previously registered failure keyword so
        the original value can be restored later.

        Example:
        | Register Keyword To Run On Failure  | Log Source | # Run `Log Source` on failure. |
        | ${previous kw}= | Register Keyword To Run On Failure  | Nothing    | # Disables run-on-failure functionality and stores the previous kw name in a variable. |
        | Register Keyword To Run On Failure  | ${previous kw} | # Restore to the previous keyword. |

        This run-on-failure functionality only works when running tests on
        Python/Jython 2.4 or newer and it does not work on IronPython at all.
        """
        previous = self._run_on_failure_keyword
        previous_text = "No keyword" if previous is None else previous
        if keyword.strip().lower() == "nothing":
            self._run_on_failure_keyword = None
        else:
            self._run_on_failure_keyword = keyword
        current = self._run_on_failure_keyword
        current_text = "No keyword" if current is None else current
        self._info('%s will be run on failure.' % current_text)
        return previous_text

    # Private

    def _run_on_failure(self):
        # Nothing registered, or already inside the failure routine.
        if self._run_on_failure_keyword is None or self._running_on_failure_routine:
            return
        self._running_on_failure_routine = True
        try:
            BUILTIN.run_keyword(self._run_on_failure_keyword)
        except Exception as err:
            self._run_on_failure_error(err)
        finally:
            self._running_on_failure_routine = False

    def _run_on_failure_error(self, err):
        message = "Keyword '%s' could not be run on failure: %s" % (self._run_on_failure_keyword, err)
        # Prefer a warning when the mixin host provides one; otherwise fail loudly.
        if hasattr(self, '_warn'):
            self._warn(message)
            return
        raise Exception(message)
import json
import logging
from typing import Any, Dict, List, Optional, Tuple, Type, Union
from urllib.parse import quote
import pika
import requests
from pika import BaseConnection
from pika.adapters.blocking_connection import BlockingChannel
from pika.connection import Parameters
from pika.frame import Method as FrameMethod
from pika.spec import Basic, BasicProperties, Connection
from pika.exceptions import ChannelClosed, IncompatibleProtocolError
from robot.api import logger
from robot.utils import ConnectionCache
from robot.libraries.BuiltIn import BuiltIn
from socket import gaierror, error
RabbitMqMessage = Union[Tuple[Dict[str, Any], Dict[str, Any], str], Tuple[None, None, None]] # noqa: 993
class RequestConnection(object):
    """Settings needed to reach RabbitMQ's management HTTP API."""

    def __init__(self, host: str, port: Union[int, str], username: str, password: str, timeout: int) -> None:
        """
        Initialization.

        *Args:*\n
        _host_ - server host name;\n
        _port_ - port number;\n
        _username_ - user name;\n
        _password_ - user password;\n
        _timeout_ - connection timeout;\n
        """
        self.host = host
        self.port = port
        self.auth = (username, password)
        self.timeout = timeout
        # Base URL of the management API; endpoint paths are appended to it.
        self.url = f'http://{host}:{port}/api'

    def close(self) -> None:
        """Close connection (no persistent resources are actually held)."""
        pass
class BlockedConnection(pika.BlockingConnection):
    """
    Wrapper over the standard blocking connection to RabbitMQ.

    Registers for the server's Connection.Blocked / Connection.Unblocked
    notifications so callers can check the ``blocked`` property before
    publishing.
    """

    def __init__(self, parameters: Parameters = None, impl_class: Type[BaseConnection] = None) -> None:
        """Initialize the base connection and register blocking callbacks.

        Args:
            parameters: connection parameters instance or non-empty sequence of them;
            impl_class: implementation class (for test/debugging only).
        """
        super().__init__(parameters=parameters, _impl_class=impl_class)
        self._blocked = False
        self.add_on_connection_blocked_callback(self.on_blocked)
        self.add_on_connection_unblocked_callback(self.on_unblocked)

    def on_blocked(self, method: Connection.Blocked) -> None:
        """
        Mark the connection as blocked by the server.

        Args:
            method: the method frame's `method` member is of type `pika.spec.Connection.Blocked`.
        """
        self._blocked = True

    def on_unblocked(self, method: Connection.Unblocked) -> None:
        """
        Mark the connection as no longer blocked.

        Args:
            method: the method frame's `method` member is of type `pika.spec.Connection.Unblocked`.
        """
        self._blocked = False

    @property
    def blocked(self) -> bool:
        """
        *Returns:*\n
        Connection blocking flag.
        """
        return self._blocked

    def close(self, reply_code: int = 200, reply_text: str = 'Normal shutdown') -> None:
        """Close the AMQP connection unless it is already closed.

        Args:
            reply_code: the code number for the close.
            reply_text: the text reason for the close.
        """
        if not self.is_open:
            logger.debug("Connection is already closed.")
            return
        super().close(reply_code=reply_code, reply_text=reply_text)
class RabbitMq(object):
"""
Library for working with RabbitMQ.
== Dependencies ==
| pika | https://pypi.org/project/pika/ |
| requests | https://pypi.python.org/pypi/requests |
| robot framework | http://robotframework.org |
== Example ==
| *Settings* | *Value* |
| Library | RabbitMq |
| Library | Collections |
| *Test Cases* | *Action* | *Argument* | *Argument* | *Argument* | *Argument* | *Argument* |
| Simple |
| | Create Rabbitmq Connection | my_host_name | 15672 | 5672 | guest | guest | alias=rmq |
| | ${overview}= | Overview |
| | Log Dictionary | ${overview} |
| | Close All Rabbitmq Connections |
"""
ROBOT_LIBRARY_SCOPE = 'GLOBAL'
def __init__(self) -> None:
    """ Initialization. """
    # pika is very chatty on INFO level; keep only warnings and above.
    logging.getLogger("pika").setLevel(logging.WARNING)
    self._http_cache = ConnectionCache()
    self._amqp_cache = ConnectionCache()
    self._http_connection: Optional[RequestConnection] = None
    self._amqp_connection: Optional[BlockedConnection] = None
    # AMQP channel, created lazily by _get_channel().
    self._channel: Optional[BlockingChannel] = None
@property
def http_connection(self) -> RequestConnection:
    """Current http connection to RabbitMQ.

    *Raises:*\n
    RuntimeError: if there isn't any open connection.

    *Returns:*\n
    Current http connection to RabbitMQ.
    """
    connection = self._http_connection
    if connection is None:
        raise RuntimeError('There is no open http connection to RabbitMQ.')
    return connection
@property
def amqp_connection(self) -> BlockedConnection:
    """Current amqp connection to RabbitMQ.

    *Raises:*\n
    RuntimeError: if there isn't any open connection.

    *Returns:*\n
    Current amqp connection to RabbitMQ.
    """
    if self._amqp_connection is None:
        # Fixed "ampq" typo in the user-facing error message.
        raise RuntimeError('There is no open amqp connection to RabbitMQ.')
    return self._amqp_connection
def _connect_to_amqp(self, host: str, port: Union[int, str], username: str = 'guest', password: str = 'guest',
                     alias: Optional[str] = None, virtual_host: str = '/', socket_timeout: int = 15,
                     heartbeat_timeout: int = 600, blocked_timeout: int = 300) -> int:
    """ Connect to server via AMQP.

    *Args*:\n
    _host_: server host name.\n
    _port_: port number.\n
    _username_: user name.\n
    _password_: user password.\n
    _alias_: connection alias.\n
    _virtual_host_: virtual host name;\n
    _socket_timeout_: socket connect timeout;\n
    _heartbeat_timeout_: AMQP heartbeat timeout negotiation during connection tuning;\n
    _blocked_timeout_: timeout for the connection to remain blocked.\n

    *Returns:*\n
    Server connection index.
    """
    # BuiltIn().fail raises, so execution does not continue past these checks.
    if port is None:
        BuiltIn().fail(msg="RabbitMq: port for connect is None")
    port = int(port)
    if virtual_host is None:
        BuiltIn().fail(msg="RabbitMq: virtual host for connect is None")
    virtual_host = str(virtual_host)
    # Human-readable parameter summary, reused in the failure message below.
    parameters_for_connect = \
        f"host={host}, port={port}, username={username}, timeout={socket_timeout}, alias={alias}"
    logger.debug(f'Connecting using : {parameters_for_connect}')
    credentials = pika.PlainCredentials(username=username, password=password)
    conn_params = pika.ConnectionParameters(host=host, port=port,
                                            credentials=credentials,
                                            virtual_host=virtual_host,
                                            socket_timeout=socket_timeout,
                                            blocked_connection_timeout=blocked_timeout,
                                            heartbeat=heartbeat_timeout)
    try:
        self._amqp_connection = BlockedConnection(parameters=conn_params)
    except (gaierror, error, IOError, IncompatibleProtocolError):
        # fail() raises, so the register() below only runs on success.
        BuiltIn().fail(msg=f"RabbitMq: Could not connect with following parameters: {parameters_for_connect}")
    # Drop any channel belonging to a previously active connection.
    self._channel = None
    return self._amqp_cache.register(self._amqp_connection, alias)
def _connect_to_http(self, host: str, port: Union[int, str], username: str, password: str, alias: str) -> int:
    """ Connect to server via HTTP.

    *Args*:\n
    _host_: server host name.\n
    _port_: port number.\n
    _username_: user name.\n
    _password_: user password.\n
    _alias_: connection alias.\n

    *Returns:*\n
    Server connection index.
    """
    if port is None:
        BuiltIn().fail(msg="RabbitMq: port for connect is None")
    port = int(port)
    timeout = 15
    parameters_for_connect = f"host={host}, port={port}, username={username}, timeout={timeout}, alias={alias}"
    # f-string for consistency with the rest of the module (was .format()).
    logger.debug(f'Connecting using : {parameters_for_connect}')
    try:
        self._http_connection = RequestConnection(host, port, username, password, timeout)
    except (gaierror, error, IOError):
        BuiltIn().fail(msg=f"RabbitMq: Could not connect with following parameters: {parameters_for_connect}")
    return self._http_cache.register(self._http_connection, alias)
def create_rabbitmq_connection(self, host: str, http_port: Union[int, str], amqp_port: Union[int, str],
                               username: str, password: str, alias: str, vhost: str) -> None:
    """
    Connect to RabbitMq server over both HTTP (management API) and AMQP.

    *Args:*\n
    _host_ - server host name;\n
    _http_port_ - port number of http-connection \n
    _amqp_port_ - port number of amqp-connection \n
    _username_ - user name;\n
    _password_ - user password;\n
    _alias_ - connection alias;\n
    _vhost_ - virtual host name;\n

    *Raises:*\n
    socket.error if connection cannot be created.

    *Example:*\n
    | Create Rabbitmq Connection | my_host_name | 15672 | 5672 | guest | guest | alias=rmq | vhost=/ |
    """
    # Two cached connections share one user-visible alias; suffixes keep
    # them apart in their respective caches.
    http_alias = alias + "_http"
    amqp_alias = alias + "_amqp"
    self._connect_to_http(host=host, port=http_port, username=username, password=password, alias=http_alias)
    self._connect_to_amqp(host=host, port=amqp_port, username=username, password=password, alias=amqp_alias,
                          virtual_host=vhost)
def switch_rabbitmq_connection(self, alias: str) -> int:
    """Switch between active RabbitMq connections using their index or alias.\n

    Alias is set in keyword [#Create Rabbitmq Connection|Create Rabbitmq Connection]
    which also returns the index of connection.\n

    *Args:*\n
    _alias_ - connection alias;

    *Returns:*\n
    Index of previous connection.

    *Example:*\n
    | Create Rabbitmq Connection | my_host_name_1 | 15672 | 5672 | guest | guest | alias=rmq1 |
    | Create Rabbitmq Connection | my_host_name_2 | 15672 | 5672 | guest | guest | alias=rmq2 |
    | Switch Rabbitmq Connection | rmq1 |
    | ${live}= | Is alive |
    | Switch Rabbitmq Connection | rmq2 |
    | ${live}= | Is alive |
    | Close All Rabbitmq Connections |
    """
    old_index = self._http_cache.current_index
    logger.debug(f'Switch active connection from {old_index} to {alias}')
    # Switch both underlying connections in lockstep (see the alias
    # suffixing in create_rabbitmq_connection).
    self._http_connection = self._http_cache.switch(alias + '_http')
    self._amqp_connection = self._amqp_cache.switch(alias + '_amqp')
    # The cached channel belongs to the previous AMQP connection.
    self._channel = None
    return old_index
def disconnect_from_rabbitmq(self) -> None:
    """
    Close current RabbitMq connection (both HTTP and AMQP legs).

    *Example:*\n
    | Create Rabbitmq Connection | my_host_name | 15672 | 5672 | guest | guest | alias=rmq |
    | Disconnect From Rabbitmq |
    """
    # Property access raises RuntimeError when no connection is open.
    connection = self.http_connection
    logger.debug(f'Close connection with : host={connection.host}, port={connection.port}')
    connection.close()
    self._http_connection = None
    self._channel = None
    amqp = self._amqp_connection
    if amqp is not None:
        if amqp.is_open:
            amqp.close()
        self._amqp_connection = None
def close_all_rabbitmq_connections(self) -> None:
    """
    Close all RabbitMq connections.

    This keyword is used to close all connections only in case if there are several open connections.
    Do not use keywords [#Disconnect From Rabbitmq|Disconnect From Rabbitmq] and
    [#Close All Rabbitmq Connections|Close All Rabbitmq Connections] together.\n

    After this keyword is executed the index returned by [#Create Rabbitmq Connection|Create Rabbitmq Connection]
    starts at 1.\n

    *Example:*\n
    | Create Rabbitmq Connection | my_host_name | 15672 | 5672 | guest | guest | alias=rmq |
    | Close All Rabbitmq Connections |
    """
    # Let each cache tear down its connections, then forget all local state.
    self._http_cache.close_all()
    self._amqp_cache.close_all()
    self._http_connection = None
    self._amqp_connection = None
    self._channel = None
# AMQP API
def _get_channel(self) -> BlockingChannel:
    """ Get (lazily creating) a channel on the current AMQP connection.

    *Raises:*\n
    Exception: if the server has blocked the connection.

    *Returns:*\n
    Channel.
    """
    connection = self.amqp_connection
    if self._channel is None:
        self._channel = connection.channel()
    if connection.blocked:
        raise Exception('Connection is blocked')
    return self._channel
def create_exchange(self, exchange_name: str, exchange_type: str, auto_delete: bool = False,
                    durable: bool = False, arguments: Dict[str, Any] = None) -> None:
    """
    Create exchange.

    The parameter _arguments_ is passed as dictionary.\n
    When defining "alternate-exchange" argument in the dictionary
    it is necessary to pass exchange's alternative name
    (if message cannot be routed it will be sent to alternative exchange).\n

    *Args:*\n
    _exchange_name_ - exchange name;\n
    _exchange_type_ - exchange type (direct, topic, headers, fanout);\n
    _auto_delete_ - delete exchange when all queues finish working with it (true, false);\n
    _durable_ - exchange survives when broker restarts (true, false);\n
    _arguments_ - additional arguments in dictionary format;\n

    *Example:*\n
    | ${list}= | Create List | list_value | ${TRUE} | 18080 |
    | ${args}= | Create Dictionary | arg1=value1 | arg2=${list} | alternate-exchange=amq.fanout |
    | Create Exchange | exchange_name=testExchange | exchange_type=fanout | auto_delete=false | durable=true | arguments=${args} |
    """
    # Robot may hand over non-string values; normalize before the AMQP call.
    exchange_name = str(exchange_name)
    exchange_type = str(exchange_type)
    logger.debug(f"Creating new exchange {exchange_name} with type {exchange_type}")
    channel = self._get_channel()
    channel.exchange_declare(exchange=exchange_name,
                             exchange_type=exchange_type,
                             durable=durable,
                             auto_delete=auto_delete,
                             arguments=arguments)
def is_exchange_exist(self, name: str, exchange_type: str) -> bool:
    """
    Check if exchange exists.

    *Args:*\n
    _name_ - exchange name;\n
    _exchange_type_ - exchange type;\n

    *Example:*\n
    | ${is_exist}= | Is Exchange Exist | name='name' | exchange_type='direct' |
    | Should Be True | ${is_exist} |

    *Returns:*\n
    True if exchange exists otherwise False
    """
    name = str(name)
    exchange_type = str(exchange_type)
    try:
        # passive=True only checks existence; it raises if the exchange is missing.
        self._get_channel().exchange_declare(exchange=name, exchange_type=exchange_type, passive=True)
        return True
    except ChannelClosed:
        # A failed passive declare closes the channel. Drop the cached
        # channel so the next AMQP operation opens a fresh one instead of
        # failing on the dead channel.
        self._channel = None
        return False
def delete_exchange(self, exchange_name: str) -> None:
    """
    Delete exchange.

    *Args:*\n
    _exchange_name_ - exchange name;\n

    *Example:*\n
    | Delete Exchange | exchange_name=testExchange |
    """
    channel = self._get_channel()
    channel.exchange_delete(exchange=str(exchange_name))
def create_queue(self, queue_name: str, auto_delete: bool = False, durable: bool = False,
                 node: str = None, arguments: Dict[str, Any] = None) -> None:
    """
    Create queue.

    *Args:*\n
    _queue_name_ - queue name (quoted with requests.utils.quote);\n
    _auto_delete_ - delete queue when last subscriber unsubscribes from queue (true, false);\n
    _durable_ - queue survives when broker restarts (true, false);\n
    _node_ - RabbitMq node name (accepted for backward compatibility; not used by this implementation);\n
    _arguments_ - additional arguments in dictionary format;\n

    *Example:*\n
    | ${list}= | Create List | list_value | ${FALSE} | 15240 |
    | ${args}= | Create Dictionary | arg1=value1 | arg2=${list} |
    | Create Queue | queue_name=testQueue | auto_delete=false | durable=true | node=rabbit@primary | arguments=${args} |
    """
    queue_name = str(queue_name)
    logger.debug(f'Create queue {queue_name}')
    channel = self._get_channel()
    channel.queue_declare(queue=queue_name, durable=durable, auto_delete=auto_delete,
                          arguments=arguments)
def is_queue_exist(self, name: str) -> bool:
    """
    Check if queue exists.

    *Args:*\n
    _name_ - queue name

    *Example:*\n
    | ${exist}= | Is Queue Exist | name='queue' |
    | Should Be True | ${exist} |

    *Returns:*\n
    True if queue exists otherwise False
    """
    try:
        # passive=True only checks existence; it raises if the queue is missing.
        self._get_channel().queue_declare(queue=name, passive=True)
        return True
    except ChannelClosed:
        # A failed passive declare closes the channel. Drop the cached
        # channel so the next AMQP operation opens a fresh one instead of
        # failing on the dead channel.
        self._channel = None
        return False
def binding_exchange_with_queue(self, exchange_name: str, queue_name: str, routing_key: str = '',
                                arguments: Dict[str, Any] = None) -> None:
    """
    Create binding of exchange with queue.

    *Args:*\n
    _exchange_name_ - exchange name;\n
    _queue_name_ - queue name;\n
    _routing_key_ - routing key;\n
    _arguments_ - additional arguments in dictionary format;\n

    *Example:*\n
    | ${list}= | Create List | str1 | ${FALSE} |
    | ${args}= | Create Dictionary | arg1=value1 | arg2=${list} |
    | Binding Exchange With Queue | exchange_name=testExchange | queue_name=testQueue | routing_key=key | arguments=${args} |
    """
    queue_name = str(queue_name)
    exchange_name = str(exchange_name)
    logger.debug(f'Binding queue {queue_name} to exchange {exchange_name}, with routing key {routing_key}')
    channel = self._get_channel()
    channel.queue_bind(queue=queue_name, exchange=exchange_name, routing_key=routing_key,
                       arguments=arguments)
def unbind_queue(self, queue_name: str, exchange_name: str, routing_key: str = '',
                 arguments: Dict[str, Any] = None) -> None:
    """
    Unbind queue from exchange.

    *Args:*\n
    _queue_name_ - queue name;\n
    _exchange_name_ - exchange name;\n
    _routing_key_ - routing key;\n
    _arguments_ - additional arguments in dictionary format;\n
    """
    queue_name = str(queue_name)
    exchange_name = str(exchange_name)
    logger.debug(f'Unbind queue {queue_name} from exchange {exchange_name} with routing key {routing_key}')
    channel = self._get_channel()
    channel.queue_unbind(queue=queue_name, exchange=exchange_name, routing_key=routing_key,
                         arguments=arguments)
def purge_queue(self, queue_name: str) -> None:
    """
    Remove all messages from a queue without deleting the queue itself.

    *Args:*\n
    _queue_name_ - queue name;\n
    """
    queue_name = str(queue_name)
    logger.debug(f'Purge queue {queue_name}')
    self._get_channel().queue_purge(queue=queue_name)
def delete_queue(self, queue_name: str) -> None:
    """
    Delete queue.

    *Args:*\n
    _queue_name_ - queue name;\n

    *Example:*\n
    | Delete Queue | queue_name=testQueue |
    """
    channel = self._get_channel()
    channel.queue_delete(queue=str(queue_name))
def enable_consuming_messages_in_queue(self, queue_name: str, count: int, requeue: bool,
                                       consumed_list: List[int]) -> str:
    """
    Enable consuming messages in queue.

    Registers a consumer that rejects (optionally requeueing) each delivered
    message, records its delivery tag in _consumed_list_, and cancels itself
    once _count_ messages have been seen. Consumption actually happens when
    the connection processes events (see `Process Published Message In Queries`).

    *Args:*\n
    _queue_name_ - queue name;\n
    _count_ - number of messages to consume;\n
    _requeue_ - re-placing consumed message in the queue with setting of redelivered attribute (true, false);\n
    _consumed_list_ - list of delivery_tag of all consumed messages;\n

    *Returns:*\n
    Identifier of message handler in the queue.

    *Example:*\n
    | ${list}= | Create List |
    | Enable Consuming Messages In Queue | queue_name=${QUEUE_NAME} | count=1 | requeue=${FALSE} | consumed_list=${list} |
    | Log List | ${list} |
    """
    count = int(count)
    queue_name = str(queue_name)
    # Deterministic consumer tag so the callback can cancel itself.
    consumer_name = f"consumer{queue_name}"

    def on_message_callback(channel: BlockingChannel, method: Basic.Deliver, properties: BasicProperties,
                            body: bytes) -> None:
        """
        Callback for consuming messages from the queue.

        Processes specified number of messages and closes.

        *Args:*\n
        channel: BlockingChannel;
        method: spec.Basic.Deliver;
        properties: spec.BasicProperties;
        body: bytes.
        """
        tag = method.delivery_tag
        logger.debug(f"Consume message {tag} - {body}")
        # Reject rather than ack: the keyword only *observes* messages;
        # requeue controls whether they go back onto the queue.
        channel.basic_reject(tag, requeue)
        consumed_list.append(tag)
        # Stop consuming once the requested number of messages was seen.
        if len(consumed_list) >= count:
            channel.basic_cancel(consumer_name)
    logger.debug(f'Begin consuming messages. Queue={queue_name}, count={count}')
    self._get_channel().basic_consume(queue=queue_name, consumer_tag=consumer_name,
                                      on_message_callback=on_message_callback)
    return consumer_name
def get_message_from_queue(self, queue_name: str, ack: bool = True) -> RabbitMqMessage:
    """Getting single message from RabbitMQ queue.

    Method gets first message from queue and acks it if ack=True.

    *Args:*\n
    queue_name: queue_name; \n
    ack: ack message or not (default=True);\n

    *Returns:*\n
    delivery_data: delivery_data dictionary.
    message_properties: message properties dictionary.
    body: message body.
    If queue is empty, returns None, None, None.

    *Example:*\n
    | Get Message From Queue | my_queue_name|
    """
    method, properties, body = self._get_channel().basic_get(queue=queue_name)
    # basic_get returns (None, None, None) when the queue is empty. Check
    # explicitly against None: the previous truthiness test also treated a
    # message with an *empty* body as "no message" and left it unacked.
    if method is None:
        return None, None, None
    # Mirror the relevant attributes of the pika frames into plain dicts.
    delivery_fields = ('delivery_tag', 'redelivered', 'exchange', 'routing_key', 'message_count')
    delivery_data = {field: getattr(method, field) for field in delivery_fields}
    property_fields = ('content_type', 'content_encoding', 'headers', 'delivery_mode', 'priority',
                       'correlation_id', 'reply_to', 'expiration', 'message_id', 'timestamp',
                       'type', 'user_id', 'app_id', 'cluster_id')
    message_properties = {field: getattr(properties, field) for field in property_fields}
    if ack:
        self._get_channel().basic_ack(delivery_tag=delivery_data['delivery_tag'])
    return delivery_data, message_properties, body
def publish_message(self, exchange_name: str, routing_key: str, payload: str, props: Dict[str, Any] = None) -> None:
    """
    Publish message to the queue.

    *Args:*\n
    _exchange_name_ - exchange name;\n
    _routing_key_ - routing key (quoted with requests.utils.quote);\n
    _payload_ - payload message;\n
    _props_ - additional arguments in dictionary format;\n
    Includes such keys as:\n
    - _content-type_ - message content type (shortstr);
    - _content_encoding_ - message encoding type (shortstr);
    - _headers_ - message headers table, a dictionary with keys of type string and values of types
    string | int | Decimal | datetime | dict values (table);
    - _delivery_mode_ - Non-persistent (1) or persistent (2) (octet);
    - _priority_ - message priority from 0 to 9 (octet);
    - _correlation_id_ - message identifier to which current message responds (shortstr);
    - _reply_to_ - commonly used to name a reply queue (shortstr);
    - _expiration_ - expiration date of message (shortstr);
    - _message_id_ - message identifier (shortstr);
    - _timestamp_ - timestamp of sending message (shortstr);
    - _type_ - message type (shortstr);
    - _user_id_ - user-sender identifier (shortstr);
    - _app_id_ - application identifier (shortstr);
    - _cluster_id_ - cluster identifier (shortstr);\n

    *Attention:*\n
    When using library in robot-files parameters (props)
    must be cast to the correct type, e.g.:\n
    | ${delivery_mode}= | Convert To Integer | 2 |
    This is due to the feature of RabbitMq library.\n

    *Example:*\n
    | ${list_headers}= | Create List | head_value | 2 | ${TRUE} |
    | ${headers_dict}= | Create Dictionary | head1=val1 | head2=${list_headers} |
    | ${prop_dict}= | Create Dictionary | application_headers=${headers_dict} | content-type=text/plain | priority=1 | expiration=1410966000 | message_id=101 | user_id=guest |
    | Publish Message | exchange_name=testExchange | routing_key=testQueue | payload=message body | props=${prop_dict} |
    """
    exchange_name = str(exchange_name)
    routing_key = str(routing_key)
    # Wrap the plain dict into pika's properties object when supplied.
    properties = pika.BasicProperties(**props) if props is not None else None
    logger.debug(f'Publish message to {exchange_name} with routing {routing_key}')
    self._get_channel().basic_publish(exchange=exchange_name, routing_key=routing_key,
                                      body=payload, properties=properties)
def process_published_message_in_queries(self, waiting: int = 1) -> None:
    """
    Send processing of published message in queues to handler.\n

    May end with exception if handler is not installed or there are no messages in queue.\n

    *Args:*\n
    _waiting_ - server response timeout.
    """
    # Drive pika's I/O loop for at most `waiting` seconds so registered
    # consumer/confirm callbacks get a chance to fire.
    self.amqp_connection.process_data_events(time_limit=int(waiting))
def enable_message_sending_confirmation(self, confirmed_list: List[str], activate: bool = True) -> None:
    """
    Enable processing of successful message sending confirmation in the exchange servers.\n

    If message is successfully sent, its delivery_tag is appended to confirmed_list.\n

    *Args:*\n
    _confirmed_list_ - list in which all the delivery tag of sent messages are saved;\n
    _activate_ - indicates that message sending listener should start;\n

    *Example:*\n
    | ${list}= | Create List |
    | Enable Message Sending Confirmation | confirmed_list=${list} |
    | Publish Message | exchange_name=${EXCHANGE_NAME} | routing_key=${ROUTING_KEY} | payload=message body |
    | Process Published Message In Queries |
    | Length Should Be | ${list} | 1 |
    """
    def confirm_callback(method: FrameMethod) -> None:
        """
        Called when sending message notification is received.
        """
        delivery_tag = method.method.delivery_tag
        logger.debug(f'Capture confirm message with tag={delivery_tag}')
        confirmed_list.append(delivery_tag)
    # Put the channel into publisher-confirm mode; the broker will then
    # send Basic.Ack frames for published messages.
    self._get_channel().confirm_delivery()
    logger.debug('Begin checking confirm publish')
    if activate is True:
        # NOTE(review): this reaches into pika's private `_impl` to register
        # a raw Basic.Ack callback — presumably because BlockingChannel has
        # no public hook for this; verify against the pinned pika version
        # when upgrading.
        self._get_channel()._impl.add_callback(callback=confirm_callback,
                                               replies=[pika.spec.Basic.Ack],
                                               one_shot=False)
# Manager API
@staticmethod
def _prepare_request_headers(body: Dict[str, Any] = None) -> Dict[str, str]:
    """
    Headers definition for HTTP-request.

    *Args:*\n
    _body_: HTTP-request body.

    *Returns:*\n
    Dictionary with headers for HTTP-request: a JSON content type when a
    body is supplied, otherwise empty.
    """
    if body:
        return {"Content-Type": "application/json"}
    return {}
@staticmethod
def _quote_vhost(vhost: str) -> str:
    """ Vhost quote.

    *Args:*\n
    _vhost_: vhost name for quoting.

    *Returns:*\n
    Quoted name of vhost ('/' becomes '%2F'; an already-quoted '%2F'
    is passed through unchanged).
    """
    if vhost == '/':
        return '%2F'
    if vhost == '%2F':
        return vhost
    return quote(vhost)
def is_alive(self) -> bool:
    """
    Rabbitmq health check.

    Sends GET-request : 'http://<host>:<port>/api/' and checks response status code.\n

    *Returns:*\n
    bool True if return code is 200.
    bool False in all other cases.

    *Raises:*\n
    Exception if it is not possible to send GET-request.

    *Example:*\n
    | ${live}= | Is Alive |
    =>\n
    True
    """
    try:
        response = requests.get(self.http_connection.url,
                                auth=self.http_connection.auth,
                                headers=self._prepare_request_headers(),
                                timeout=self.http_connection.timeout)
    except requests.exceptions.RequestException as e:
        # Chain the original cause so the underlying network error is
        # visible in the traceback.
        raise Exception(f'Could not send request: {e}') from e
    logger.debug(f'Response status={response.status_code}')
    return response.status_code == 200
def overview(self) -> Dict[str, Any]:
    """ Information about RabbitMq server.

    Queries the management API endpoint ``/api/overview``, which reports
    cluster name, node, versions, listeners, exchange types, message and
    object totals, etc.

    *Returns:*\n
    Dictionary with information about the server.

    *Raises:*\n
    raise HTTPError if the HTTP request returned an unsuccessful status code.

    *Example:*\n
    | ${overview}= | Overview |
    | Log Dictionary | ${overview} |
    | ${version}= | Get From Dictionary | ${overview} | rabbitmq_version |
    =>\n
    ${version} = 3.3.0
    """
    response = requests.get(self.http_connection.url + '/overview',
                            auth=self.http_connection.auth,
                            headers=self._prepare_request_headers(),
                            timeout=self.http_connection.timeout)
    response.raise_for_status()
    return response.json()
def connections(self) -> List[Dict[str, Any]]:
    """ List of open connections reported by the management API.

    *Returns:*\n
    List of open connections in JSON format.

    *Raises:*\n
    raise HTTPError if the HTTP request returned an unsuccessful status code.
    """
    response = requests.get(self.http_connection.url + '/connections',
                            auth=self.http_connection.auth,
                            headers=self._prepare_request_headers(),
                            timeout=self.http_connection.timeout)
    response.raise_for_status()
    return response.json()
def get_name_of_all_connections(self) -> List[str]:
    """ Names of all open connections.

    *Returns:*\n
    List with names of all open connections.
    """
    return [connection['name'] for connection in self.connections()]
def channels(self) -> List[Dict[str, Any]]:
    """ List of open channels reported by the management API.

    *Returns:*\n
    List of open channels in JSON format.

    *Raises:*\n
    raise HTTPError if the HTTP request returned an unsuccessful status code.
    """
    response = requests.get(self.http_connection.url + '/channels',
                            auth=self.http_connection.auth,
                            headers=self._prepare_request_headers(),
                            timeout=self.http_connection.timeout)
    response.raise_for_status()
    return response.json()
def get_exchange(self, exchange_name: str, vhost: str = '%2F') -> Dict[str, Any]:
""" Get information about exchange.
Parameters are quoted with requests.utils.quote.
*Args:*\n
_exchange_name_ - exchange name;\n
_vhost_ - virtual host name;\n
*Returns:*\n
Dictionary with information about exchange.
*Raises:*\n
raise HTTPError if the HTTP request returned an unsuccessful status code.
*Example:*\n
| ${exchange}= | Get Exchange | exchange_name=testExchange | vhost=/ |
| Log Dictionary | ${exchange} |
| ${value}= | Get From Dictionary | ${exchange} | name |
| Log | ${value} |
=>\n
Dictionary size is 9 and it contains following items:
| arguments | {u'arg1': u'value1', u'arg2': [u'list_value', True, u'18080'], u'alternate-exchange': u'amq.topic'} |
| auto_delete | False |
| durable | True |
| incoming | [] |
| internal | False |
| name | testExchange |
| outgoing | [] |
| type | fanout |
| vhost | / |
${value} = testExchange
"""
path = f'/exchanges/{self._quote_vhost(vhost)}/{quote(exchange_name)}'
response = requests.get(self.http_connection.url + path,
auth=self.http_connection.auth,
headers=self._prepare_request_headers(),
timeout=self.http_connection.timeout)
response.raise_for_status()
return response.json()
def exchanges(self) -> List[Dict[str, Any]]:
""" List of exchanges.
*Returns:*\n
List of exchanges in JSON format.
*Raises:*\n
raise HTTPError if the HTTP request returned an unsuccessful status code.
*Example:*\n
| ${exchanges}= | Exchanges |
| Log List | ${exchanges} |
| ${item}= | Get From list | ${exchanges} | 1 |
| ${name}= | Get From Dictionary | ${q} | name |
=>\n
List length is 8 and it contains following items:
| 0 | {'name': '', 'durable': True, 'vhost': '/', 'internal': False, 'message_stats': [], 'arguments': {}, 'type': 'direct', 'auto_delete': False} |
| 1 | {'name': 'amq.direct', 'durable': True, 'vhost': '/', 'internal': False, 'message_stats': [], 'arguments': {}, 'type': 'direct', 'auto_delete': False} |
...\n
${name} = amq.direct
"""
url = self.http_connection.url + '/exchanges'
response = requests.get(url, auth=self.http_connection.auth,
headers=self._prepare_request_headers(),
timeout=self.http_connection.timeout)
response.raise_for_status()
return response.json()
def get_names_of_all_exchanges(self) -> List[str]:
""" List of names of all exchanges.
*Returns:*\n
List of names of all exchanges.
*Example:*\n
| ${names}= | Get Names Of All Exchanges |
| Log List | ${names} |
=>\n
| List has one item:
| amq.direct
"""
return [item['name'] for item in self.exchanges()]
def get_exchanges_on_vhost(self, vhost: str = '%2F') -> List[Dict[str, Any]]:
""" List of exchanges on virtual host.
*Returns:*\n
List of exchanges in JSON format.
*Raises:*\n
raise HTTPError if the HTTP request returned an unsuccessful status code.
*Args:*\n
_vhost_ - virtual host name (quoted with requests.utils.quote);
"""
url = self.http_connection.url + '/exchanges/' + self._quote_vhost(vhost)
response = requests.get(url, auth=self.http_connection.auth,
headers=self._prepare_request_headers(),
timeout=self.http_connection.timeout)
response.raise_for_status()
return response.json()
def get_names_of_exchanges_on_vhost(self, vhost: str = '%2F') -> List[str]:
"""List of exchanges names on virtual host.
*Args:*\n
_vhost_: virtual host name (quoted with requests.utils.quote);
*Returns:*\n
List of exchanges names.
*Example:*\n
| ${names}= | Get Names Of Exchanges On Vhost |
| Log List | ${names} |
=>\n
| List has one item:
| federation: ex2 -> rabbit@server.net.ru
"""
return [item['name'] for item in self.get_exchanges_on_vhost(vhost)]
def get_queue(self, queue_name: str, vhost: str = '%2F') -> Dict[str, Any]:
"""
Get information about queue.
Parameters are quoted with requests.utils.quote.
*Args:*\n
_queue_name_ - queue name;\n
_vhost_ - virtual host name (quoted with requests.utils.quote);\n
*Returns:*\n
Dictionary with information about queue.
*Raises:*\n
raise HTTPError if the HTTP request returned an unsuccessful status code.
*Example:*\n
| ${queue}= | Get Queue | queue_name=testQueue | vhost=/ |
| Log Dictionary | ${queue} |
| ${value}= | Get From Dictionary | ${queue} | name |
| Log | ${value} |
=>\n
Dictionary size is 23 and it contains following items:
| arguments | {u'arg1': u'value1', u'arg2': [u'list_value', False, u'15240']} |
| auto_delete | False |
| backing_queue_status | {u'q1': 0, u'q3': 0, u'q2': 0, u'q4': 0, u'avg_ack_egress_rate': 0.0, u'ram_msg_count': 0, u'ram_ack_count': 0, u'len': 0, u'persistent_count': 0, u'target_ram_count': u'infinity', u'next_seq_id': 0, u'delta': [u'delta', u'undefined', 0, u'undefined'], u'pending_acks': 0, u'avg_ack_ingress_rate': 0.0, u'avg_egress_rate': 0.0, u'avg_ingress_rate': 0.0} |
| consumer_details | [] |
| consumer_utilisation | |
| consumers | 0 |
| deliveries | [] |
| durable | True |
| exclusive_consumer_tag | |
| idle_since | 2014-09-16 7:37:35 |
| incoming | [{u'stats': {u'publish_details': {u'rate': 0.0}, u'publish': 5}, u'exchange': {u'vhost': u'/', u'name': u'testExchange'}}] |
| memory | 34528 |
| messages | 0 |
| messages_details | {u'rate': 0.0} |
| messages_ready | 0 |
| messages_ready_details | {u'rate': 0.0} |
| messages_unacknowledged | 0 |
| messages_unacknowledged_details | {u'rate': 0.0} |
| name | testQueue |
| node | rabbit@primary |
| policy | |
| state | running |
| vhost | / |
${value} = testQueue
"""
path = f'/queues/{self._quote_vhost(vhost)}/{quote(queue_name)}'
response = requests.get(self.http_connection.url + path,
auth=self.http_connection.auth,
headers=self._prepare_request_headers(),
timeout=self.http_connection.timeout)
response.raise_for_status()
return response.json()
def queues(self) -> List[Dict[str, Any]]:
""" List of queues.
*Returns:*\n
List of queues in JSON format.
*Raises:*\n
raise HTTPError if the HTTP request returned an unsuccessful status code.
"""
url = self.http_connection.url + '/queues'
response = requests.get(url, auth=self.http_connection.auth,
headers=self._prepare_request_headers(),
timeout=self.http_connection.timeout)
response.raise_for_status()
return response.json()
def get_queues_on_vhost(self, vhost: str = '%2F') -> List[Dict[str, Any]]:
""" List of queues on virtual host.
*Args:*\n
_vhost_ - virtual host name (quoted with requests.utils.quote);\n
*Returns:*\n
List of queues in JSON format.
*Raises:*\n
raise HTTPError if the HTTP request returned an unsuccessful status code.
"""
url = self.http_connection.url + '/queues/' + self._quote_vhost(vhost)
response = requests.get(url, auth=self.http_connection.auth,
headers=self._prepare_request_headers(),
timeout=self.http_connection.timeout)
response.raise_for_status()
return response.json()
def get_names_of_queues_on_vhost(self, vhost: str = '%2F') -> List[str]:
"""
List of queues names on virtual host.
*Args:*\n
_vhost_: virtual host name (quoted with requests.utils.quote);
*Returns:*\n
List of queues names.
*Example:*\n
| ${names}= | Get Names Of Queues On Vhost |
| Log List | ${names} |
=>\n
| List has one item:
| federation: ex2 -> rabbit@server.net.ru
"""
return [item['name'] for item in self.get_queues_on_vhost(vhost)]
def get_binding_exchange_with_queue_list(self, exchange_name: str, queue_name: str,
vhost: str = '%2F') -> List[Dict[str, Any]]:
"""
Get information about bindings of exchange with queue.
Parameters are quoted with requests.utils.quote.
*Args:*\n
_exchange_name_ - exchange name;\n
_queue_name_ - queue name;\n
_vhost_ - virtual host name (quoted with requests.utils.quote);\n
*Returns:*\n
List of bindings of exchange with queue in JSON format.
*Raises:*\n
raise HTTPError if the HTTP request returned an unsuccessful status code.
*Example:*\n
| @{bind}= | Get Binding Exchange With Queue List | exchange_name=testExchange | queue_name=testQueue | vhost=/ |
| Log Dictionary | ${bind[0]} |
| Log | ${bind[0]["vhost"]} |
=>\n
Dictionary size is 7 and it contains following items:
| arguments | {'arg1': 'value1', 'arg2': ['str1', False]} |
| destination | testQueue |
| destination_type | queue |
| properties_key | ~2_oPmnDANCoVhkSJTkivZw |
| routing_key: | |
| source | testExchange |
| vhost: | / |
"""
path = '/bindings/{vhost}/e/{exchange}/q/{queue}'.format(
vhost=self._quote_vhost(vhost),
exchange=quote(exchange_name),
queue=quote(queue_name))
response = requests.get(self.http_connection.url + path,
auth=self.http_connection.auth,
headers=self._prepare_request_headers(),
timeout=self.http_connection.timeout)
response.raise_for_status()
return response.json()
def get_message(self, queue_name: str, count: int, requeue: bool, encoding: str, truncate: int = None,
vhost: str = '%2F', ackmode: str = 'ack_requeue_true') -> List[Dict[str, Any]]:
"""
Get message from the queue.
*Args:*\n
_queue_name_ - queue name;\n
_count_ - number of messages to get;\n
_requeue_ - re-placing received message in the queue with setting of redelivered attribute (true, false);\n
_encoding_ - message encoding (auto, base64);\n
_truncate_ - size of the message split (in bytes) in case it is greater than specified parameter (optional);\n
_vhost_ - virtual host name (quoted with requests.utils.quote);\n
_ackmode_ - determines whether the messages will be removed from the queue.
If ackmode is ack_requeue_true or reject_requeue_true they will be requeued.
If ackmode is ack_requeue_false or reject_requeue_false they will be removed;\n
*Returns:*\n
List with information about returned messages in dictionary format.
Body of the message in the dictionary is "payload" key.
*Raises:*\n
raise HTTPError if the HTTP request returned an unsuccessful status code.
*Example:*\n
| ${msg}= | Get Message | queue_name=testQueue | count=2 | requeue=false | encoding=auto | truncate=50000 | vhost=/ |
| Log List | ${msg} |
=>\n
List length is 5 and it contains following items:
| 0 | {'payload': 'message body 0', 'exchange': 'testExchange', 'routing_key': 'testQueue', 'payload_bytes': 14, 'message_count': 4, 'payload_encoding': 'string', 'redelivered': False, 'properties': []} |
| 1 | {'payload': 'message body 1', 'exchange': 'testExchange', 'routing_key': 'testQueue', 'payload_bytes': 14, 'message_count': 3, 'payload_encoding': 'string', 'redelivered': False, 'properties': []} |
| ... |
"""
path = f'/queues/{self._quote_vhost(vhost)}/{quote(queue_name)}/get'
body = {"count": count, "requeue": requeue, "encoding": encoding, "ackmode": ackmode}
if truncate is not None:
body["truncate"] = truncate
response = requests.post(self.http_connection.url + path,
auth=self.http_connection.auth,
headers=self._prepare_request_headers(body=body),
data=json.dumps(body),
timeout=self.http_connection.timeout)
response.raise_for_status()
return response.json()
def vhosts(self) -> List[Dict[str, Any]]:
""" List of virtual hosts.
*Returns:*\n
List of virtual hosts in JSON format.
*Raises:*\n
raise HTTPError if the HTTP request returned an unsuccessful status code.
"""
url = self.http_connection.url + '/vhosts'
response = requests.get(url, auth=self.http_connection.auth,
headers=self._prepare_request_headers(),
timeout=self.http_connection.timeout)
response.raise_for_status()
return response.json()
def nodes(self) -> List[Dict[str, Any]]:
""" List of nodes.
*Returns:*\n
List of nodes in JSON format.
*Raises:*\n
raise HTTPError if the HTTP request returned an unsuccessful status code.
"""
url = self.http_connection.url + '/nodes'
response = requests.get(url, auth=self.http_connection.auth,
headers=self._prepare_request_headers(),
timeout=self.http_connection.timeout)
response.raise_for_status()
return response.json()
def _cluster_name(self) -> List[Dict[str, Any]]:
""" List of clusters.
*Returns:*\n
List of clusters in JSON format.
*Raises:*\n
raise HTTPError if the HTTP request returned an unsuccessful status code.
"""
url = self.http_connection.url + '/cluster-name'
response = requests.get(url, auth=self.http_connection.auth,
headers=self._prepare_request_headers(),
timeout=self.http_connection.timeout)
response.raise_for_status()
return response.json() | /robotframework-rabbitmq-3.0.0.tar.gz/robotframework-rabbitmq-3.0.0/src/RabbitMq.py | 0.886923 | 0.150778 | RabbitMq.py | pypi |
import select
import socket
import six
import robot
from robot.libraries.BuiltIn import BuiltIn
from pyrad import packet, dictionary, tools
# Default receive timeout in seconds, used by all "Receive *" keywords.
TIMEOUT = 10.0
# Default path of the RADIUS attribute dictionary file (pyrad format).
DEFAULT_DICT = 'dictionary'
class RadiusLibrary(object):
    """``RadiusLibrary`` is a test library providing keywords for handling the RADIUS protocol.
    This library uses the pyrad package for RADIUS protocol handling.
    Pyrad source code is located at https://github.com/wichert/pyrad. The library supports the creation of RADIUS clients and servers, and supports authentication, accounting and change of authorization requests.
    Multiple client and server sessions can be created through the use of the `alias` parameter.
    = Examples =
    == Client ==
    Example of client authentication session:
    | `Create Client` | server=127.0.0.1 | port=1812 | secret=mysecret |
    | `Create Access Request` |
    | `Add Request Attribute` | User-Name | subscriber |
    | `Add Request Attribute` | User-Password | mypass |
    | `Add Request Attribute` | Acct-Session-Id | someid |
    | `Send Request` |
    | `Receive Access Accept` | timeout=5.0 |
    | `Response Should Contain Attribute` | Framed-IP-Address | 10.0.0.100 |
    Example of client accounting session:
    | `Create Client` | server=127.0.0.1 | port=1813 | secret=mysecret |
    | `Create Accounting Request` |
    | `Add Request Attribute` | User-Name | subscriber |
    | `Add Request Attribute` | Acct-Session-Id | someid |
    | `Add Request Attribute` | Acct-Status-Type | Start |
    | `Send Request` |
    | `Receive Accounting Response` |
    == Server ==
    Example of server session:
    | `Create Server` | server=127.0.0.1 | port=1812 | secret=mysecret |
    | `Receive Access Request` |
    | `Request Should Contain Attribute` | User-Name | subscriber |
    | `Request Should Contain Attribute` | User-Password | mypass |
    | `Request Should Contain Attribute` | Acct-Session-Id |
    | `Create Access Accept` |
    | `Add Response Attribute` | Framed-IP-Address | 10.0.0.100 |
    | `Send Response` |
    """
    ROBOT_LIBRARY_SCOPE = 'TEST CASE'

    def __init__(self):
        # Independent caches so client and server sessions can be addressed
        # separately by alias.
        self._client = robot.utils.ConnectionCache('No Clients Created')
        self._server = robot.utils.ConnectionCache('No Servers Created')
        self.builtin = BuiltIn()

    def create_client(self, alias, address, port,
                      secret, raddict=DEFAULT_DICT,
                      authenticator=True):
        """ Create Client: create a RADIUS session to a server.
        - ``alias:`` Alias to identify the session to use.
        - ``address:`` IP address of the RADIUS server.
        - ``port:`` IP port of the RADIUS server.
        - ``secret:`` RADIUS secret.
        - ``raddict:`` Path to RADIUS dictionary.
        - ``authenticator:`` Authenticator boolean switch.
        Examples:
        | Create Client | auth_client | 127.0.0.1 | 1812 | mysecret | |
        | Create Client | acct_client | 127.0.0.1 | 1813 | mysecret | dictionary=mydict |
        | Create Client | coa_client | 127.0.0.1 | 3799 | mysecret | authenticator=False |
        The next step after creating a client is to create a request, using the `Create Access Request` keyword for example, and send it with `Send Request`.
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.bind(('', 0))
        # NOTE(review): setblocking(0) overrides the timeout set just above;
        # receive timeouts are actually enforced via select() in
        # _receive_response / _receive_request.
        sock.settimeout(3.0)
        sock.setblocking(0)
        request = robot.utils.ConnectionCache('No Client Sessions Created')
        response = robot.utils.ConnectionCache('No Client Response Created')
        session = {'sock': sock,
                   'address': str(address),
                   'port': int(port),
                   'secret': six.b(str(secret)),
                   'dictionary': dictionary.Dictionary(raddict),
                   'authenticator': authenticator,
                   'request': request,
                   'response': response}
        self._client.register(session, alias=alias)
        return session

    def _create_request(self, alias, code):
        """Create a pyrad request packet of the given `code` for the client
        session identified by `alias` and register it in the session's
        request cache (keyed by the packet id)."""
        client = self._get_session(self._client, alias)
        secret = client['secret']
        # Local name kept distinct from the module-level `dictionary` import.
        rad_dict = client['dictionary']
        if code == packet.AccessRequest:
            request = packet.AuthPacket(code=code, secret=secret,
                                        dict=rad_dict)
        elif code in [packet.AccountingRequest, packet.CoARequest, packet.DisconnectRequest]:
            request = packet.AcctPacket(code=code, secret=secret,
                                        dict=rad_dict)
        client['request'].register(request, str(request.id))
        return request

    def create_access_request(self, alias=None):
        """ Creates an access request.
        - ``alias:`` alias to identify the session to use.
        """
        return self._create_request(alias, packet.AccessRequest)

    def create_accounting_request(self, alias=None):
        """ Creates an accounting request.
        - ``alias:`` alias to identify the session to use.
        """
        return self._create_request(alias, packet.AccountingRequest)

    def create_coa_request(self, alias=None):
        """ Creates an coa request.
        - ``alias:`` alias to identify the session to use.
        """
        return self._create_request(alias, packet.CoARequest)

    def create_disconnect_request(self, alias=None):
        """ Creates a disconnect request.
        - ``alias:`` alias to identify the session to use.
        """
        return self._create_request(alias, packet.DisconnectRequest)

    def _add_attribute(self, cache, key, value, alias):
        """Add a RADIUS attribute to the pending request (client cache) or
        pending response (server cache) of the session given by `alias`."""
        key = str(key)
        # Bug fix: the bare name `unicode` does not exist on Python 3 and
        # raised NameError; six.text_type is `unicode` on Python 2 and
        # `str` on Python 3.
        if isinstance(value, six.text_type):
            value = str(value)
        session = self._get_session(cache, alias)
        # Local name renamed from `packet`, which shadowed the pyrad
        # `packet` module import.
        if cache == self._client:
            target = session['request'].get_connection(alias)
        if cache == self._server:
            target = session['response'].get_connection(alias)
        attr_dict_item = target.dict.attributes[key]
        if attr_dict_item.type == 'integer':
            # Named integer values (e.g. Acct-Status-Type=Start) are left as
            # strings so pyrad can map them; plain numbers are converted.
            if attr_dict_item.values.HasForward(value) == False:
                value = int(value)
        elif attr_dict_item.type == 'string':
            value = str(value)
        if attr_dict_item.encrypt == 1:
            value = target.PwCrypt(value)
        target.AddAttribute(key, value)

    def add_request_attribute(self, key, value, alias=None):
        """Adds attribute to the created RADIUS request.
        - ``key:`` RADIUS attribute identifier, ie User-Name, Acct-Session-Id.
        - ``value:`` RADIUS attribute value.
        - ``alias:`` alias to identify the client session to use.
        """
        return self._add_attribute(self._client, key, value, alias)

    def send_request(self, alias=None):
        """Sends RADIUS client request using session specified by `alias`.
        - ``alias:`` alias to identify the client session to use.
        """
        client = self._get_session(self._client, alias)
        request = client['request'].get_connection(alias)
        pdu = request.RequestPacket()
        client['sock'].sendto(pdu, (client['address'], client['port']))
        return dict(request)

    def _receive_response(self, alias, code, timeout):
        """Wait up to `timeout` seconds for a datagram on the client socket,
        decode it, register it in the response cache and verify its code."""
        client = self._get_session(self._client, alias)
        ready = select.select([client['sock']], [], [], float(timeout))
        pkt = None
        if ready[0]:
            data, addr = client['sock'].recvfrom(1024)
            pkt = packet.Packet(secret=client['secret'], packet=data,
                                dict=client['dictionary'])
            client['response'].register(pkt, str(pkt.id))
            self.builtin.log(pkt.keys())
            if pkt.code != code:
                # TODO: name packet code instead of id.
                self.builtin.log('Expected {0}, received {1}'.format(code, pkt.code))
                raise Exception("received {}".format(pkt.code))
        if pkt is None:
            raise Exception("Did not receive any answer")
        return pkt

    def receive_access_accept(self, alias=None, timeout=TIMEOUT):
        """ Receives an access accept.
        - ``alias:`` alias to identify the session to use.
        - ``timeout:`` Sets receive timeout in seconds(float).
        """
        return self._receive_response(alias, packet.AccessAccept, timeout)

    def receive_access_reject(self, alias=None, timeout=TIMEOUT):
        """ Receives an access reject.
        - ``alias:`` alias to identify the session to use.
        - ``timeout:`` Sets receive timeout in seconds(float).
        """
        return self._receive_response(alias, packet.AccessReject, timeout)

    def receive_accounting_response(self, alias=None, timeout=TIMEOUT):
        """ Receives an accounting response.
        - ``alias:`` alias to identify the session to use.
        - ``timeout:`` Sets receive timeout in seconds(float).
        """
        return self._receive_response(alias, packet.AccountingResponse, timeout)

    def receive_coa_ack(self, alias=None, timeout=TIMEOUT):
        """ Receives a coa ack response.
        - ``alias:`` alias to identify the session to use.
        - ``timeout:`` Sets receive timeout in seconds(float).
        """
        # Bug fix: this keyword previously called the server-side
        # _receive_request with packet.CoARequest, so it could never match
        # the CoA-ACK that the client session actually receives.
        return self._receive_response(alias, packet.CoAACK, timeout)

    def receive_coa_nack(self, alias=None, timeout=TIMEOUT):
        """ Receives a coa nack response.
        - ``alias:`` alias to identify the session to use.
        - ``timeout:`` Sets receive timeout in seconds(float).
        """
        # Bug fix: as with receive_coa_ack, this must wait for a client-side
        # CoA-NAK response, not a server-side CoA request.
        return self._receive_response(alias, packet.CoANAK, timeout)

    def response_should_contain_attribute(self, key, val=None, alias=None):
        """ Checks RADIUS response if specified `key`, or `key value` exists.
        If not, An error will be raised.
        - ``key:`` RADIUS attribute identifier, ie Framed-IP-Address.
        - ``val:`` RADIUS attribute value (optional).
        - ``alias:`` Alias to identify the client session to use.
        """
        return self._should_contain_attribute(self._client, key, val, alias)

    # Server section
    def create_server(self, alias=None, address='127.0.0.1', port=0, secret='secret', raddict=DEFAULT_DICT):
        """ Creates a RADIUS server.
        - ``alias:`` Alias to identify the server session to use.
        - ``address:`` IP address of the RADIUS server.
        - ``port:`` IP port of the RADIUS server.
        - ``secret:`` RADIUS secret.
        - ``raddict:`` Path to RADIUS dictionary.
        Examples:
        | Create Server | auth_server | 127.0.0.1 | 1812 | mysecret | |
        | Create Server | acct_server | 127.0.0.1 | 1813 | mysecret | dictionary=mydict |
        | Create Server | coa_server | 127.0.0.1 | 3799 | mysecret | |
        After creating a server it is ready to receive requests using the `Receive Access Request` keyword for example.
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((address, int(port)))
        # Non-blocking socket; receive timeouts are enforced via select().
        sock.setblocking(0)
        request = robot.utils.ConnectionCache('No Server Requests Created')
        response = robot.utils.ConnectionCache('No Server Responses Created')
        server = {'sock': sock,
                  'secret': six.b(str(secret)),
                  'dictionary': dictionary.Dictionary(raddict),
                  'request': request,
                  'response': response}
        self._server.register(server, alias=alias)
        return server

    def _receive_request(self, alias, code, timeout):
        """Wait up to `timeout` seconds for a datagram on the server socket,
        decode it, register it in the request cache and verify its code.
        The sender address is stored on the packet for Send Response."""
        server = self._get_session(self._server, alias)
        ready = select.select([server['sock']], [], [], float(timeout))
        pkt = None
        if ready[0]:
            data, addr = server['sock'].recvfrom(1024)
            pkt = packet.Packet(secret=server['secret'], packet=data,
                                dict=server['dictionary'])
            server['request'].register(pkt, str(pkt.id))
            self.builtin.log(pkt.code)
            if pkt.code != code:
                self.builtin.log('Expected {0}, received {1}'.format(code, pkt.code))
                raise Exception("received {}".format(pkt.code))
        if pkt is None:
            raise Exception("Did not receive any answer")
        pkt.addr = addr
        return pkt

    def request_should_contain_attribute(self, key, val=None, alias=None):
        """ Checks RADIUS request if specified `key`, or `key value` exists.
        If not, An error will be raised.
        - ``key:`` RADIUS attribute identifier, ie Framed-IP-Address.
        - ``val:`` RADIUS attribute value (optional).
        - ``alias:`` Alias to identify the server session to use.
        """
        return self._should_contain_attribute(self._server, key, val, alias)

    def _create_response(self, alias, code):
        """Create a reply of the given `code` from the last received request
        of the server session and register it in the response cache."""
        session = self._get_session(self._server, alias)
        request = session['request'].get_connection(alias)
        reply = request.CreateReply()
        reply.code = code
        session['response'].register(reply, str(reply.code))
        # todo: deregister request
        return reply

    def create_access_accept(self, alias=None):
        """ Creates an access accept response.
        - ``alias:`` alias to identify the session to use.
        """
        return self._create_response(alias, packet.AccessAccept)

    def create_access_reject(self, alias=None):
        """ Creates an access reject response.
        - ``alias:`` alias to identify the session to use.
        """
        return self._create_response(alias, packet.AccessReject)

    def create_accounting_response(self, alias=None):
        """ Creates an accounting response.
        - ``alias:`` alias to identify the session to use.
        """
        return self._create_response(alias, packet.AccountingResponse)

    def create_coa_ack(self, alias=None):
        """ Creates a coa ack response.
        - ``alias:`` alias to identify the session to use.
        """
        return self._create_response(alias, packet.CoAACK)

    def create_coa_nack(self, alias=None):
        """ Creates a coa nack response.
        - ``alias:`` alias to identify the session to use.
        """
        return self._create_response(alias, packet.CoANAK)

    def create_disconnect_ack(self, alias=None):
        """ Creates a disconnect ack response.
        - ``alias:`` alias to identify the session to use.
        """
        return self._create_response(alias, packet.DisconnectACK)

    def create_disconnect_nack(self, alias=None):
        """ Creates a disconnect nack response.
        - ``alias:`` alias to identify the session to use.
        """
        return self._create_response(alias, packet.DisconnectNAK)

    def add_response_attribute(self, key, value, alias=None):
        """Adds attribute to the created RADIUS response.
        - ``key:`` RADIUS attribute identifier, ie User-Name, Acct-Session-Id.
        - ``value:`` RADIUS attribute value.
        - ``alias:`` alias to identify the server session to use.
        """
        return self._add_attribute(self._server, key, value, alias)

    def send_response(self, alias=None):
        """Sends RADIUS server response using session specified by `alias`.
        - ``alias:`` alias to identify the server session to use.
        """
        server = self._get_session(self._server, alias)
        request = server['request'].get_connection(alias)
        response = server['response'].get_connection(alias)
        pdu = response.ReplyPacket()
        # The reply is sent back to the address recorded on the request by
        # _receive_request.
        server['sock'].sendto(pdu, request.addr)
        return request

    def receive_accounting_request(self, alias=None, timeout=TIMEOUT):
        """ Receives an accounting request.
        - ``alias:`` alias to identify the session to use.
        - ``timeout:`` Sets receive timeout in seconds(float).
        """
        return self._receive_request(alias, packet.AccountingRequest, timeout)

    def receive_coa_request(self, alias=None, timeout=TIMEOUT):
        """ Receives a coa request.
        - ``alias:`` alias to identify the session to use.
        - ``timeout:`` Sets receive timeout in seconds(float).
        """
        return self._receive_request(alias, packet.CoARequest, timeout)

    def receive_disconnect_request(self, alias=None, timeout=TIMEOUT):
        """ Receives a disconnect request.
        - ``alias:`` alias to identify the session to use.
        - ``timeout:`` Sets receive timeout in seconds(float).
        """
        return self._receive_request(alias, packet.DisconnectRequest, timeout)

    def receive_access_request(self, alias=None, timeout=TIMEOUT):
        """ Receives an access request.
        - ``alias:`` alias to identify the session to use.
        - ``timeout:`` Sets receive timeout in seconds(float).
        """
        return self._receive_request(alias, packet.AccessRequest, timeout)

    def _get_session(self, cache, alias):
        """Return the session registered under `alias`, or the currently
        active session when no alias is given."""
        if alias:
            return cache.switch(alias)
        else:
            return cache.get_connection()

    def _should_contain_attribute(self, cache, key, val, alias):
        """Assert that the last client response (client cache) or the last
        server request (server cache) contains attribute `key`, and, when
        `val` is given, that `val` is among its values."""
        session = self._get_session(cache, alias)
        request = None
        if cache == self._client:
            request = session['response'].get_connection(alias)
        elif cache == self._server:
            request = session['request'].get_connection(alias)
        else:
            raise BaseException('No match for cache')
        # BaseException is kept (not narrowed) for backward compatibility
        # with callers that may catch it explicitly.
        if not val:
            if str(key) in request:
                return True
            else:
                raise BaseException('Key {} not found in {}'.format(key, str(request.keys())))
        else:
            if str(key) in request and val in request[str(key)]:
                return
            else:
                raise BaseException('value "{}" not in {}'.format(val, request[str(key)]))
class ConditionParser(object):
    """Parses and evaluates condition strings such as
    'field == 1 && other.field != 2'.

    Individual comparisons are combined with '&&' (and) / '||' (or) and
    evaluated strictly left to right -- there is no operator precedence.
    """

    def __init__(self, condition):
        import re
        # Bug fix: the pattern is now a raw string. '(&&|\|\|)' as a normal
        # literal relied on the invalid escape '\|' being passed through,
        # which is deprecated in Python 3.
        logicals = re.split(r'(&&|\|\|)', condition)
        self.conditions = self._get_individual_conditions(logicals)

    def _get_individual_conditions(self, logicals):
        """Keep operator tokens as-is; wrap every other token in an
        ExpressionEvaluator."""
        conditions = []
        for element in logicals:
            if element in ('&&', '||'):
                conditions.append(element)
            else:
                conditions.append(ExpressionEvaluator(element))
        return conditions

    def evaluate(self, msg_fields):
        """Evaluate the parsed conditions against `msg_fields`, folding
        results left to right with the most recently seen operator."""
        status = True
        operator = '&&'
        for condition in self.conditions:
            if condition in ('&&', '||'):
                operator = condition
            else:
                evaluated = condition.evaluate(msg_fields)
                if operator == '&&':
                    status = status and evaluated
                else:
                    status = status or evaluated
        return status
class ExpressionEvaluator(object):
    """Evaluates a single comparison of the form 'name == <int>' or
    'name != <int>'.

    `name` may be a dotted path into nested message fields; the value
    compared is the leaf field's `.int` attribute.
    """

    def __init__(self, condition):
        # '==' is checked first, matching the original parse order.
        if '==' in condition:
            self.name, self.value = self._parse('==', condition)
            self._negated = False
        elif '!=' in condition:
            self.name, self.value = self._parse('!=', condition)
            self._negated = True
        else:
            raise IllegalConditionException('Unsupported operation: %s' % condition)

    def evaluate(self, msg_fields):
        """Return the boolean result of this comparison against `msg_fields`.

        Replaces the original closure-assigned instance attribute with a
        plain method; the call contract (`obj.evaluate(fields)`) is the same.
        """
        result = self._get_field(msg_fields) == self.value
        return not result if self._negated else result

    def _parse(self, operator, condition):
        """Split `condition` on `operator` and return (name, int value).
        Raises IllegalConditionException for a missing name or bad value."""
        cond = condition.partition(operator)
        name = cond[0].strip()
        if not name:
            raise IllegalConditionException('Illegal condition: %s' % condition)
        value = self._parse_value(cond[2].strip())
        return name, value

    def _parse_value(self, value):
        try:
            return int(value)
        # Bug fix: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; int() only raises ValueError/TypeError here.
        except (ValueError, TypeError):
            raise IllegalConditionException('Expected integer, unsupported value given: %s' % value)

    def _get_field(self, elem):
        """Walk the dotted `self.name` path into `elem` and return the leaf
        field's integer value (its `.int` attribute)."""
        for part in self.name.split('.'):
            if part not in elem:
                raise IllegalConditionException('Given name condition: %s not found in message fields' % self.name)
            elem = elem[part]
        return elem.int
class IllegalConditionException(Exception):
    """Raised when a condition string cannot be parsed (unsupported operator,
    missing field name, non-integer value) or when a referenced field is
    absent from the message fields being evaluated."""
    pass
import sys
from six import itervalues
if sys.version_info < (3,):
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from collections import KeysView, ValuesView, ItemsView
except ImportError:
pass
else:
try:
from _thread import get_ident as _get_ident
except ImportError:
from _dummy_thread import get_ident as _get_ident
try:
from collections.abc import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as for regular dictionaries.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary.  Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.
        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            # Re-initialization must not discard the existing linked list,
            # so the sentinel is only built when __root does not exist yet.
            self.__root
        except AttributeError:
            self.__root = root = []  # sentinel node
            root[:] = [root, root, None]
            self.__map = {}
        # __update is the class-level alias of update() (defined further
        # down the class, outside this view); it fills in the initial items.
        self.__update(*args, **kwds)
    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link which goes at the end of the linked
        # list, and the inherited dictionary is updated with the new key/value pair.
        # Overwriting an existing key only updates the value; the key keeps
        # its original position in the ordering.
        if key not in self:
            root = self.__root
            last = root[0]  # root's PREV slot points at the current tail
            last[1] = root[0] = self.__map[key] = [last, root, key]
        dict_setitem(self, key, value)
    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which is
        # then removed by updating the links in the predecessor and successor nodes.
        # dict_delitem raises KeyError first, so __map stays consistent with
        # the underlying dict when the key is missing.
        dict_delitem(self, key)
        link_prev, link_next, key = self.__map.pop(key)
        link_prev[1] = link_next
        link_next[0] = link_prev
    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Walk the circular linked list forward (root[1] is the oldest
        # entry), yielding each link's KEY slot (index 2).
        root = self.__root
        curr = root[1]
        while curr is not root:
            yield curr[2]
            curr = curr[1]
    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Same walk as __iter__, but backwards via the PREV pointers
        # (root[0] is the newest entry).
        root = self.__root
        curr = root[0]
        while curr is not root:
            yield curr[2]
            curr = curr[0]
def clear(self):
    'od.clear() -> None. Remove all items from od.'
    try:
        # Bug fix: the original called a nonexistent global ``itervalues(...)``
        # (a NameError in this Python 3 port), and the ``except AttributeError``
        # below would not have caught it.  Iterate the map's own values instead.
        for node in self.__map.values():
            del node[:]  # break reference cycles between the list nodes
        root = self.__root
        root[:] = [root, root, None]
        self.__map.clear()
    except AttributeError:
        # __root/__map may not exist yet if clear() runs before __init__.
        pass
    dict.clear(self)
def popitem(self, last=True):
    '''od.popitem() -> (k, v), return and remove a (key, value) pair.
    Pairs are returned in LIFO order if last is true or FIFO order if false.
    '''
    if not self:
        raise KeyError('dictionary is empty')
    root = self.__root
    if last:
        # Unlink the node just before the sentinel (most recently added key).
        link = root[0]
        link_prev = link[0]
        link_prev[1] = root
        root[0] = link_prev
    else:
        # Unlink the node just after the sentinel (oldest key).
        link = root[1]
        link_next = link[1]
        root[1] = link_next
        link_next[0] = root
    key = link[2]
    del self.__map[key]
    value = dict.pop(self, key)
    return key, value
# -- the following methods do not depend on the internal structure --
def keys(self):
    'od.keys() -> list of keys in od'
    # Iterating self yields the keys in insertion order.
    return [key for key in self]
def values(self):
    'od.values() -> list of values in od'
    # Look up each key in iteration (insertion) order.
    return list(map(self.__getitem__, self))
def items(self):
    'od.items() -> list of (key, value) pairs in od'
    pairs = []
    for key in self:
        pairs.append((key, self[key]))
    return pairs
def iterkeys(self):
    'od.iterkeys() -> an iterator over the keys in od'
    for key in self:
        yield key
def itervalues(self):
    'od.itervalues -> an iterator over the values in od'
    return (self[key] for key in self)
def iteritems(self):
    'od.iteritems -> an iterator over the (key, value) items in od'
    return ((key, self[key]) for key in self)
def update(self, *args, **kwds):
    '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
    If E is a dict instance, does: for k in E: od[k] = E[k]
    If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
    Or if E is an iterable of items, does: for k, v in E: od[k] = v
    In either case, this is followed by: for k, v in F.items(): od[k] = v
    '''
    if len(args) > 1:
        raise TypeError('update() takes at most 2 positional '
                        'arguments (%d given)' % (len(args),))
    other = args[0] if args else ()
    # Make progressively weaker assumptions about "other": mapping,
    # keys()-providing object, then plain iterable of pairs.
    if isinstance(other, dict):
        pairs = ((key, other[key]) for key in other)
    elif hasattr(other, 'keys'):
        pairs = ((key, other[key]) for key in other.keys())
    else:
        pairs = other
    for key, value in pairs:
        self[key] = value
    for key, value in kwds.items():
        self[key] = value
__update = update  # let subclasses override update without breaking __init__
__marker = object()  # unique sentinel so pop() can distinguish "no default" from default=None
def pop(self, key, default=__marker):
    '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
    If key is not found, d is returned if given, otherwise KeyError is raised.
    '''
    if key in self:
        result = self[key]
        del self[key]  # routes through __delitem__ to keep the linked list in sync
        return result
    if default is self.__marker:
        raise KeyError(key)
    return default
def setdefault(self, key, default=None):
    'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
    if key not in self:
        self[key] = default
    return self[key]
def __repr__(self, _repr_running={}):
    'od.__repr__() <==> repr(od)'
    # _repr_running is a deliberately shared mutable default used to detect
    # recursive repr() calls per (object id, thread id) pair.
    call_key = id(self), _get_ident()
    if call_key in _repr_running:
        return '...'
    _repr_running[call_key] = 1
    try:
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())
    finally:
        del _repr_running[call_key]
def __reduce__(self):
    'Return state information for pickling'
    items = [[k, self[k]] for k in self]
    inst_dict = vars(self).copy()
    # Strip the internal bookkeeping attributes (__root, __map); a fresh
    # instance rebuilds them itself.
    for k in vars(OrderedDict()):
        inst_dict.pop(k, None)
    if inst_dict:
        return self.__class__, (items,), inst_dict
    return self.__class__, (items,)
def copy(self):
    'od.copy() -> a shallow copy of od'
    # Build a new instance of the same (sub)class from self's items.
    return type(self)(self)
@classmethod
def fromkeys(cls, iterable, value=None):
    '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
    and values equal to v (which defaults to None).
    '''
    # Insertion order follows the iteration order of S.
    d = cls()
    for key in iterable:
        d[key] = value
    return d
def __eq__(self, other):
    '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
    while comparison to a regular mapping is order-insensitive.
    '''
    if isinstance(other, OrderedDict):
        # items() lists pairs in insertion order, so this compares order too.
        return len(self) == len(other) and self.items() == other.items()
    return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
# -- the following methods are only used in Python 2.7 --
def viewkeys(self):
    "od.viewkeys() -> a set-like object providing a view on od's keys"
    # KeysView comes from the module-level _abcoll import (Python 2.7 only).
    return KeysView(self)
def viewvalues(self):
    "od.viewvalues() -> an object providing a view on od's values"
    # ValuesView comes from the module-level _abcoll import (Python 2.7 only).
    return ValuesView(self)
def viewitems(self):
    "od.viewitems() -> a set-like object providing a view on od's items"
    # ItemsView comes from the module-level _abcoll import (Python 2.7 only).
    return ItemsView(self)
# end of http://code.activestate.com/recipes/576693/ }}} | /robotframework_rammbock_py3-0.4.0.2-py3-none-any.whl/Rammbock/ordered_dict.py | 0.424889 | 0.220007 | ordered_dict.py | pypi |
import os
from robot.libraries.BuiltIn import BuiltIn
from .core import RammbockCore
from .message_sequence import SeqdiagGenerator
from .version import VERSION
class Rammbock(RammbockCore):
    """Rammbock is a binary protocol testing library for Robot Test Automation Framework.

    To use Rammbock you need to first define a protocol, start the clients and servers you are going to mock,
    and then define a message template for each message you are going to send or receive.

    Example:
    | *Settings * |
    | Library | Rammbock |

    | *Test Cases* |
    | Send message | Define simple protocol |
    | | Start server |
    | | Start client |
    | | Send message | status:0xcafebabe |
    | | Verify server gets status | 0xcafebabe |
    | | [Teardown] | `Reset Rammbock` |

    | *Keywords* |
    | Define simple protocol | `New protocol` | SimpleProtocol |
    | | `u8` | msgId |
    | | `u8` | messageLength |
    | | `pdu` | messageLength - 2 |
    | | `End protocol` |
    | |
    | Start server | `Start UDP server` | 127.0.0.1 | 8282 | protocol=SimpleProtocol |
    | |
    | Start client | `Start UDP client` | protocol=SimpleProtocol |
    | | `Connect` | 127.0.0.1 | 8282 |
    | | | | | |
    | Send message | [Arguments] | @{params} |
    | | `New message` | SimpleRequest | SimpleProtocol | msgId:0xff |
    | | `u32` | status |
    | | `Client sends message` | @{params} |
    | |
    | Verify server gets status | [Arguments] | ${status} |
    | | ${msg} = | `Server receives message` |
    | | Should be equal | ${msg.status.hex} | ${status} |
    """

    ROBOT_LIBRARY_VERSION = VERSION

    def u8(self, name, value=None, align=None):
        """Add an unsigned 1 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(1, name, value, align)

    def u16(self, name, value=None, align=None):
        """Add an unsigned 2 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(2, name, value, align)

    def u24(self, name, value=None, align=None):
        """Add an unsigned 3 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(3, name, value, align)

    def u32(self, name, value=None, align=None):
        """Add an unsigned 4 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(4, name, value, align)

    def u40(self, name, value=None, align=None):
        """Add an unsigned 5 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(5, name, value, align)

    def u64(self, name, value=None, align=None):
        """Add an unsigned 8 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(8, name, value, align)

    def u128(self, name, value=None, align=None):
        """Add an unsigned 16 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(16, name, value, align)

    def i8(self, name, value=None, align=None):
        """Add an 1 byte integer field to template.

        This is a convenience method that simply calls `Int` keyword with predefined length."""
        self.int(1, name, value, align)

    def i32(self, name, value=None, align=None):
        """Add an 32 byte integer field to template.

        This is a convenience method that simply calls `Int` keyword with predefined length."""
        self.int(4, name, value, align)

    def array(self, size, type, name, *parameters):
        """Define a new array of given `size` and containing fields of type `type`.

        `name` is the name of this array element. The `type` is the name of keyword that is executed as the contents of
        the array and optional extra parameters are passed as arguments to this keyword.

        Examples:
        | Array | 8 | u16 | myArray |

        | u32 | length |
        | Array | length | someStruct | myArray | <argument for someStruct> |
        """
        self._new_list(size, name)
        BuiltIn().run_keyword(type, '', *parameters)
        self._end_list()

    def container(self, name, length, type, *parameters):
        """Define a container with given length.

        This is a convenience method creating a `Struct` with `length` containing fields defined in `type`.
        """
        self.new_struct('Container', name, 'length=%s' % length)
        BuiltIn().run_keyword(type, *parameters)
        self.end_struct()

    def case(self, size, kw, *parameters):
        """An element inside a bag started with `Start Bag`.

        The first argument is size which can be absolute value like `1`, a range
        like `0-3`, or just `*` to accept any number of elements.

        Examples:
        | Start bag | intBag |
        | case | 0-1 | u8 | foo | 42 |
        | case | 0-2 | u8 | bar | 1 |
        | End bag |
        """
        # TODO: check we are inside a bag!
        self._start_bag_case(size)
        BuiltIn().run_keyword(kw, *parameters)
        self._end_bag_case()

    def embed_seqdiag_sequence(self):
        """Create a message sequence diagram png file to output folder and embed the image to log file.

        You need to have seqdiag installed to create the sequence diagram. See http://blockdiag.com/en/seqdiag/
        """
        test_name = BuiltIn().replace_variables('${TEST NAME}')
        outputdir = BuiltIn().replace_variables('${OUTPUTDIR}')
        path = os.path.join(outputdir, test_name + '.seqdiag')
        SeqdiagGenerator().compile(path, self._message_sequence)
from math import ceil
import math
import sys
import re
from Rammbock.message import Field, BinaryField
from Rammbock.binary_tools import to_bin_of_length, to_0xhex, to_tbcd_binary, \
to_tbcd_value, to_bin, to_twos_comp, to_int
from robot.api import logger
from robot.libraries.BuiltIn import BuiltIn
from robot.utils import PY3, is_bytes, py2to3
@py2to3
class _TemplateField(object):
    """Base class for message template field types (uint, int, chars, ...).

    Subclasses set ``type`` and implement ``_encode_value``; this class
    provides the shared encode/decode/validate machinery.
    """

    def __init__(self, name, default_value):
        self._set_default_value(default_value)
        self.name = name

    # Class-level defaults, overridden by subclasses where needed.
    has_length = True
    can_be_little_endian = False
    referenced_later = False

    def get_static_length(self):
        # Only meaningful for statically sized fields.
        if not self.length.static:
            raise IndexError('Length of %s is dynamic.' % self._get_name())
        return self.length.value

    def _get_element_value(self, paramdict, name=None):
        return paramdict.get(self._get_name(name), self.default_value)

    def _get_element_value_and_remove_from_params(self, paramdict, name=None):
        # '*' acts as a wildcard value, but never for fields whose value is
        # filled in later by a reference (e.g. length fields).
        wild_card = paramdict.get('*') if not self.referenced_later else None
        return paramdict.pop(self._get_name(name),
                             self.default_value or wild_card)

    def encode(self, paramdict, parent, name=None, little_endian=False):
        value = self._get_element_value_and_remove_from_params(paramdict, name)
        if not value and self.referenced_later:
            # Value will be resolved once the referencing field is known.
            return PlaceHolderField(self)
        return self._to_field(name, value, parent, little_endian=little_endian)

    def _to_field(self, name, value, parent, little_endian=False):
        field_name, field_value = self._encode_value(value, parent, little_endian=little_endian)
        return Field(self.type, self._get_name(name), field_name, field_value, little_endian=little_endian)

    def decode(self, data, message, name=None, little_endian=False):
        data = self._prepare_data(data)
        length, aligned_length = self.length.decode_lengths(message, len(data))
        if len(data) < aligned_length:
            raise Exception("Not enough data for '%s'. Needs %s bytes, given %s" % (self._get_recursive_name(message), aligned_length, len(data)))
    return Field(self.type,
                 self._get_name(name),
                 data[:length],
                 aligned_len=aligned_length,
                 little_endian=little_endian and self.can_be_little_endian)

    def _prepare_data(self, data):
        return BuiltIn().convert_to_bytes(data)

    def validate(self, parent, paramdict, name=None):
        """Return a list of error strings (empty list == field is valid)."""
        name = name or self.name
        field = parent[name]
        value = field.bytes
        forced_value = self._get_element_value_and_remove_from_params(paramdict, name)
        logger.trace("Forced_value: '{}' of type '{}'. Value is '{}".format(forced_value, type(forced_value), value))
        if PY3 and is_bytes(forced_value):
            forced_value = BuiltIn().convert_to_string(forced_value)
            logger.trace('Forced_value converted to {}. Value is {}'.format(forced_value, value))
        try:
            if not forced_value or forced_value == 'None':
                return []
            elif forced_value.startswith('('):
                # '(a|b)' alternative patterns / '(val&mask)' masked match.
                return self._validate_pattern(forced_value, value, field)
        except AttributeError as e:
            e.args = ('Validating {}:{} failed. {}.\n Did you set default value as numeric object instead of string?'
                      .format(name, forced_value, e.args[0]),)
            raise e
        if forced_value.startswith('REGEXP'):
            return self._validate_regexp(forced_value, value, field)
        return self._validate_exact_match(forced_value, value, field)

    def _validate_regexp(self, forced_pattern, value, field):
        # Base implementation always fails; only Char supports REGEXP matching.
        return ["Value of field '%s' can not be matched to regular expression pattern '%s'" %
                (field._get_recursive_name(), forced_pattern)]

    def _validate_pattern(self, forced_pattern, value, field):
        if self._validate_or(forced_pattern, value, field):
            return []
        if self._validate_masked(forced_pattern, value):
            return []
        return ["Value of field '%s' does not match pattern '%s!=%s'" %
                (field._get_recursive_name(), BuiltIn().convert_to_string(to_0xhex(value)), forced_pattern)]

    def _validate_or(self, forced_pattern, value, field):
        # '(a|b|c)': succeed if any alternative matches exactly.
        if forced_pattern.find('|') != -1:
            patterns = forced_pattern[1:-1].split('|')
            for pattern in patterns:
                if self._is_match(pattern, value, field._parent):
                    return True
        return False

    def _validate_masked(self, forced_pattern, value):
        # '(value&mask)': compare only the bits selected by the mask.
        if forced_pattern.find('&') != -1:
            masked_val, masked_field = self._apply_mask_to_values(forced_pattern, value)
            if masked_val == masked_field:
                return True
        return False

    def _apply_mask_to_values(self, forced_pattern, value):
        val = forced_pattern[1:-1].split('&')[0].strip()
        mask = forced_pattern[1:-1].split('&')[1].strip()
        return to_int(val) & to_int(mask), to_int(to_0xhex(value)) & to_int(mask)

    def _is_match(self, forced_value, value, parent):
        # TODO: Should pass msg
        forced_binary_val, _ = self._encode_value(forced_value, parent)
        return forced_binary_val == value

    def _validate_exact_match(self, forced_value, value, field):
        if not self._is_match(forced_value, value, field._parent):
            return ['Value of field %s does not match %s!=%s' %
                    (field._get_recursive_name(), BuiltIn().convert_to_string(self._default_presentation_format(value)), forced_value)]
        return []

    def _default_presentation_format(self, value):
        return to_0xhex(value)

    def _get_name(self, name=None):
        return name or self.name or self.type

    def _raise_error_if_no_value(self, value, parent):
        if value in (None, ''):
            raise AssertionError('Value of %s not set' % self._get_recursive_name(parent))

    def _get_recursive_name(self, parent):
        if not parent:
            return self.name
        return parent._get_recursive_name() + self.name

    def _set_default_value(self, value):
        # '""' is the explicit "empty" marker; anything falsy becomes None.
        self.default_value = BuiltIn().convert_to_string(value) if value and value != '""' else None
class PlaceHolderField(object):
    """Stand-in stored in a message for a field whose value is resolved later
    (e.g. a length field that another field references)."""

    _type = 'referenced_later'
    _parent = None

    def __init__(self, template):
        self.template = template
class UInt(_TemplateField):
    """Unsigned integer field, big-endian by default."""

    type = 'uint'
    can_be_little_endian = True

    def __init__(self, length, name, default_value=None, align=None):
        _TemplateField.__init__(self, name, default_value)
        self.length = Length(length, align)

    def _encode_value(self, value, message, little_endian=False):
        self._raise_error_if_no_value(value, message)
        length, aligned_length = self.length.decode_lengths(message)
        binary = to_bin_of_length(length, value)
        # Little-endian output is the reversed big-endian byte string.
        binary = binary[::-1] if little_endian else binary
        return binary, aligned_length
class Int(UInt):
    """Signed integer field, encoded as two's complement."""

    type = 'int'
    can_be_little_endian = True

    def __init__(self, length, name, default_value=None, align=None):
        UInt.__init__(self, length, name, default_value, align)

    def _get_int_value(self, message, value):
        # Range-check against the signed range of the field's bit width,
        # then convert to the two's-complement bit pattern.
        bin_len = self.length.decode_lengths(message)[0] * 8
        min = pow(-2, (bin_len - 1))
        max = pow(2, (bin_len - 1)) - 1
        if not min <= to_int(value) <= max:
            raise AssertionError('Value %s out of range (%d..%d)'
                                 % (value, min, max))
        return to_twos_comp(value, bin_len)

    def _encode_value(self, value, message, little_endian=False):
        self._raise_error_if_no_value(value, message)
        value = self._get_int_value(message, value)
        return UInt._encode_value(self, value, message, little_endian)
class Char(_TemplateField):
    """Character (string) field, optionally terminated and NUL-padded."""

    type = 'chars'

    def __init__(self, length, name, default_value=None, terminator=None):
        _TemplateField.__init__(self, name, default_value)
        self._terminator = to_bin(terminator)
        self.length = Length(length)

    def _encode_value(self, value, message, little_endian=False):
        if isinstance(value, Field):
            value = value._value
        else:
            value = str(value or '')
            if PY3:
                # Terminator and padding are bytes, so the value must be too.
                value = BuiltIn().convert_to_bytes(value)
        value += self._terminator
        length, aligned_length = self.length.find_length_and_set_if_necessary(message, len(value))
        # Pad with NUL bytes up to the field length.
        return value.ljust(length, b'\x00'), aligned_length

    def _prepare_data(self, data):
        if PY3 and isinstance(data, str):
            data = data.encode("UTF-8")
        if self._terminator:
            # Cut the data just after the first terminator occurrence.
            return data[0:data.index(self._terminator) + len(self._terminator)]
        return data

    def _validate_regexp(self, forced_pattern, value, field):
        # Expected pattern format: 'REGEXP:<expression>' matched against
        # the field's ascii presentation.
        try:
            regexp = forced_pattern.split(':')[1].strip()
            if bool(re.match(regexp, field.ascii)):
                return []
            else:
                return ['Value of field %s does not match the RegEx %s!=%s' %
                        (field._get_recursive_name(), self._default_presentation_format(value), forced_pattern)]
        except re.error as e:
            raise Exception("Invalid RegEx Error : " + str(e))
class Binary(_TemplateField):
    """Binary field whose length is given in bits (must be static)."""

    type = 'bin'

    def __init__(self, length, name, default_value=None):
        _TemplateField.__init__(self, name, default_value)
        self.length = Length(length)
        if not self.length.static:
            raise AssertionError('Binary field length must be static. Length: %s' % length)

    def _encode_value(self, value, message, little_endian=False):
        self._raise_error_if_no_value(value, message)
        minimum_binary = to_bin(value)
        length, aligned = self.length.decode_lengths(message, len(minimum_binary))
        # length/aligned are bit counts; convert to bytes for the encoding.
        binary = to_bin_of_length(self._byte_length(length), value)
        return binary, self._byte_length(aligned)

    def _to_field(self, name, value, parent, little_endian=False):
        field_name, field_value = self._encode_value(value, parent, little_endian=little_endian)
        return BinaryField(self.length.value, self._get_name(name), field_name, field_value, little_endian=little_endian)

    def _byte_length(self, length):
        # Round a bit count up to whole bytes.
        return int(ceil(length / 8.0))

    def _is_match(self, forced_value, value, message):
        # Compare numerically so differing leading-zero lengths still match.
        forced_binary_val, _ = self._encode_value(forced_value, message)  # TODO: Should pass msg
        return int(to_0xhex(forced_binary_val), 16) == int(to_0xhex(value), 16)
class TBCD(_TemplateField):
    """Telephony BCD field: two decimal digits packed per byte."""

    type = 'tbcd'

    def __init__(self, size, name, default_value):
        _TemplateField.__init__(self, name, default_value)
        self.length = Length(size)

    def _encode_value(self, value, message, little_endian=False):
        self._raise_error_if_no_value(value, message)
        binary = to_tbcd_binary(value)
        length = self.length.decode(message, len(binary))
        return binary, self._byte_length(length)

    def _default_presentation_format(self, value):
        return to_tbcd_value(value)

    def _byte_length(self, length):
        # length is a digit count; two digits fit per byte.
        return int(ceil(length / 2.0))
class PDU(_TemplateField):
    """Marker field for the position of a protocol's payload (PDU)."""

    type = 'pdu'
    name = '__pdu__'

    def __init__(self, length):
        self.length = Length(length)

    def encode(self, params, parent, little_endian=False):
        # The PDU placeholder itself carries no encodable data.
        return None

    def validate(self, parent, paramdict, name=None):
        return []
def Length(value, align=None):
    """Factory returning a length descriptor for a length specifier.

    ``value`` may be a number (static), '*' (free) or an expression
    referencing another field (dynamic); ``align`` pads to a multiple.
    """
    value = str(value)
    if align:
        align = int(align)
    else:
        align = 1
    if align < 1:
        raise Exception('Illegal alignment %d' % align)
    elif value.isdigit():
        return _StaticLength(int(value), align)
    elif value == '*':
        return _FreeLength(align)
    return _DynamicLength(value, align)
class _Length(object):
free = False
def __init__(self):
self.value = None
self.align = None
def decode_lengths(self, message, max_length=None):
raise Exception("Override this method in implementing class.")
def _get_aligned_lengths(self, length):
return length, length + (self.align - length % self.align) % self.align
def decode(self, message, maximum_length=None):
"""Decode the length of this field. Maximum length is the maximum
length available from data or None if maximum length is not known.
"""
return self.decode_lengths(message, maximum_length)[0]
class _StaticLength(_Length):
    """Length fixed at template definition time."""

    static = True
    has_references = False

    def __init__(self, value, align):
        _Length.__init__(self)
        self.value = int(value)
        self.align = int(align)

    def decode_lengths(self, message, max_length=None):
        return self._get_aligned_lengths(self.value)

    def find_length_and_set_if_necessary(self, message, min_length, little_endian=False):
        # Static lengths never need to be written back to a reference field.
        return self._get_aligned_lengths(self.value)
class _FreeLength(_Length):
    """'*' length: consume whatever data is available."""

    static = False
    has_references = False
    free = True

    def __init__(self, align):
        self.align = int(align)

    def decode_lengths(self, message, max_length=None):
        if max_length is None:
            raise AssertionError('Free length (*) can only be used on context where maximum byte length is unambiguous')
        return self._get_aligned_lengths(max_length)

    def find_length_and_set_if_necessary(self, message, min_length):
        return self._get_aligned_lengths(min_length)
class _DynamicLength(_Length):
    """Length read from (and written back to) another field's value."""

    static = False
    has_references = True

    def __init__(self, value, align):
        self.field, self.value_calculator = parse_field_and_calculator(value)
        self.field_parts = self.field.split('.')
        self.align = int(align)

    def calc_value(self, param):
        return self.value_calculator.calc_value(param)

    def solve_parameter(self, length):
        # Inverse of calc_value: what the reference field must contain
        # so that calc_value yields ``length``.
        return self.value_calculator.solve_parameter(length)

    def decode_lengths(self, parent, max_length=None):
        reference = self._find_reference(parent)
        if not self._has_been_set(reference):
            raise AssertionError('Value of %s not set' % self.field)
        return self._get_aligned_lengths(self.calc_value(reference.int))

    def _find_reference(self, parent):
        # Search for the referenced field here, then up the parent chain.
        field = self._get_field(parent)
        if field:
            return field
        else:
            parent = parent._parent
            return self._find_reference(parent) if parent else None

    def _get_field(self, elem):
        # Follow a dotted path like 'header.length'.
        for part in self.field_parts:
            if part not in elem:
                return None
            elem = elem[part]
        return elem

    def _has_been_set(self, reference):
        return reference._type != 'referenced_later'

    def _set_length(self, reference, min_length, little_endian=False):
        value_len, aligned_len = self._get_aligned_lengths(min_length)
        reference._parent[self.field_parts[-1]] = \
            self._encode_ref_length(self.solve_parameter(aligned_len),
                                    reference,
                                    little_endian=little_endian)
        return value_len, aligned_len

    def _encode_ref_length(self, aligned_len, reference, little_endian=False):
        return reference.template.encode({self.field_parts[-1]: str(aligned_len)},
                                         reference._parent, little_endian=little_endian)

    def find_length_and_set_if_necessary(self, parent, min_length, little_endian=False):
        reference = self._find_reference(parent)
        if self._has_been_set(reference):
            self._raise_error_if_not_enough_space(reference, self.solve_parameter(min_length))
            return self._get_aligned_lengths(self.calc_value(reference.int))
        return self._set_length(reference, min_length, little_endian=little_endian)

    def _raise_error_if_not_enough_space(self, reference, min_length):
        if reference.int < min_length:
            raise IndexError("Value for length is too short")

    @property
    def value(self):
        # A dynamic length has no fixed value; accessing it is an error.
        raise IndexError('Length is dynamic.')
class IllegalDynamicLengthException(Exception):
    """Raised when a dynamic length is used where it is not allowed."""
    pass
def _partition(operator, value):
return (val.strip() for val in value.rpartition(operator))
def parse_field_and_calculator(value):
    """Split a 'field <op> N' expression into (field name, calculator).

    NOTE(review): operators are tried in the order '-', '+', '*', so an
    expression containing several operators partitions on the first hit.
    """
    if "-" in value:
        field, _, subtractor = _partition('-', value)
        return field, Subtract(int(subtractor))
    elif "+" in value:
        field, _, add = _partition('+', value)
        return field, Adder(int(add))
    elif "*" in value:
        field, _, multiplier = _partition('*', value)
        return field, Multiplier(int(multiplier))
    return value.strip(), SingleValue()
class SingleValue(object):
    """Identity calculator: the reference value *is* the length."""

    def calc_value(self, param):
        return param

    def solve_parameter(self, length):
        # Inverse of calc_value; identity in both directions.
        return length
class Subtract(object):
    """Calculator for 'field - N': length = reference value minus N."""

    def __init__(self, subtractor):
        self.subtractor = subtractor

    def calc_value(self, param):
        return param - self.subtractor

    def solve_parameter(self, length):
        # Inverse: recover the reference value from the length.
        return length + self.subtractor
class Adder(object):
    """Calculator for 'field + N': length = reference value plus N."""

    def __init__(self, add):
        self.add = add

    def calc_value(self, param):
        return param + self.add

    def solve_parameter(self, length):
        # Inverse: recover the reference value from the length.
        return length - self.add
class Multiplier(object):
    """Calculator for 'field * N': length = reference value times N."""

    def __init__(self, multiplier):
        self.multiplier = multiplier

    def calc_value(self, param):
        return param * self.multiplier

    def solve_parameter(self, length):
        # Inverse: round up, since a partial multiple still needs a full unit.
        return math.ceil(length / float(self.multiplier))
class BagSize(object):
    """Parsed size specifier for a bag case: fixed ('3'), range ('0-2')
    or unbounded ('*')."""

    fixed = re.compile(r'[1-9][0-9]*\Z')
    range = re.compile(r'([0-9]+)\s*-\s*([1-9][0-9]*)\Z')

    def __init__(self, size):
        # TODO: add open range 2-n
        size = size.strip()
        range_match = self.range.match(size)
        if size == '*':
            self._set_min_max(0, sys.maxsize)
        elif self.fixed.match(size):
            self._set_min_max(size, size)
        elif range_match:
            low, high = range_match.groups()
            self._set_min_max(low, high)
        else:
            raise AssertionError("Invalid bag size %s." % size)

    def _set_min_max(self, min_, max_):
        self.min, self.max = int(min_), int(max_)
        if self.min > self.max:
            raise AssertionError("Invalid bag size %s." % str(self))

    def __str__(self):
        if self.min == self.max:
            return str(self.min)
        if self.min == 0 and self.max == sys.maxsize:
            return '*'
        return '%s-%s' % (self.min, self.max)
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as for regular dictionaries.

    # The internal self.__map dictionary maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # Each link is stored as a list of length three:  [PREV, NEXT, KEY].

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary. Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.
        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            # Reuse the sentinel if __init__ runs more than once.
            self.__root
        except AttributeError:
            self.__root = root = []  # sentinel node
            root[:] = [root, root, None]
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link which goes at the end of the linked
        # list, and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            root = self.__root
            last = root[0]
            last[1] = root[0] = self.__map[key] = [last, root, key]
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which is
        # then removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link_prev, link_next, key = self.__map.pop(key)
        link_prev[1] = link_next
        link_next[0] = link_prev

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Walk the circular list forward, yielding each node's KEY.
        root = self.__root
        curr = root[1]
        while curr is not root:
            yield curr[2]
            curr = curr[1]

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Walk the circular list backwards.
        root = self.__root
        curr = root[0]
        while curr is not root:
            yield curr[2]
            curr = curr[0]

    def clear(self):
        'od.clear() -> None. Remove all items from od.'
        try:
            # Clear the list nodes first to break their reference cycles.
            for node in self.__map.itervalues():
                del node[:]
            root = self.__root
            root[:] = [root, root, None]
            self.__map.clear()
        except AttributeError:
            pass
        dict.clear(self)

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.
        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            # Unlink the node just before the sentinel (most recent key).
            link = root[0]
            link_prev = link[0]
            link_prev[1] = root
            root[0] = link_prev
        else:
            # Unlink the node just after the sentinel (oldest key).
            link = root[1]
            link_next = link[1]
            root[1] = link_next
            link_next[0] = root
        key = link[2]
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    # -- the following methods do not depend on the internal structure --

    def keys(self):
        'od.keys() -> list of keys in od'
        return list(self)

    def values(self):
        'od.values() -> list of values in od'
        return [self[key] for key in self]

    def items(self):
        'od.items() -> list of (key, value) pairs in od'
        return [(key, self[key]) for key in self]

    def iterkeys(self):
        'od.iterkeys() -> an iterator over the keys in od'
        return iter(self)

    def itervalues(self):
        'od.itervalues -> an iterator over the values in od'
        for k in self:
            yield self[k]

    def iteritems(self):
        'od.iteritems -> an iterator over the (key, value) items in od'
        for k in self:
            yield (k, self[k])

    def update(self, *args, **kwds):
        '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
        If E is a dict instance, does: for k in E: od[k] = E[k]
        If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
        Or if E is an iterable of items, does: for k, v in E: od[k] = v
        In either case, this is followed by: for k, v in F.items(): od[k] = v
        '''
        if len(args) > 1:
            raise TypeError('update() takes at most 2 positional '
                            'arguments (%d given)' % (len(args),))
        # Make progressively weaker assumptions about "other"
        other = args[0] if args else ()
        if isinstance(other, dict):
            for key in other:
                self[key] = other[key]
        elif hasattr(other, 'keys'):
            for key in other.keys():
                self[key] = other[key]
        else:
            for key, value in other:
                self[key] = value
        for key, value in kwds.items():
            self[key] = value

    __update = update  # let subclasses override update without breaking __init__

    __marker = object()  # sentinel distinguishing "no default" from default=None

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised.
        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default

    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        # Shared mutable default detects recursive repr per (object, thread).
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            del _repr_running[call_key]

    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        # Drop the bookkeeping attributes; a fresh instance rebuilds them.
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return self.__class__, (items,), inst_dict
        return self.__class__, (items,)

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
        and values equal to v (which defaults to None).
        '''
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.
        '''
        if isinstance(other, OrderedDict):
            return len(self) == len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other

    # -- the following methods are only used in Python 2.7 --

    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        return KeysView(self)

    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        return ValuesView(self)

    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        return ItemsView(self)
# end of http://code.activestate.com/recipes/576693/ }}}
import binascii
import struct
try:
    # Probe for the bin() builtin (added in Python 2.6).
    if bin(0):
        pass
except NameError, name_error:  # Python 2.5: install a fallback bin()
    def bin(value):
        """
        Support for Python 2.5
        Based on a recipe by Benjamin Wiley Sittler.
        http://code.activestate.com/recipes/219300-format-integer-as-binary-string/
        """
        if value < 0:
            return '-' + bin(-value)
        out = []
        if value == 0:
            out.append('0')
        # Collect bits least-significant first, then reverse for output.
        while value > 0:
            out.append('01'[value & 1])
            value >>= 1
        try:
            # reversed() exists from Python 2.4 onwards.
            return '0b' + ''.join(reversed(out))
        except NameError:
            out.reverse()
            return '0b' + ''.join(out)
# Pre-compiled big-endian unsigned 64-bit (8 byte) packer.
LONGLONG = struct.Struct('>Q')


def to_bin(string_value):
    """Convert a value given as a '0x...' hex string, a '0b...' binary
    string or a decimal integer string into a raw big-endian byte string.

    None and the empty string produce an empty result.
    """
    if string_value in (None, ''):
        return ''
    string_value = str(string_value)
    if string_value.startswith('0x'):
        return _hex_to_bin(string_value)
    elif string_value.startswith('0b'):
        # Spaces may be used as visual separators inside binary literals.
        return _int_to_bin(int(string_value.replace('0b', '')
                               .replace(' ', ''), 2))
    return _int_to_bin(int(string_value))
def _int_to_bin(integer):
    # Values >= 2 ** 64 do not fit the 8-byte packer; route them through
    # the hex path instead.  (Python 2 long literal below.)
    if integer >= 18446744073709551616L:
        return to_bin(hex(integer))
    # Strip leading zero bytes but never return an empty string for 0.
    return LONGLONG.pack(integer).lstrip('\x00') or '\x00'
def _hex_to_bin(string_value):
value = string_value.replace('0x', '').replace(' ', '').replace('L', '')
if len(value) % 2 == 1:
value = '0' + value
return binascii.unhexlify(value)
def to_bin_of_length(length, string_value):
    """Convert *string_value* with `to_bin` and left-pad the result with
    zero bytes to exactly *length* bytes.  Fails if the value is longer."""
    bin = to_bin(string_value)
    if len(bin) > length:
        raise AssertionError('Too long binary value %s (max length %d)'
                             % (string_value, length))
    return bin.rjust(length, '\x00')
def to_hex(binary):
    """Return the hex digit representation of a raw byte string."""
    return binascii.hexlify(binary)


def to_0xhex(binary):
    """Like `to_hex` but with a '0x' prefix."""
    return '0x' + to_hex(binary)
def to_binary_string_of_length(length, bytes):
    """Return *bytes* as a '0b...' digit string zero-padded to *length* bits."""
    result = bin(int(to_0xhex(bytes), 16))
    # The +2 accounts for the '0b' prefix in the length comparison.
    if len(result) < length + 2:
        result = '0b' + '0' * (length - len(result) + 2) + result[2:]
    return result


def to_bin_str_from_int_string(length, value):
    """Return *value* (any `to_bin` compatible string) as a *length*-bit
    binary digit string without the '0b' prefix."""
    return to_binary_string_of_length(length, to_bin(value))[2:]
def to_tbcd_value(binary):
    """Decode a TBCD (telephony BCD, swapped-nibble) byte string into its
    digit string.  A 0xF high nibble terminates an odd-length value."""
    bin_str, value = to_binary_string_of_length(len(to_hex(binary)) *
                                                4, binary), ""
    # Walk a byte (8 bits) at a time, skipping the '0b' prefix (index 2).
    for index in range(2, len(bin_str), 8):
        if int(bin_str[index:index + 4], 2) == 15:
            # 0xF filler nibble: emit the final single digit and stop.
            return value + str(int(bin_str[index + 4: index + 8], 2))
        # Low nibble holds the earlier digit, high nibble the later one.
        value += "%s%s" % (int(bin_str[index + 4:index + 8], 2),
                           int(bin_str[index: index + 4], 2))
    return value


def to_tbcd_binary(tbcd_string):
    """Encode a digit string into TBCD bytes: digit pairs are nibble-swapped
    and an odd-length input is padded with the 0xF filler nibble."""
    value, index = "0b", 0
    while index <= len(tbcd_string) - 2:
        value += to_bin_str_from_int_string(4, tbcd_string[index + 1]) +\
            to_bin_str_from_int_string(4, tbcd_string[index])
        index += 2
    return to_bin(value if index == len(tbcd_string)
                  else value + to_bin_str_from_int_string(4, 15) +
                  to_bin_str_from_int_string(4, tbcd_string[index]))
def to_twos_comp(val, bits):
    """Encode the integer string *val* as a *bits*-wide two's complement
    unsigned value; non-negative values are returned as plain ints."""
    if not val.startswith('-'):
        return to_int(val)
    # Invert the magnitude's bits and add one: two's complement encoding.
    value = _invert(to_bin_str_from_int_string(bits, bin(to_int(val[1:]))))
    return int(value, 2) + 1
def from_twos_comp(val, bits):
    """Interpret *val*, an unsigned integer of width *bits*, as a two's
    complement number and return the signed value."""
    sign_bit = 1 << (bits - 1)
    if val & sign_bit:
        return val - (1 << bits)
    return val
def _invert(value):
return "".join(str(int(a) ^ 1) for a in value)
def to_int(string_value):
    """Convert a decimal ('42'), hex ('0x2a') or binary ('0b101010')
    string to an int.  A leading '-' is accepted for all three bases.

    Raises an exception for None or an empty string.
    """
    if string_value in (None, ''):
        raise Exception("No value or empty value given")
    # startswith with a tuple replaces the ad-hoc slice comparison
    # string_value[:3] == '-0x' used previously.
    if string_value.startswith(('0x', '-0x')):
        return int(string_value, 16)
    elif string_value.startswith(('0b', '-0b')):
        return int(string_value, 2)
    return int(string_value)
import os
from robot.libraries.BuiltIn import BuiltIn
from .core import RammbockCore
from .message_sequence import SeqdiagGenerator
from .version import VERSION
class Rammbock(RammbockCore):
    """Rammbock is a binary protocol testing library for Robot Test Automation Framework.

    To use Rammbock you need to first define a protocol, start the clients and servers you are going to mock,
    and then define a message template for each message you are going to send or receive.

    Example:
    | *Settings * |
    | Library | Rammbock |

    | *Test Cases* |
    | Send message | Define simple protocol |
    | | Start server |
    | | Start client |
    | | Send message | status:0xcafebabe |
    | | Verify server gets status | 0xcafebabe |
    | | [Teardown] | `Reset Rammbock` |

    | *Keywords* |
    | Define simple protocol | `New protocol` | SimpleProtocol |
    | | `u8` | msgId |
    | | `u8` | messageLength |
    | | `pdu` | messageLength - 2 |
    | | `End protocol` |
    | |
    | Start server | `Start UDP server` | 127.0.0.1 | 8282 | protocol=SimpleProtocol |
    | |
    | Start client | `Start UDP client` | protocol=SimpleProtocol |
    | | `Connect` | 127.0.0.1 | 8282 |
    | | | | | |
    | Send message | [Arguments] | @{params} |
    | | `New message` | SimpleRequest | SimpleProtocol | msgId:0xff |
    | | `u32` | status |
    | | `Client sends message` | @{params} |
    | |
    | Verify server gets status | [Arguments] | ${status} |
    | | ${msg} = | `Server receives message` |
    | | Should be equal | ${msg.status.hex} | ${status} |
    """

    ROBOT_LIBRARY_VERSION = VERSION

    def u8(self, name, value=None, align=None):
        """Add an unsigned 1 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(1, name, value, align)

    def u16(self, name, value=None, align=None):
        """Add an unsigned 2 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(2, name, value, align)

    def u24(self, name, value=None, align=None):
        """Add an unsigned 3 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(3, name, value, align)

    def u32(self, name, value=None, align=None):
        """Add an unsigned 4 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(4, name, value, align)

    def u40(self, name, value=None, align=None):
        """Add an unsigned 5 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(5, name, value, align)

    def u64(self, name, value=None, align=None):
        """Add an unsigned 8 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(8, name, value, align)

    def u128(self, name, value=None, align=None):
        """Add an unsigned 16 byte integer field to template.

        This is a convenience method that simply calls `Uint` keyword with predefined length."""
        self.uint(16, name, value, align)

    def i8(self, name, value=None, align=None):
        """Add a signed 1 byte integer field to template.

        This is a convenience method that simply calls `Int` keyword with predefined length."""
        self.int(1, name, value, align)

    def i32(self, name, value=None, align=None):
        """Add a signed 4 byte integer field to template.

        This is a convenience method that simply calls `Int` keyword with predefined length."""
        self.int(4, name, value, align)

    def array(self, size, type, name, *parameters):
        """Define a new array of given `size` and containing fields of type `type`.

        `name` is the name of this array element. The `type` is the name of keyword that is executed as the contents of
        the array and optional extra parameters are passed as arguments to this keyword.

        Examples:
        | Array | 8 | u16 | myArray |

        | u32 | length |
        | Array | length | someStruct | myArray | <argument for someStruct> |
        """
        self._new_list(size, name)
        BuiltIn().run_keyword(type, '', *parameters)
        self._end_list()

    def container(self, name, length, type, *parameters):
        """Define a container with given length.

        This is a convenience method creating a `Struct` with `length` containing fields defined in `type`.
        """
        self.new_struct('Container', name, 'length=%s' % length)
        BuiltIn().run_keyword(type, *parameters)
        self.end_struct()

    def case(self, size, kw, *parameters):
        """An element inside a bag started with `Start Bag`.

        The first argument is size which can be absolute value like `1`, a range
        like `0-3`, or just `*` to accept any number of elements.

        Examples:
        | Start bag | intBag |
        | case | 0-1 | u8 | foo | 42 |
        | case | 0-2 | u8 | bar | 1 |
        | End bag |
        """
        # TODO: check we are inside a bag!
        self._start_bag_case(size)
        BuiltIn().run_keyword(kw, *parameters)
        self._end_bag_case()

    def embed_seqdiag_sequence(self):
        """Create a message sequence diagram png file to output folder and embed the image to log file.

        You need to have seqdiag installed to create the sequence diagram. See http://blockdiag.com/en/seqdiag/
        """
        test_name = BuiltIn().replace_variables('${TEST NAME}')
        outputdir = BuiltIn().replace_variables('${OUTPUTDIR}')
        path = os.path.join(outputdir, test_name + '.seqdiag')
        SeqdiagGenerator().compile(path, self._message_sequence)
from math import ceil
import re
from Rammbock.message import (Field, Union, Message, Header, List, Struct,
BinaryContainer, BinaryField, TBCDContainer,
Conditional, Bag)
from message_stream import MessageStream
from primitives import Length, Binary, TBCD, BagSize
from Rammbock.ordered_dict import OrderedDict
from Rammbock.binary_tools import (to_binary_string_of_length, to_bin,
to_tbcd_value, to_tbcd_binary)
from Rammbock.condition_parser import ConditionParser
from Rammbock.logger import logger
class _Template(object):
    """Base class for message/structure templates: an ordered collection of
    named field templates with shared encode/decode/validate machinery."""

    def __init__(self, name, parent):
        self.parent = parent
        self._fields = OrderedDict()
        self.name = name
        self._saved = False

    def _pretty_print_fields(self, fields):
        return ', '.join('%s:%s' % (key, value) for key, value in fields.items())

    def _mark_referenced_field(self, field):
        # Fields used in another field's length expression must exist and are
        # flagged so their value can be filled in later (see referenced_later).
        ref_field = self._get_field_recursive(field.length.field)
        if not ref_field:
            raise AssertionError('Length field %s unknown' % field.length.field)
        ref_field.referenced_later = True

    def add(self, field):
        """Add a field template; rejects duplicates and (by default) PDUs."""
        if field.type == 'pdu':
            self._handle_pdu_field(field)
        if self._get_field(field.name):
            raise AssertionError("Duplicate field '%s' in '%s'" % (field.name, self._get_recursive_name()))
        if field.has_length and field.length.has_references:
            self._mark_referenced_field(field)
        self._fields[field.name] = field

    def _handle_pdu_field(self, field):
        # Only the Protocol subclass accepts a PDU field.
        raise AssertionError('PDU field not allowed')

    def _get_field(self, field_name):
        return self._fields.get(field_name)

    def _get_field_recursive(self, field_name):
        # Look up the field here first, then in enclosing templates.
        return self._get_field(field_name) or self.parent and self.parent._get_field_recursive(field_name)

    def _check_params_empty(self, message_fields, name):
        # NOTE: popping while iterating .keys() relies on Python 2, where
        # keys() returns a list copy.
        for key in message_fields.keys():
            if key.startswith('*'):
                message_fields.pop(key)
        if message_fields:
            raise AssertionError("Unknown fields in '%s': %s" %
                                 (self._get_recursive_name(), self._pretty_print_fields(message_fields)))

    def _get_recursive_name(self):
        # Dotted path from the outermost template down to this one.
        return (self.parent._get_recursive_name() + "." if self.parent else '') + self.name

    def _encode_fields(self, struct, params, little_endian=False):
        for field in self._fields.values():
            encoded = field.encode(params, struct, little_endian=little_endian)
            # TODO: clean away this ugly hack that makes it possible to skip PDU
            # (now it is a 0 length place holder in header)
            if encoded:
                struct[field.name] = encoded
        self._check_params_empty(params, self.name)

    def decode(self, data, parent=None, name=None, little_endian=False):
        """Decode the fields sequentially from *data* into a new struct."""
        message = self._get_struct(name, parent)
        data_index = 0
        for field in self._fields.values():
            message[field.name] = field.decode(data[data_index:], message, little_endian=little_endian)
            data_index += len(message[field.name])
        return message

    def validate(self, message, message_fields):
        """Collect validation error strings from every field."""
        errors = []
        for field in self._fields.values():
            errors += field.validate(message, message_fields)
        self._check_params_empty(message_fields, self.name)
        return errors

    def _get_params_sub_tree(self, params, name=None):
        # Extract 'name.child' entries (and '*.child' wildcards) into a
        # child-level parameter dict; own entries are consumed with pop().
        result = {'*': params['*']} if '*' in params else {}
        name = name or self.name
        for key in params.keys():
            prefix, _, ending = key.partition('.')
            if prefix == name:
                result[ending] = params.pop(key)
            elif prefix == '*' and ending:
                result[ending] = params[key]
        return result

    def _get_struct(self, name, parent):
        # Subclasses return the concrete message object to fill.
        return None

    @property
    def is_saved(self):
        return self._saved
# TODO: Refactor the pdu to use the same dynamic length strategy as structs in encoding
class Protocol(_Template):
    """Template for a protocol header, optionally terminated by a PDU field
    whose length is derived from a header field."""

    def __init__(self, name, little_endian=False, library=None):
        _Template.__init__(self, name, None)
        self.pdu = None
        self.little_endian = little_endian
        self.library = library

    def header_length(self):
        """Static header length in bytes, or -1 if any field is dynamic
        (get_static_length raises IndexError for dynamic fields)."""
        try:
            return sum(field.get_static_length() for field in self._fields.values() if field.type != 'pdu')
        except IndexError:
            return -1

    def encode(self, message, header_params):
        """Encode the header for *message*, filling the PDU length field
        from the encoded message size when necessary."""
        header_params = header_params.copy()
        header = Header(self.name)
        self._encode_fields(header, header_params, little_endian=self.little_endian)
        if self.pdu_length:
            self.pdu_length.find_length_and_set_if_necessary(header, len(message._raw), little_endian=self.little_endian)
        return header

    def _handle_pdu_field(self, field):
        # A protocol accepts exactly one PDU field.
        if self.pdu:
            raise AssertionError('Duplicate PDU field not allowed in protocol definition.')
        self.pdu = field

    @property
    def pdu_length(self):
        return self.pdu.length if self.pdu else None

    def add(self, field):
        if self.pdu:
            raise AssertionError('Fields after PDU not supported.')
        _Template.add(self, field)

    # TODO: fields after the pdu
    def _extract_values_from_data(self, data, header, values):
        # Decode header fields in order; the PDU itself is skipped.
        # Returns whatever trailing data was not consumed.
        data_index = 0
        for field in values:
            if field is not self.pdu:
                header[field.name] = field.decode(data[data_index:], header, little_endian=self.little_endian)
                data_index += len(header[field.name])
        return data[data_index:]

    def read(self, stream, timeout=None):
        """Read one message from *stream*: decode the header, push back any
        over-read bytes, then read the PDU payload. Returns (header, pdu_bytes)
        where pdu_bytes is None for header-only protocols."""
        # TODO: use all data if length cannot be obtained. Return amount of data
        # used to stream
        data = stream.read(self.header_length(), timeout=timeout)
        header = Header(self.name)
        unused_data = self._extract_values_from_data(data, header, self._fields.values())
        stream.return_data(unused_data)
        pdu_bytes = None
        if self.pdu:
            if self.pdu_length.static:
                length = self.pdu_length.value
            else:
                # Dynamic PDU length is computed from the referenced header field.
                length = self.pdu_length.calc_value(header[self.pdu_length.field].int)
            # TODO: we need a timeout?
            pdu_bytes = stream.read(length)
        return header, pdu_bytes

    def get_message_stream(self, buffered_stream):
        return MessageStream(buffered_stream, self)
class MessageTemplate(_Template):
    """Template for a complete message: body fields plus the owning
    protocol's header (with per-message header parameter defaults)."""
    type = 'Message'

    def __init__(self, message_name, protocol, header_params):
        _Template.__init__(self, message_name, None)
        self._protocol = protocol
        self.header_parameters = header_params

    def decode(self, data, parent=None, name=None, little_endian=False):
        msg = _Template.decode(self, data, parent, name, little_endian)
        self.check_message_lengths(msg, data)
        return msg

    def check_message_lengths(self, msg, data):
        # More data was received than the template consumed.
        if len(msg) < len(data):
            raise AssertionError('Received \'%s\', message too long. Expected %s but got %s' % (self.name, len(msg), len(data)))

    def encode(self, message_params, header_params, little_endian=False):
        """Encode the message body and attach the encoded protocol header."""
        message_params = message_params.copy()
        if self.only_header:
            # No PDU in the protocol: the "message" is just the header.
            parameters = self._headers(message_params)
            return self._protocol.encode(None, parameters)
        msg = Message(self.name)
        self._encode_fields(msg, message_params, little_endian=little_endian)
        if self._protocol:
            header = self._protocol.encode(msg, self._headers(header_params))
            msg._add_header(header)
        return msg

    def _headers(self, header_params):
        # Explicit header_params override the template's stored defaults.
        result = {}
        result.update(self.header_parameters)
        result.update(header_params)
        return result

    def _get_struct(self, name, parent=None):
        return Message(self.name)

    def validate(self, message, message_fields, header_fields):
        validation_params = self.header_parameters.copy()
        if self.only_header:
            return self._validate_with_header_only(message, message_fields, validation_params)
        return self._validate_with_header_and_messagebody(message, message_fields, header_fields, validation_params)

    def _validate_with_header_only(self, message, message_fields, validation_params):
        validation_params.update(message_fields)
        return self._protocol.validate(message, validation_params)

    def _validate_with_header_and_messagebody(self, message, message_fields, header_fields, validation_params):
        validation_params.update(header_fields)
        return self._protocol.validate(message._header, validation_params) + _Template.validate(self, message, message_fields)

    def set_as_saved(self):
        self._saved = True

    @property
    def only_header(self):
        # True when the protocol defines no PDU field.
        return not bool(self._protocol.pdu)
class StructTemplate(_Template):
    """Template for a nested struct, optionally with a fixed/derived length
    and byte alignment."""
    has_length = False

    def __init__(self, type, name, parent, parameters=None, length=None, align=None):
        self._parameters = parameters or {}
        self.type = type
        if length:
            self._set_length(length)
        self._align = int(align or 1)
        _Template.__init__(self, name, parent)

    def _set_length(self, length):
        self.has_length = True
        self.length = Length(length)

    def get_static_length(self):
        return sum(field.get_static_length() for field in self._fields.values())

    def decode(self, data, parent=None, name=None, little_endian=False):
        if self.has_length:
            # Restrict decoding to the declared slice of the data.
            length = self.length.decode(parent)
            data = data[:length]
        return _Template.decode(self, data, parent, name, little_endian)

    def encode(self, message_params, parent=None, name=None, little_endian=False):
        struct = self._get_struct(name, parent)
        self._add_struct_params(message_params)
        self._encode_fields(struct,
                            self._get_params_sub_tree(message_params, name),
                            little_endian=little_endian)
        if self.has_length:
            length, aligned_length = self.length.find_length_and_set_if_necessary(parent, len(struct))
            if len(struct) != length:
                raise AssertionError('Length of struct %s does not match defined length. defined length:%s Struct:\n%s' % (self.name, length, repr(struct)))
        return struct

    # TODO: Cleanup setting the parent to constructor of message -elements
    def _get_struct(self, name, parent):
        struct = Struct(name or self.name, self.type, align=self._align)
        struct._parent = parent
        return struct

    def validate(self, parent, message_fields, name=None):
        self._add_struct_params(message_fields)
        errors = []
        name = name or self.name
        message = parent[name]
        if self.has_length:
            length = self.length.decode(message)
            if len(message) != length:
                errors.append('Length of struct %s does not match defined length. defined length:%s struct length:%s' % (message._name, length, len(message)))
        return errors + _Template.validate(self, message, self._get_params_sub_tree(message_fields, name))

    def _add_struct_params(self, params):
        # Fill missing params from the template's declared defaults.
        # NOTE(review): pop() permanently consumes the template's own
        # _parameters, so the defaults are gone after the first encode/validate
        # of this template instance — looks suspicious; verify this is intended.
        for key in self._parameters.keys():
            params[key] = self._parameters.pop(key) if key not in params else params[key]
class UnionTemplate(_Template):
    """Template for a union: all alternatives share the same buffer, whose
    size is the largest alternative's static length."""
    has_length = False

    def __init__(self, type, name, parent):
        self.type = type
        _Template.__init__(self, name, parent)

    def add(self, field):
        # Calling get_static_length here rejects dynamic-length alternatives
        # up front (it raises for dynamic fields).
        field.get_static_length()
        self._fields[field.name] = field

    def get_static_length(self):
        return max(field.get_static_length() for field in self._fields.values())

    def decode(self, data, parent=None, name=None, little_endian=False):
        # Every alternative is decoded over the same data.
        union = self._get_struct(name, parent)
        for field in self._fields.values():
            union[field.name] = field.decode(data, union, little_endian=little_endian)
        return union

    def encode(self, union_params, parent=None, name=None, little_endian=False):
        """Encode only the alternative chosen via union_params[name]."""
        name = name or self.name
        if name not in union_params:
            raise AssertionError("Value not chosen for union '%s'" % self._get_recursive_name())
        chosen_one = union_params[name]
        if chosen_one not in self._fields:
            raise Exception("Unknown union field '%s' in '%s'" % (chosen_one, self._get_recursive_name()))
        field = self._fields[chosen_one]
        union = self._get_struct(name, parent)
        union[field.name] = field.encode(self._get_params_sub_tree(union_params, name),
                                         union,
                                         little_endian=little_endian)
        return union

    def _get_struct(self, name, parent):
        union = Union(name or self.name, self.get_static_length())
        union._parent = parent
        return union

    def validate(self, parent, message_fields, name=None):
        name = name or self.name
        message = parent[name]
        return _Template.validate(self, message, self._get_params_sub_tree(message_fields, name))
class BagTemplate(_Template):
    """Template for a bag: an unordered sequence of entries, each matching
    one of the declared Case alternatives. Decode-only."""
    has_length = False
    type = 'Bag'

    def __init__(self, name, parent):
        _Template.__init__(self, name, parent)

    def add(self, field):
        if field.type != 'Case':
            raise AssertionError('Field of type %s added to bag. Has to be of type Case.' % field.type)
        self._fields[field.name] = field

    def encode(self, set_params, parent=None, name=None, little_endian=False):
        raise AssertionError("Set can not be encoded.")

    def decode(self, data, parent=None, name=None, little_endian=False):
        bag = self._get_struct(name, parent)
        while data:
            match = self._decode_one(data, bag, little_endian=little_endian)
            # NOTE(review): advances by the length of the case's first entry
            # ('0') — presumably all entries of one case are equally long;
            # confirm against the field implementations.
            data = data[len(match['0']):]
        return bag

    def _decode_one(self, data, bag, little_endian=False):
        # Try each case in declaration order; the first one that decodes and
        # validates wins. Failures are only traced, not raised.
        for case in self._fields.values():
            try:
                match = case.decode(data, bag, little_endian=little_endian)
                logger.trace("'%s' matches in bag '%s'. value: %r" % (case.name, self.name, match[match.len - 1]))
                return match
            except Exception as e:
                # e.message is Python 2 only.
                logger.trace("'%s' does not match in bag '%s'. Error: %s" % (case.name, self.name, e.message))
        raise AssertionError("Unable to decode bag value.")

    def _get_struct(self, name, parent):
        # Pre-populate the bag with an empty list per case.
        bag = Bag(name or self.name)
        bag._parent = parent
        for case in self._fields.values():
            bag[case.name] = case.get_message_object(bag)
        return bag

    def validate(self, parent, message_fields, name=None):
        name = name or self.name
        params_subtree = self._get_params_sub_tree(message_fields, name)
        bag = parent[name]
        errors = []
        for field in self._fields.values():
            errors += field.validate(bag, params_subtree)
        return errors
class CaseTemplate(_Template):
    """One alternative inside a bag: wraps a single field template and an
    allowed occurrence count (BagSize)."""
    has_length = False
    type = 'Case'

    def __init__(self, size, parent):
        self.size = BagSize(size)
        # The name is set later, in add(), from the wrapped field.
        _Template.__init__(self, None, parent)

    @property
    def field(self):
        # NOTE: .values()[0] relies on Python 2 (values() returns a list).
        return self._fields.values()[0]

    def add(self, field):
        self.name = field.name
        _Template.add(self, field)

    def decode(self, data, parent, name=None, little_endian=False):
        """Decode one entry into this case's list; the entry is removed
        again (and an AssertionError raised) if it fails validation."""
        case = parent[self.name]
        # TODO: Cleanup
        field = self.field.decode(data, case, name=str(case.len),
                                  little_endian=little_endian)
        case.add(field)
        errors = self.field.validate(case, {}, str(case.len - 1))
        if errors:
            del case[case.len - 1]
            raise AssertionError(errors[0])
        return case

    # FIXME: now validating only number of entries
    def validate(self, parent, message_fields, name=None):
        errors = []
        case = parent[name or self.name]
        if case.len < self.size.min or case.len > self.size.max:
            errors.append('%s values in bag %s for %s (size %s).' %
                          (case.len or 'No',
                           parent._name,
                           case._name,
                           self.size))
        return errors

    def get_message_object(self, parent):
        lst = List(self.name, self.field.type)
        lst._parent = parent
        return lst
# TODO: check that only one field is added to list
# TODO: list field could be overriden
class ListTemplate(_Template):
    """Template for a homogeneous list of one repeated field template.
    Entries are addressed by stringified index and parameterized either
    with bracket syntax name[i].child or dot syntax name.child."""
    param_pattern = re.compile(r'([^.]*?)\[(.*?)\](.*)')
    has_length = True
    type = 'List'

    def __init__(self, length, name, parent):
        self.length = Length(length)
        _Template.__init__(self, name, parent)

    def get_static_length(self):
        return self.length.value * self.field.get_static_length()

    def encode(self, message_params, parent, name=None, little_endian=False):
        name = name or self.name
        params_subtree = self._get_params_sub_tree(message_params, name)
        list = self._get_struct(name, parent)
        for index in range(self.length.decode(parent)):
            list[str(index)] = self.field.encode(params_subtree,
                                                 parent,
                                                 name=str(index),
                                                 little_endian=little_endian)
        self._check_params_empty(params_subtree, name)
        return list

    @property
    def field(self):
        # The single repeated element template.
        # NOTE: .values()[0] relies on Python 2 (values() returns a list).
        return self._fields.values()[0]

    def _get_struct(self, name=None, parent=None):
        ls = List(name or self.name, self.field.type)
        ls._parent = parent
        return ls

    def decode(self, data, parent, name=None, little_endian=False):
        name = name or self.name
        message = self._get_struct(name, parent)
        data_index = 0
        # maximum_length is given for free length (*) to limit the absolute maximum number of entries
        for index in range(0, self.length.decode(parent, maximum_length=len(data))):
            message[str(index)] = self.field.decode(data[data_index:], message, name=str(index), little_endian=little_endian)
            # NOTE(review): indexes with int while keys were set with str(index)
            # — presumably List supports integer indexing too; verify.
            data_index += len(message[index])
            if self.length.free and data_index == len(data):
                break
        return message

    def validate(self, parent, message_fields, name=None):
        name = name or self.name
        params_subtree = self._get_params_sub_tree(message_fields, name)
        list = parent[name]
        errors = []
        for index in range(list.len):
            errors += self.field.validate(list, params_subtree, name=str(index))
        self._check_params_empty(params_subtree, name)
        return errors

    def _get_params_sub_tree(self, params, name=None):
        # Accepts both bracketed (name[0].x) and dotted (name.x) parameters.
        result = OrderedDict({'*': params['*']} if '*' in params else {})
        name = name or self.name
        for key in params.keys():
            self._consume_params_with_brackets(name, params, result, key)
            self._consume_dot_syntax(name, params, result, key)
        return result

    def _consume_params_with_brackets(self, name, params, result, key):
        match = self.param_pattern.match(key)
        if match:
            prefix, child_name, ending = match.groups()
            if prefix == name:
                result[child_name + ending] = params.pop(key)
            elif prefix == '*':
                # Wildcards are shared: copied, not consumed.
                result[child_name + ending] = params[key]

    def _consume_dot_syntax(self, name, params, result, key):
        prefix, _, ending = key.partition('.')
        if prefix == name:
            result[ending] = params.pop(key)
        elif prefix == '*' and ending:
            result[ending] = params[key]
class BinaryContainerTemplate(_Template):
    """Template for a container of bit-level Binary fields whose total
    width must be a whole number of bytes."""
    has_length = False
    type = 'BinaryContainer'

    def get_static_length(self):
        # NOTE: integer division under Python 2 '/' semantics.
        return self.binlength / 8

    def add(self, field):
        if not isinstance(field, Binary):
            raise AssertionError('Binary container can only have binary fields.')
        _Template.add(self, field)

    @property
    def binlength(self):
        # Total width of all fields, in bits.
        return sum(field.length.value for field in self._fields.values())

    def verify(self):
        if self.binlength % 8:
            raise AssertionError('Length of binary container %s has to be divisible by 8. Length %s' % (self.name, self.binlength))

    def encode(self, message_params, parent=None, name=None, little_endian=False):
        container = self._get_struct(name, parent, little_endian=little_endian)
        self._encode_fields(container, self._get_params_sub_tree(message_params, name))
        return container

    def decode(self, data, parent=None, name=None, little_endian=False):
        container = self._get_struct(name, parent, little_endian=little_endian)
        if little_endian:
            # Reverse the byte order before slicing out the bit fields.
            data = data[::-1]
        bin_str = to_binary_string_of_length(self.binlength, data[:self.binlength / 8])
        # Index 2 skips the '0b' prefix of the binary digit string.
        data_index = 2
        for field in self._fields.values():
            container[field.name] = self._create_field(bin_str, data_index,
                                                       field)
            data_index += field.length.value
        return container

    def _create_field(self, bin_str, data_index, field):
        return BinaryField(field.length.value, field.name,
                           self._binary_substring(bin_str, data_index, field))

    def _binary_substring(self, bin_str, data_index, field):
        return to_bin(
            "0b" + bin_str[data_index:data_index + field.length.value])

    def validate(self, parent, message_fields, name=None):
        name = name or self.name
        errors = []
        message = parent[name]
        return errors + _Template.validate(self, message, self._get_params_sub_tree(message_fields, name))

    def _get_struct(self, name, parent, little_endian=False):
        cont = BinaryContainer(name or self.name, little_endian=little_endian)
        cont._parent = parent
        return cont
class TBCDContainerTemplate(_Template):
    """Template for a container of TBCD (telephony BCD) digit fields.
    Big-endian only."""
    has_length = False
    type = 'TBCDContainer'

    def get_static_length(self):
        # NOTE: integer division under Python 2 '/' semantics.
        return self.binlength / 8

    def _verify_not_little_endian(self, little_endian):
        if little_endian:
            raise AssertionError('Little endian TBCD fields are not supported.')

    def add(self, field):
        if not isinstance(field, TBCD):
            raise AssertionError('TBCD container can only have TBCD fields.')
        _Template.add(self, field)

    def encode(self, message_params, parent=None, name=None, little_endian=False):
        self._verify_not_little_endian(little_endian)
        container = self._get_struct(name, parent)
        self._encode_fields(container, self._get_params_sub_tree(message_params, name))
        return container

    def decode(self, data, parent=None, name=None, little_endian=False):
        self._verify_not_little_endian(little_endian)
        container = self._get_struct(name, parent)
        # Decode the whole byte string to a digit string once, then carve
        # out each field's digits.
        a = to_tbcd_value(data)
        index = 0
        for field in self._fields.values():
            field_length = field.length.decode(container, len(data) * 2 - index)
            container[field.name] = Field(field.type, field.name, to_tbcd_binary(a[index:index + field_length]))
            index += field_length
        return container

    def validate(self, parent, message_fields, name=None):
        name = name or self.name
        errors = []
        return errors + _Template.validate(self, parent[name], self._get_params_sub_tree(message_fields, name))

    @property
    def binlength(self):
        # Two digits per byte, rounded up to whole bytes, expressed in bits.
        length = sum(field.length.value for field in self._fields.values())
        return int(ceil(length / 2.0) * 8)

    def _get_struct(self, name, parent):
        tbcd = TBCDContainer(name or self.name)
        tbcd._parent = parent
        return tbcd
class ConditionalTemplate(_Template):
    """Template for a group of fields that exists only when *condition*
    (parsed by ConditionParser) evaluates true against the parent's
    already-processed fields.

    The only code change versus the previous revision is removal of a
    dataset artifact that had been appended to the final line.
    """
    has_length = False
    type = 'Conditional'

    def __init__(self, condition, name, parent):
        self.condition = ConditionParser(condition)
        _Template.__init__(self, name, parent)

    def encode(self, message_params, parent=None, name=None, little_endian=False):
        conditional = self._get_struct(name, parent)
        if conditional.exists:
            self._encode_fields(conditional,
                                self._get_params_sub_tree(message_params, name),
                                little_endian=little_endian)
        return conditional

    def decode(self, data, parent=None, name=None, little_endian=False):
        if self.condition.evaluate(parent):
            return _Template.decode(self, data, parent, name, little_endian)
        else:
            # Condition false: return an empty, non-existing conditional.
            return self._get_struct(name, parent)

    def validate(self, parent, message_fields, name=None):
        name = name or self.name
        message = parent[name]
        if message.exists:
            return _Template.validate(self, message, self._get_params_sub_tree(message_fields, name))
        return []

    def _get_struct(self, name, parent):
        conditional = Conditional(name or self.name)
        conditional._parent = parent
        # The existence flag is evaluated against the parent at creation time.
        conditional.exists = self.condition.evaluate(parent)
        return conditional
from math import ceil
import math
import sys
import re
from Rammbock.message import Field, BinaryField
from Rammbock.binary_tools import to_bin_of_length, to_0xhex, to_tbcd_binary, \
to_tbcd_value, to_bin, to_twos_comp, to_int
class _TemplateField(object):
def __init__(self, name, default_value):
    # _set_default_value is provided by the concrete field implementation.
    self._set_default_value(default_value)
    self.name = name

# Class-level defaults; concrete field types override these as needed.
has_length = True
can_be_little_endian = False
# Set to True by templates when a later field's length refers to this field.
referenced_later = False
def get_static_length(self):
    """Return this field's fixed length; raises IndexError when the length
    is dynamic (depends on another field's value)."""
    if not self.length.static:
        raise IndexError('Length of %s is dynamic.' % self._get_name())
    return self.length.value
def _get_element_value(self, paramdict, name=None):
    # An explicitly supplied parameter wins over the field's default.
    return paramdict.get(self._get_name(name), self.default_value)

def _get_element_value_and_remove_from_params(self, paramdict, name=None):
    # The '*' wildcard value never applies to fields that are referenced
    # by a later length definition.
    wild_card = paramdict.get('*') if not self.referenced_later else None
    return paramdict.pop(self._get_name(name),
                         self.default_value or wild_card)
def encode(self, paramdict, parent, name=None, little_endian=False):
    """Encode this field from *paramdict*, consuming its parameter."""
    value = self._get_element_value_and_remove_from_params(paramdict, name)
    if not value and self.referenced_later:
        # No value yet: leave a placeholder that is filled in once the
        # referencing length is known.
        return PlaceHolderField(self)
    return self._to_field(name, value, parent, little_endian=little_endian)
def _to_field(self, name, value, parent, little_endian=False):
    field_name, field_value = self._encode_value(value, parent, little_endian=little_endian)
    return Field(self.type, self._get_name(name), field_name, field_value, little_endian=little_endian)

def decode(self, data, message, name=None, little_endian=False):
    """Decode this field from the start of *data*, consuming its aligned length."""
    data = self._prepare_data(data)
    length, aligned_length = self.length.decode_lengths(message, len(data))
    if len(data) < aligned_length:
        raise Exception("Not enough data for '%s'. Needs %s bytes, given %s" % (self._get_recursive_name(message), aligned_length, len(data)))
    return Field(self.type,
                 self._get_name(name),
                 data[:length],
                 aligned_len=aligned_length,
                 little_endian=little_endian and self.can_be_little_endian)

def _prepare_data(self, data):
    # Hook for subclasses; by default the data is used as-is.
    return data
def validate(self, parent, paramdict, name=None):
name = name or self.name
field = parent[name]
value = field.bytes
forced_value = self._get_element_value_and_remove_from_params(paramdict, name)
if not forced_value or forced_value == 'None':
return []
elif forced_value.startswith('('):
return self._validate_pattern(forced_value, value, field)
return self._validate_exact_match(forced_value, value, field)
def _validate_pattern(self, forced_pattern, value, field):
if self._validate_or(forced_pattern, value, field):
return []
if self._validate_masked(forced_pattern, value):
return []
return ["Value of field '%s' does not match pattern '%s!=%s'" %
(field._get_recursive_name(), to_0xhex(value), forced_pattern)]
def _validate_or(self, forced_pattern, value, field):
if forced_pattern.find('|') != -1:
patterns = forced_pattern[1:-1].split('|')
for pattern in patterns:
if self._is_match(pattern, value, field._parent):
return True
return False
def _validate_masked(self, forced_pattern, value):
if forced_pattern.find('&') != -1:
masked_val, masked_field = self._apply_mask_to_values(forced_pattern, value)
if masked_val == masked_field:
return True
return False
def _apply_mask_to_values(self, forced_pattern, value):
val = forced_pattern[1:-1].split('&')[0].strip()
mask = forced_pattern[1:-1].split('&')[1].strip()
return to_int(val) & to_int(mask), to_int(to_0xhex(value)) & to_int(mask)
def _is_match(self, forced_value, value, parent):
# TODO: Should pass msg
forced_binary_val, _ = self._encode_value(forced_value, parent)
return forced_binary_val == value
def _validate_exact_match(self, forced_value, value, field):
if not self._is_match(forced_value, value, field._parent):
return ['Value of field %s does not match %s!=%s' %
(field._get_recursive_name(), self._default_presentation_format(value), forced_value)]
return []
def _default_presentation_format(self, value):
return to_0xhex(value)
def _get_name(self, name=None):
return name or self.name or self.type
def _raise_error_if_no_value(self, value, parent):
if value in (None, ''):
raise AssertionError('Value of %s not set' % self._get_recursive_name(parent))
def _get_recursive_name(self, parent):
if not parent:
return self.name
return parent._get_recursive_name() + self.name
def _set_default_value(self, value):
self.default_value = str(value) if value and value != '""' else None
class PlaceHolderField(object):
    """Stand-in for a field whose value is referenced later by a dynamic
    length expression; the real value is back-patched once known."""
    _type = 'referenced_later'
    _parent = None
    def __init__(self, template):
        # Keep the originating template so the value can be encoded later.
        self.template = template
class UInt(_TemplateField):
    """Unsigned integer field of a (possibly aligned) byte length."""
    type = 'uint'
    can_be_little_endian = True
    def __init__(self, length, name, default_value=None, align=None):
        _TemplateField.__init__(self, name, default_value)
        self.length = Length(length, align)
    def _encode_value(self, value, message, little_endian=False):
        """Encode *value* into its binary form, reversing byte order for
        little-endian encoding."""
        self._raise_error_if_no_value(value, message)
        length, aligned_length = self.length.decode_lengths(message)
        encoded = to_bin_of_length(length, value)
        if little_endian:
            encoded = encoded[::-1]
        return encoded, aligned_length
class Int(UInt):
    """Signed integer field, stored as two's complement."""
    type = 'int'
    can_be_little_endian = True
    def __init__(self, length, name, default_value=None, align=None):
        UInt.__init__(self, length, name, default_value, align)
    def _get_int_value(self, message, value):
        """Range-check *value* for this field's bit width and return its
        two's complement representation.

        Raises AssertionError when the value does not fit the signed range.
        """
        bin_len = self.length.decode_lengths(message)[0] * 8
        # Renamed from `min`/`max`, which shadowed the builtins.
        min_value = pow(-2, (bin_len - 1))
        max_value = pow(2, (bin_len - 1)) - 1
        if not min_value <= to_int(value) <= max_value:
            raise AssertionError('Value %s out of range (%d..%d)'
                                 % (value, min_value, max_value))
        return to_twos_comp(value, bin_len)
    def _encode_value(self, value, message, little_endian=False):
        self._raise_error_if_no_value(value, message)
        value = self._get_int_value(message, value)
        return UInt._encode_value(self, value, message, little_endian)
class Char(_TemplateField):
    """Character (string) field, optionally terminated and NUL-padded."""
    type = 'chars'
    def __init__(self, length, name, default_value=None, terminator=None):
        _TemplateField.__init__(self, name, default_value)
        # to_bin(None) is expected to yield an empty terminator; non-empty
        # terminators bound the field when decoding.
        self._terminator = to_bin(terminator)
        self.length = Length(length)
    def _encode_value(self, value, message, little_endian=False):
        # A Field value is reused verbatim (terminator already included);
        # plain values get the terminator appended before padding.
        if isinstance(value, Field):
            value = value._value
        else:
            value = str(value or '')
            value += self._terminator
        length, aligned_length = self.length.find_length_and_set_if_necessary(message, len(value))
        # Pad with NUL bytes up to the declared length.
        return value.ljust(length, '\x00'), aligned_length
    def _prepare_data(self, data):
        # With a terminator, consume data only up to and including it;
        # raises ValueError if the terminator is absent from the data.
        if self._terminator:
            return data[0:data.index(self._terminator) + len(self._terminator)]
        return data
class Binary(_TemplateField):
    """Bit-oriented binary field; length is given in bits and must be static."""
    type = 'bin'
    def __init__(self, length, name, default_value=None):
        _TemplateField.__init__(self, name, default_value)
        self.length = Length(length)
        if not self.length.static:
            raise AssertionError('Binary field length must be static. Length: %s' % length)
    def _encode_value(self, value, message, little_endian=False):
        self._raise_error_if_no_value(value, message)
        minimum_binary = to_bin(value)
        # Lengths are counted in bits here; convert to bytes when encoding.
        length, aligned = self.length.decode_lengths(message, len(minimum_binary))
        binary = to_bin_of_length(self._byte_length(length), value)
        return binary, self._byte_length(aligned)
    def _to_field(self, name, value, parent, little_endian=False):
        field_name, field_value = self._encode_value(value, parent, little_endian=little_endian)
        # BinaryField keeps the bit length so presentation can honour it.
        return BinaryField(self.length.value, self._get_name(name), field_name, field_value, little_endian=little_endian)
    def _byte_length(self, length):
        # Round a bit count up to whole bytes.
        return int(ceil(length / 8.0))
    def _is_match(self, forced_value, value, message):
        forced_binary_val, _ = self._encode_value(forced_value, message) # TODO: Should pass msg
        # Compare as integers so differing leading-zero padding still matches.
        return int(to_0xhex(forced_binary_val), 16) == int(to_0xhex(value), 16)
class TBCD(_TemplateField):
    """Telephony BCD field: decimal digits packed two per byte."""
    type = 'tbcd'
    # default_value now defaults to None for consistency with the other
    # field types (UInt, Char, Binary). Existing positional callers are
    # unaffected; the parameter simply became optional.
    def __init__(self, size, name, default_value=None):
        _TemplateField.__init__(self, name, default_value)
        self.length = Length(size)
    def _encode_value(self, value, message, little_endian=False):
        self._raise_error_if_no_value(value, message)
        binary = to_tbcd_binary(value)
        length = self.length.decode(message, len(binary))
        return binary, self._byte_length(length)
    def _default_presentation_format(self, value):
        # Show TBCD digits instead of raw hex in validation messages.
        return to_tbcd_value(value)
    def _byte_length(self, length):
        # Two BCD digits per byte; round up.
        return int(ceil(length / 2.0))
class PDU(_TemplateField):
    """Placeholder field marking where an embedded protocol data unit lives.

    Encoding and validation are handled by the embedded protocol itself,
    so both operations are no-ops here.
    """
    type = 'pdu'
    name = '__pdu__'
    def __init__(self, length):
        self.length = Length(length)
    def encode(self, params, parent, little_endian=False):
        # Content comes from the embedded protocol, not this template.
        return None
    def validate(self, parent, paramdict, name=None):
        # Nothing to validate at this level; report no failures.
        return []
def Length(value, align=None):
    """Factory returning the proper length strategy for *value*.

    A digit string gives a static length, '*' a free length, and anything
    else a dynamic length referencing another field. *align* (default 1)
    rounds encoded lengths up to a multiple of itself.
    """
    value = str(value)
    alignment = int(align) if align else 1
    if alignment < 1:
        raise Exception('Illegal alignment %d' % alignment)
    if value.isdigit():
        return _StaticLength(int(value), alignment)
    if value == '*':
        return _FreeLength(alignment)
    return _DynamicLength(value, alignment)
class _Length(object):
    """Abstract base for the field-length strategies."""
    free = False
    def __init__(self):
        self.value = None
        self.align = None
    def decode_lengths(self, message, max_length=None):
        raise Exception("Override this method in implementing class.")
    def _get_aligned_lengths(self, length):
        # Round *length* up to the next multiple of the alignment.
        padding = (self.align - length % self.align) % self.align
        return length, length + padding
    def decode(self, message, maximum_length=None):
        """Decode the length of this field. Maximum length is the maximum
        length available from data or None if maximum length is not known.
        """
        return self.decode_lengths(message, maximum_length)[0]
class _StaticLength(_Length):
    """Length fixed at template definition time."""
    static = True
    has_references = False
    def __init__(self, value, align):
        _Length.__init__(self)
        self.value = int(value)
        self.align = int(align)
    def decode_lengths(self, message, max_length=None):
        # The message content is irrelevant for a static length.
        return self._get_aligned_lengths(self.value)
    def find_length_and_set_if_necessary(self, message, min_length, little_endian=False):
        # Nothing to back-patch: a static length never depends on other fields.
        return self.decode_lengths(message)
class _FreeLength(_Length):
    """Length '*': consume whatever data remains in the current context."""
    static = False
    has_references = False
    free = True
    def __init__(self, align):
        self.align = int(align)
    def decode_lengths(self, message, max_length=None):
        if max_length is not None:
            return self._get_aligned_lengths(max_length)
        raise AssertionError('Free length (*) can only be used on context where maximum byte length is unambiguous')
    def find_length_and_set_if_necessary(self, message, min_length):
        # When encoding, the free length is simply the encoded value's length.
        return self._get_aligned_lengths(min_length)
class _DynamicLength(_Length):
    """Length taken from another field, optionally via '+n', '-n' or '*n'."""
    static = False
    has_references = True
    def __init__(self, value, align):
        self.field, self.value_calculator = parse_field_and_calculator(value)
        self.align = int(align)
    def calc_value(self, param):
        # Length derived from the referenced field's value.
        return self.value_calculator.calc_value(param)
    def solve_parameter(self, length):
        # Inverse: referenced field value needed to express *length*.
        return self.value_calculator.solve_parameter(length)
    def decode_lengths(self, parent, max_length=None):
        reference = self._find_reference(parent)
        if not self._has_been_set(reference):
            raise AssertionError('Value of %s not set' % self.field)
        return self._get_aligned_lengths(self.calc_value(reference.int))
    def _find_reference(self, parent):
        # Walks up the message tree looking for the referenced field.
        # NOTE(review): if the field is absent all the way up, this raises
        # AttributeError on the root's missing _parent rather than a clear
        # error; the trailing `or None` is redundant.
        if self.field in parent:
            return parent[self.field]
        return self._find_reference(parent._parent) or None
    def _has_been_set(self, reference):
        # PlaceHolderField instances carry _type 'referenced_later'.
        return reference._type != 'referenced_later'
    def _set_length(self, reference, min_length, little_endian=False):
        # Back-patch the referenced length field now that the length is known.
        value_len, aligned_len = self._get_aligned_lengths(min_length)
        reference._parent[self.field] = self._encode_ref_length(self.solve_parameter(aligned_len),
                                                                reference,
                                                                little_endian=little_endian)
        return value_len, aligned_len
    def _encode_ref_length(self, aligned_len, reference, little_endian=False):
        return reference.template.encode({self.field: str(aligned_len)},
                                         reference._parent, little_endian=little_endian)
    def find_length_and_set_if_necessary(self, parent, min_length, little_endian=False):
        reference = self._find_reference(parent)
        if self._has_been_set(reference):
            self._raise_error_if_not_enough_space(reference, self.solve_parameter(min_length))
            return self._get_aligned_lengths(self.calc_value(reference.int))
        return self._set_length(reference, min_length, little_endian=little_endian)
    def _raise_error_if_not_enough_space(self, reference, min_length):
        if reference.int < min_length:
            raise IndexError("Value for length is too short")
    @property
    def value(self):
        # A dynamic length has no single static value.
        raise IndexError('Length is dynamic.')
def _partition(operator, value):
    """Split *value* on the rightmost *operator*, yielding each part stripped."""
    head, separator, tail = value.rpartition(operator)
    return (part.strip() for part in (head, separator, tail))
def parse_field_and_calculator(value):
    """Parse a dynamic length expression like 'len', 'len-2' or 'len*4'.

    Returns the referenced field name and a calculator object that maps
    between the field's value and the actual length.
    """
    for symbol, calculator_type in (('-', Subtract), ('+', Adder), ('*', Multiplier)):
        if symbol in value:
            field, _, operand = _partition(symbol, value)
            return field, calculator_type(int(operand))
    return value.strip(), SingleValue()
class SingleValue(object):
    """Identity calculator: the length equals the referenced field's value."""
    def calc_value(self, param):
        return param
    def solve_parameter(self, length):
        return length
class Subtract(object):
    """Calculator for 'field - N' length expressions."""
    def __init__(self, subtractor):
        self.subtractor = subtractor
    def calc_value(self, param):
        # Length = referenced value minus the constant.
        return param - self.subtractor
    def solve_parameter(self, length):
        # Inverse of calc_value: value the field must hold for *length*.
        return length + self.subtractor
class Adder(object):
    """Calculator for 'field + N' length expressions."""
    def __init__(self, add):
        self.add = add
    def calc_value(self, param):
        # Length = referenced value plus the constant.
        return param + self.add
    def solve_parameter(self, length):
        # Inverse of calc_value.
        return length - self.add
class Multiplier(object):
    """Calculator for 'field * N' length expressions."""
    def __init__(self, multiplier):
        self.multiplier = multiplier
    def calc_value(self, param):
        # Length = referenced value times the constant.
        return param * self.multiplier
    def solve_parameter(self, length):
        # Inverse of calc_value, rounded up so the data always fits.
        return math.ceil(length / float(self.multiplier))
class BagSize(object):
    """Size constraint for a bag: '*' (unbounded), a fixed 'N', or a range 'N-M'."""
    fixed = re.compile(r'[1-9][0-9]*\Z')
    range = re.compile(r'([0-9]+)\s*-\s*([1-9][0-9]*)\Z')
    # sys.maxint exists only on Python 2; fall back to sys.maxsize so the
    # class also works on Python 3 (same sentinel meaning: "no upper bound").
    _UNBOUNDED = getattr(sys, 'maxint', sys.maxsize)
    def __init__(self, size):
        # TODO: add open range 2-n
        size = size.strip()
        if size == '*':
            self._set_min_max(0, self._UNBOUNDED)
        elif self.fixed.match(size):
            self._set_min_max(size, size)
        elif self.range.match(size):
            self._set_min_max(*self.range.match(size).groups())
        else:
            raise AssertionError("Invalid bag size %s." % size)
    def _set_min_max(self, min_, max_):
        self.min = int(min_)
        self.max = int(max_)
        if self.min > self.max:
            raise AssertionError("Invalid bag size %s." % str(self))
    def __str__(self):
        if self.min == self.max:
            return str(self.min)
        elif self.min == 0 and self.max == self._UNBOUNDED:
            return '*'
        return '%s-%s' % (self.min, self.max)
from robot.api import logger
from robot.api.deco import keyword
import redis
from redis.sentinel import Sentinel
from redis.cluster import RedisCluster as RedisCluster
from redis.cluster import ClusterNode
__author__ = 'Traitanit Huangsri'
__email__ = 'traitanit.hua@gmail.com'
class RedisLibraryKeywords(object):
    """Robot Framework keywords for Redis: connections (standalone, sentinel,
    cluster) plus operations on strings, hashes, sets and lists."""

    @keyword('Get Redis Cluster')
    def get_redis_cluster(self, redis_host, redis_port=6379):
        """Get from the Redis master's address corresponding.

        Arguments:
            - redis_host: hostname or IP address of the Redis server.
            - redis_port: Redis port number (default=6379)

        Return cluster detail

        Examples:
        | @{cluster_detail}= | Get Redis Cluster | 'redis-dev.com' | 6379 |
        """
        try:
            nodes = [ClusterNode(redis_host, redis_port)]
            redis_cluster = RedisCluster(startup_nodes=nodes)
        except Exception as ex:
            logger.error(str(ex))
            raise Exception(str(ex))
        return redis_cluster

    @keyword('Get Redis Master')
    def get_redis_master(self, redis_host, redis_port=26379, service_name=None):
        """Get from the Redis master's address corresponding.

        Arguments:
            - redis_host: hostname or IP address of the Redis sentinel.
            - redis_port: Redis sentinel port number (default=26379)
            - service_name: Redis master's address corresponding

        Return sentinel detail lists

        Examples:
        | @{sentinel_detail}= | Get Redis Master | 'redis-dev.com' | 26379 | 'service-name' |
        """
        try:
            sentinel = Sentinel([(redis_host, redis_port)], socket_timeout=0.1)
            sentinel_detail = sentinel.discover_master(service_name)
        except Exception as ex:
            logger.error(str(ex))
            raise Exception(str(ex))
        return sentinel_detail

    @keyword('Connect To Redis')
    def connect_to_redis(self, redis_host, redis_port=6379, db=0, redis_password=None, ssl=False, ssl_ca_certs=None):
        """Connect to the Redis server.

        Arguments:
            - redis_host: hostname or IP address of the Redis server.
            - redis_port: Redis port number (default=6379)
            - db: Redis keyspace number (default=0)
            - redis_password: password for Redis authentication
            - ssl: Connect Redis with SSL or not (default is False)
            - ssl_ca_certs: CA Certification when connect Redis with SSL

        Return redis connection object

        Examples:
        | ${redis_conn}= | Connect To Redis | redis-dev.com | 6379 | redis_password=password |
        """
        try:
            redis_conn = redis.StrictRedis(host=redis_host, port=redis_port, db=db,
                                           password=redis_password, ssl=ssl, ssl_ca_certs=ssl_ca_certs)
        except Exception as ex:
            logger.error(str(ex))
            raise Exception(str(ex))
        return redis_conn

    @keyword('Connect To Redis From URL')
    def connect_to_redis_from_url(self, redis_url, db=0):
        """Connect to the Redis server.

        Arguments:
            - redis_url: URL for connect to Redis. (redis://<username>:<password>@<host>:<port>)
            - db: Redis keyspace number (default=0)

        Return redis connection object

        Examples:
        | ${redis_conn}= | Connect To Redis From URL | redis://admin:adminpassword@redis-dev.com:6379 |
        """
        try:
            logger.info("Creating Redis Connection using : url=%s " % redis_url)
            redis_conn = redis.from_url(redis_url, db)
        except Exception as ex:
            logger.error(str(ex))
            raise Exception(str(ex))
        return redis_conn

    @keyword('Flush All')
    def flush_all(self, redis_conn):
        """ Delete all keys from Redis

        Arguments:
            - redis_conn: Redis connection object

        Examples:
        | Flush All | ${redis_conn} |
        """
        return redis_conn.flushall()

    @keyword('Delete From Redis')
    def delete_from_redis(self, redis_conn, key):
        """ Delete data from Redis

        Arguments:
            - redis_conn: Redis connection object
            - key: String keyword to find.

        Examples:
        | Delete From Redis | ${redis_conn} | BARCODE|1234567890 |
        """
        return redis_conn.delete(key)

    @keyword('Append To Redis')
    def append_to_redis(self, redis_conn, key, value):
        """ Append data to Redis. If key doesn't exist, create it with value.
        Return the new length of the value at key.

        Arguments:
            - redis_conn: Redis connection object.
            - key: String key.
            - value: String value.

        Examples:
        | Append To Redis | ${redis_conn} | BARCODE|1234567890 | ${data} |
        """
        return redis_conn.append(key, value)

    @keyword('Set To Redis')
    def set_to_redis(self, redis_conn, key, data, expire_time=3600):
        """ Set data to Redis

        Arguments:
            - redis_conn: Redis connection object
            - key: String keyword to find.
            - data: String data
            - expire_time: TTL default value is 3600s

        Examples:
        | Set To Redis | ${redis_conn} | BARCODE|0000000011 | ${data} |
        | Set To Redis | ${redis_conn} | BARCODE|1234567890 | ${data} | expire_time=600 |
        """
        # Third positional argument of redis.set() is `ex` (expiry in seconds).
        return redis_conn.set(key, data, expire_time)

    @keyword('Get From Redis')
    def get_from_redis(self, redis_conn, key):
        """ Get cached data from Redis

        Arguments:
            - redis_conn: Redis connection object
            - key: String keyword to find.

        Examples:
        | ${data}= | Get From Redis | ${redis_conn} | BARCODE|1234567890 |
        """
        return redis_conn.get(key)

    @keyword('Expire Data From Redis')
    def expire_data_from_redis(self, redis_conn, key, expire_time=0):
        """ Expire items from Redis

        Arguments:
            - redis_conn: Redis connection object
            - key: String keyword to find.
            - expire_time: waiting time to expire data (Default = expire now)

        Examples:
        | Expire Data From Redis | ${redis_conn} | BARCODE|1234567890 |
        """
        redis_conn.expire(key, expire_time)

    @keyword('Get Time To Live In Redis')
    def get_time_to_live_in_redis(self, redis_conn, key):
        """ Return time to live in Redis (minutes)

        Arguments:
            - redis_conn: Redis connection object
            - key: String keyword to find.

        Examples:
        | Get Time To Live In Redis | ${redis_conn} | BARCODE|1234567890 |
        """
        # Query the TTL once; re-querying could race with key expiry and
        # return inconsistent values between the check and the result.
        ttl = redis_conn.ttl(key)
        if ttl > 0:
            return ttl / 60
        # Negative sentinels (-1: no expiry, -2: missing key) pass through.
        return ttl

    @keyword('Get Time To Live In Redis Second')
    def get_time_to_live_in_redis_second(self, redis_conn, key):
        """ Return time to live in Redis (seconds)

        Arguments:
            - redis_conn: Redis connection object
            - key: String keyword to find.

        Examples:
        | Get Time To Live In Redis Second | ${redis_conn} | BARCODE|1234567890 |
        """
        return redis_conn.ttl(key)

    @keyword('Redis Key Should Be Exist')
    def redis_key_should_be_exist(self, redis_conn, key):
        """ Keyword will fail if specify key doesn't exist in Redis

        Arguments:
            - redis_conn: Redis connection object
            - key: String keyword to find.

        Examples:
        | Redis Key Should Be Exist | ${redis_conn} | BARCODE|1234567890 |
        """
        if not redis_conn.exists(key):
            # Carry the reason in the exception, not only in the log.
            message = "Key: " + key + " doesn't exist in Redis."
            logger.error(message)
            raise AssertionError(message)

    @keyword('Redis Key Should Not Be Exist')
    def redis_key_should_not_be_exist(self, redis_conn, key):
        """ Keyword will fail if specify key exist in Redis

        Arguments:
            - redis_conn: Redis connection object
            - key: String keyword to find.

        Examples:
        | Redis Key Should Not Be Exist | ${redis_conn} | BARCODE|1234567890 |
        """
        if redis_conn.exists(key):
            message = "Key: " + key + " exists in Redis."
            logger.error(message)
            raise AssertionError(message)

    @keyword('Get Dictionary From Redis Hash')
    def get_dict_from_redis_hash(self, redis_conn, hash_name):
        """ Get cached data from Redis hashes

        Arguments:
            - redis_conn: Redis connection object
            - hash_name: Hash name.

        Examples:
        | ${data}= | Get Dictionary From Redis Hash | ${redis_conn} | HASHNAME |
        """
        return redis_conn.hgetall(hash_name)

    @keyword('Get From Redis Hash')
    def get_from_redis_hash(self, redis_conn, hash_name, key):
        """ Get cached data from Redis hashes by key

        Arguments:
            - redis_conn: Redis connection object
            - hash_name: Hash name.
            - key: String keyword to find.

        Examples:
        | ${data}= | Get From Redis Hash | ${redis_conn} | HASHNAME | BARCODE|1234567890 |
        """
        return redis_conn.hget(hash_name, key)

    @keyword('Set To Redis Hash')
    def set_to_redis_hash(self, redis_conn, hash_name, key, data):
        """ Set data to Redis within Hash

        Arguments:
            - redis_conn: Redis connection object
            - hash_name: String hash
            - key: String keyword to find.
            - data: String data

        Examples:
        | Set To Redis Hash | ${redis_conn} | HASHNAME | key | value |
        """
        return redis_conn.hset(hash_name, key, data)

    @keyword('Add Hash Map To Redis')
    def add_hash_map_to_redis(self, redis_conn, hash_name, dict_data):
        """ Set data to Redis within Hash

        Arguments:
            - redis_conn: Redis connection object
            - hash_name: String hash
            - dict_data: data as dict

        Examples:
        | Add Hash Map To Redis | ${redis_conn} | HASHNAME | {"name":"Fred","age":25} |
        """
        # NOTE(review): HMSET is deprecated in Redis >= 4.0 in favour of
        # HSET with a mapping; kept to preserve this keyword's return value.
        return redis_conn.hmset(hash_name, dict_data)

    @keyword('Delete From Redis Hash')
    def delete_from_redis_hash(self, redis_conn, hash_name, key):
        """Delete ``key`` from hash ``name``

        Arguments:
            - redis_conn: Redis connection object.
            - hash_name: Hash keyword to find.
            - key: String keyword to find.

        Examples:
        | Delete From Redis Hash | ${redis_conn} | HASHNAME | KEY |
        """
        return redis_conn.hdel(hash_name, key)

    @keyword('Redis Hash Key Should Be Exist')
    def redis_hash_key_should_be_exist(self, redis_conn, hash_name, key):
        """ Keyword will fail if specify hash key doesn't exist in Redis

        Arguments:
            - redis_conn: Redis connection object
            - hash_name: Hash name.
            - key: String keyword to find.

        Examples:
        | Redis Hash Key Should Be Exist | ${redis_conn} | HASHNAME | BARCODE|1234567890 |
        """
        if not redis_conn.hexists(hash_name, key):
            message = ("Hash: " + hash_name + " and Key: " +
                       key + " doesn't exist in Redis.")
            logger.error(message)
            raise AssertionError(message)

    @keyword('Redis Hash Key Should Not Be Exist')
    def redis_hash_key_should_not_be_exist(self, redis_conn, hash_name, key):
        """ Keyword will fail if specify hash key exist in Redis

        Arguments:
            - redis_conn: Redis connection object
            - hash_name: Hash name.
            - key: String keyword to find.

        Examples:
        | Redis Hash Key Should Not Be Exist | ${redis_conn} | HASHNAME | BARCODE|1234567890 |
        """
        if redis_conn.hexists(hash_name, key):
            message = ("Hash: " + hash_name + " and Key: " +
                       key + " exists in Redis.")
            logger.error(message)
            raise AssertionError(message)

    @keyword('Get Set From Redis Set')
    def get_set_from_redis_set(self, redis_conn, set_name):
        """ Get cached members from Redis sets.

        Arguments:
            - redis_conn: Redis connection object
            - set_name: Set name to find.

        Examples:
        | ${data}= | Get Set From Redis Set | ${redis_conn} | Fruit |
        """
        return redis_conn.smembers(set_name)

    @keyword('Add Set Data To Redis Set')
    def add_set_data_to_redis_set(self, redis_conn, set_name, *args):
        """ Add set data into Redis.

        Arguments:
            - redis_conn: Redis connection object
            - set_name: Set name as key in redis
            - *args: Item that you need to put in set

        Examples:
        | Add Set Data To Redis Set | ${redis_conn} | Fruit | Banana | Apple | Orange |
        """
        return redis_conn.sadd(set_name, *args)

    @keyword('Item Should Exist In Redis Set')
    def item_should_exist_in_redis_set(self, redis_conn, set_name, item):
        """ Check item should exist in set.

        Arguments:
            - redis_conn: Redis connection object
            - set_name: Set name as key in redis
            - item: Item that you need check

        Examples:
        | Item Should Exist In Redis Set | ${redis_conn} | Fruit | Apple |
        """
        if not redis_conn.sismember(set_name, item):
            message = "Item: " + item + " doesn't exist in Redis."
            logger.error(message)
            raise AssertionError(message)

    @keyword('Item Should Not Exist In Redis Set')
    def item_should_not_exist_in_redis_set(self, redis_conn, set_name, item):
        """ Check item should not exist in set.

        Arguments:
            - redis_conn: Redis connection object
            - set_name: Set name as key in redis
            - item: Item that you need check

        Examples:
        | Item Should Not Exist In Redis Set | ${redis_conn} | Fruit | Mango |
        """
        if redis_conn.sismember(set_name, item):
            message = "Item: " + item + " exists in Redis."
            logger.error(message)
            raise AssertionError(message)

    @keyword('Get Length of Redis Set')
    def get_length_of_redis_set(self, redis_conn, set_name):
        """ Get length of set.

        Arguments:
            - redis_conn: Redis connection object
            - set_name: Set name as key in redis

        Examples:
        | ${set_length} | Get Length of Redis Set | ${redis_conn} | Fruit |
        """
        return redis_conn.scard(set_name)

    @keyword('Delete Set Data In Redis Set')
    def delete_set_data_in_redis_set(self, redis_conn, set_name, *args):
        """ Delete set of item from set.
        If you need to delete set from redis use 'Delete From Redis' keyword.

        Arguments:
            - redis_conn: Redis connection object
            - set_name: Set name as key in redis
            - *args: Item that you need to delete from set

        Examples:
        | Delete Set Data In Redis Set | ${redis_conn} | Fruit | Banana | Orange |
        """
        return redis_conn.srem(set_name, *args)

    @keyword('Push Item To First Index In List Redis')
    def push_item_to_first_index_in_list_redis(self, redis_conn, list_name, *args):
        """ Push item to first index in list. With multiple arguments, the last argument will be the first item.

        Arguments:
            - redis_conn: Redis connection object
            - list_name: List name as key in redis
            - *args: Item that you need to put in set

        Examples:
        | Push Item To First Index In List Redis | ${redis_conn} | Country | Germany | Italy | France | Spain |
        | ${list_items} | Get All Item From List Redis | ${redis_conn} | Country |

        Result from ``Get All Item From List Redis``: [b'Spain', b'France', b'Italy', b'Germany']
        """
        return redis_conn.lpush(list_name, *args)

    @keyword('Push Item To Last Index In List Redis')
    def push_item_to_last_index_in_list_redis(self, redis_conn, list_name, *args):
        """ Push item to last index in list. With multiple arguments, the last argument will be the last item.

        Arguments:
            - redis_conn: Redis connection object
            - list_name: List name as key in redis
            - *args: Item that you need to put in set

        Examples:
        | Push Item To Last Index In List Redis | ${redis_conn} | Country | Germany | Italy | France | Spain |
        | ${list_items} | Get All Item From List Redis | ${redis_conn} | Country |

        Result from ``Get All Item From List Redis``: [b'Germany', b'Italy', b'France', b'Spain']
        """
        return redis_conn.rpush(list_name, *args)

    @keyword('Update Item In List Redis')
    def update_item_in_list_redis(self, redis_conn, list_name, index, item):
        """Update item in list by specific index.

        Arguments:
            - redis_conn: Redis connection object
            - list_name: List name as key in redis
            - index: Index in list that you need to update
            - item: New item

        Examples:
        | Update Item In List Redis | ${redis_conn} | Country | 1 | England |
        """
        return redis_conn.lset(list_name, index, item)

    @keyword('Get Item From List Redis')
    def get_item_from_list_redis(self, redis_conn, list_name, index):
        """Get item in list by specific index.

        Arguments:
            - redis_conn: Redis connection object
            - list_name: List name as key in redis
            - index: Index in list that you need to read

        Examples:
        | ${item_data} | Get Item From List Redis | ${redis_conn} | Country | 1 |
        """
        return redis_conn.lindex(list_name, index)

    @keyword('Get All Item From List Redis')
    def get_all_item_from_list_redis(self, redis_conn, list_name):
        """Get all items in list.

        Arguments:
            - redis_conn: Redis connection object
            - list_name: List name as key in redis

        Examples:
        | ${list_items} | Get All Item From List Redis | ${redis_conn} | Country |
        """
        return redis_conn.lrange(list_name, 0, -1)

    @keyword('Get Length From List Redis')
    def get_length_from_list_redis(self, redis_conn, list_name):
        """Get length of list.

        Arguments:
            - redis_conn: Redis connection object
            - list_name: List name as key in redis

        Examples:
        | ${list_length} | Get Length From List Redis | ${redis_conn} | Country |
        """
        return redis_conn.llen(list_name)

    @keyword('Get Index of Item From List Redis')
    def get_index_of_item_from_list_redis(self, redis_conn, list_name, item):
        """Get indexes of the items that matched in list.

        Arguments:
            - redis_conn: Redis connection object
            - list_name: List name as key in redis
            - item: Search item

        Examples:
        | ${list_index} | Get Index of Item From List Redis | ${redis_conn} | Country | Germany |

        Keyword will return result as list of index: [0, 1, 5]
        """
        return [i for i, j in enumerate(redis_conn.lrange(list_name, 0, -1)) if j == str.encode(item)]

    @keyword('Get All Match Keys')
    def get_all_match_keys(self, redis_conn, key, count=100):
        """ Get all keys that match with specific keyword

        Arguments:
            - redis_conn: Redis connection object
            - key: String keyword to find, may contain wildcard.
            - count: SCAN hint: number of elements fetched per iteration

        Examples:
        | @{key_list}= | Get All Match Keys | ${redis_conn} | BARCODE* | 1000 |
        """
        # SCAN is cursor based: a single scan() call returns only one batch
        # and could silently miss keys. scan_iter follows the cursor until
        # the whole keyspace has been covered.
        return list(redis_conn.scan_iter(match=key, count=count))

    @keyword('Delete Item From List Redis')
    def delete_item_from_list_redis(self, redis_conn, list_name, index, item=None):
        """Delete data from list by specific index.

        Arguments:
            - redis_conn: Redis connection object
            - list_name: List name as key in redis
            - index: Index in list that you need to delete
            - item: Compare item. If it is None, keyword will not compare with item in index.
                    But if it is not None, keyword will compare it with item in index before delete.
                    If not matched, keyword will fail.

        Examples 1:
        | Delete Item From List Redis | ${redis_conn} | Country | 2 |

        Examples 2: keyword will compare it with item in index before delete.
        If not matched, keyword will fail
        | Delete Item From List Redis | ${redis_conn} | Country | 2 | Spain |
        """
        if item is not None:
            if not redis_conn.lindex(list_name, index) == str.encode(item):
                message = "Item: " + item + " not matched with index item in list."
                logger.error(message)
                raise AssertionError(message)
        # LREM has no delete-by-index, so overwrite the slot with a sentinel
        # and remove the first occurrence of it. NOTE(review): if the list
        # legitimately contains 'DELETE_ITEM' before *index*, that earlier
        # occurrence would be removed instead — confirm acceptable.
        redis_conn.lset(list_name, index, 'DELETE_ITEM')
        redis_conn.lrem(list_name, 1, 'DELETE_ITEM')
from abc import ABC
from robot.utils import is_truthy
from RemoteMonitorLibrary.model.chart_abstract import ChartAbstract
from RemoteMonitorLibrary.model.configuration import Configuration
from RemoteMonitorLibrary.model.runner_model import Parser, plugin_integration_abstract, plugin_runner_abstract,\
FlowCommands, Variable
from RemoteMonitorLibrary.runner.ssh_runner import SSHLibraryPlugInWrapper, SSHLibraryCommand, \
extract_method_arguments
from RemoteMonitorLibrary.model.registry_model import RegistryModule
class SSH_PlugInAPI(ABC, SSHLibraryPlugInWrapper, plugin_integration_abstract):
    __doc__ = """SSHLibraryCommand execution in background thread
    SSHLibraryCommand starting on adding to command pool within separate ssh session
    Output collecting during execution and sending to parsing and loading to data handler
    On connection interrupting command session restarting and output collecting continue
    """
    @staticmethod
    def normalise_arguments(prefix='return', func=is_truthy, **kwargs):
        """Apply *func* (default: Robot's is_truthy) to every keyword value
        whose name starts with *prefix*, returning the updated kwargs."""
        for arg_name, arg_value in kwargs.items():
            if arg_name.startswith(prefix):
                # Rebinding values (not keys) during items() iteration is safe.
                kwargs[arg_name] = func(arg_value)
        return kwargs
# Base for plugins that issue no remote command themselves; concrete
# subclasses supply behaviour through the plugin_runner_abstract hooks.
class Common_PlugInAPI(ABC, plugin_integration_abstract, plugin_runner_abstract):
    __doc__ = """Common plugin without specified action
    """
class ParseRC(Parser):
    """Parser that verifies a command's return code.

    Raises AssertionError when the observed ``rc`` differs from the
    expected one; otherwise returns True, honouring the declared ``-> bool``
    contract (previously the method fell through and returned None).
    """
    def __init__(self, expected_rc=0):
        self._rc = expected_rc
    def __call__(self, output: dict) -> bool:
        rc = output.get('rc')
        assert rc == self._rc, f"Command result not match expected one (Result: {rc} vs. Expected: {self._rc})"
        return True
# Public facade of the plugins module: the base classes and helpers that
# concrete RemoteMonitor plugins are expected to import from here.
__all__ = ['SSH_PlugInAPI',
           'Common_PlugInAPI',
           'FlowCommands',
           SSHLibraryCommand.__name__,
           'extract_method_arguments',
           Parser.__name__,
           Variable.__name__,
           ParseRC.__name__,
           ChartAbstract.__name__,
           Configuration.__name__,
           RegistryModule.__name__
           ]
from RemoteMonitorLibrary.model.db_schema import Table, Field, FieldType, PrimaryKeys, Query, ForeignKey
class TraceHost(Table):
    """Registry of monitored hosts: HOST_ID (PK) -> unique HostName."""
    def __init__(self):
        super().__init__(name='TraceHost')
        for column in (Field('HOST_ID', FieldType.Int, PrimaryKeys(True)),
                       Field('HostName', FieldType.Text, not_null=True, unique=True)):
            self.add_field(column)
class Points(Table):
    """Named measurement points per host, with start/end markers."""
    def __init__(self):
        host_link = ForeignKey('HOST_REF', 'TraceHost', 'HOST_ID')
        state_query = Query('select_state', """SELECT {} FROM Points
                            WHERE HOST_REF = {} AND PointName = '{}'""")
        super().__init__(name='Points',
                         fields=(Field('HOST_REF', FieldType.Int), Field('PointName'),
                                 Field('Start'), Field('End')),
                         foreign_keys=[host_link],
                         queries=[state_query])
class LinesCacheMap(Table):
    """Join table mapping a measurement's OUTPUT_REF to its ordered cached lines."""
    def __init__(self):
        Table.__init__(
            self,
            fields=[Field('OUTPUT_REF', FieldType.Int),
                    Field('ORDER_ID', FieldType.Int),
                    Field('LINE_REF', FieldType.Int)],
            foreign_keys=[ForeignKey('OUTPUT_REF', 'TimeMeasurement', 'OUTPUT_ID'),
                          ForeignKey('LINE_REF', 'LinesCache', 'LINE_ID')],
            queries=[Query('last_output_id', 'select max(OUTPUT_REF) from LinesCacheMap')])
class LinesCache(Table):
    """Deduplicated storage of output lines, keyed by a per-line hash."""
    def __init__(self):
        columns = [Field('LINE_ID', FieldType.Int, PrimaryKeys(True)),
                   Field('HashTag', unique=True),
                   Field('Line')]
        super().__init__(fields=columns)
class PlugInTable(Table):
    """Base table for plugin data with helpers wiring the standard references."""
    def add_time_reference(self):
        """Add HOST_REF / TL_REF columns with foreign keys to TraceHost and TimeLine."""
        for column in (Field('HOST_REF', FieldType.Int), Field('TL_REF', FieldType.Int)):
            self.add_field(column)
        self.add_foreign_key(ForeignKey('TL_REF', 'TimeLine', 'TL_ID'))
        self.add_foreign_key(ForeignKey('HOST_REF', 'TraceHost', 'HOST_ID'))
    def add_output_cache_reference(self):
        """Add an OUTPUT_REF column referencing the cached command output map."""
        self.add_field(Field('OUTPUT_REF', FieldType.Int))
        self.add_foreign_key(ForeignKey('OUTPUT_REF', 'LinesCacheMap', 'OUTPUT_REF'))
class TimeLine(Table):
    """Timestamp dimension table; plugin tables point at it via TL_REF."""
    def __init__(self):
        super().__init__(
            name='TimeLine',
            fields=[Field('TL_ID', FieldType.Int, PrimaryKeys(True)),
                    Field('TimeStamp', FieldType.Text)],
            queries=[Query('select_last', 'SELECT TL_ID FROM TimeLine WHERE TimeStamp == "{timestamp}"')])
class log(PlugInTable):
    """DB table receiving python logging records (used when log_to_db is enabled).

    FIELDS_TYPES maps logging.LogRecord attribute names to column types; only
    the indices listed in _shown_fields become actual columns (capitalized).
    """
    FIELDS_TYPES = [
        ('asctime', FieldType.Int),
        ('levelname', FieldType.Text),
        ('threadName', FieldType.Text),
        ('module', FieldType.Text),
        ('funcName', FieldType.Text),
        ('msg', FieldType.Text),
        ('lineno', FieldType.Int),
        ('exc_text', FieldType.Text),
        ('process', FieldType.Int),
        ('thread', FieldType.Text),
        ('levelno', FieldType.Int),
        ('name', FieldType.Text)]
    # Subset of FIELDS_TYPES indices actually stored in the table.
    _shown_fields = [0, 1, 2, 3, 4, 5]

    def __init__(self):
        super().__init__()
        for attr_name, attr_type in (self.FIELDS_TYPES[i] for i in self._shown_fields):
            self.add_field(Field(attr_name.capitalize(), attr_type))

    @staticmethod
    def format_record(record):
        """Project a logging record onto the shown columns, in column order."""
        names = [log.FIELDS_TYPES[i][0] for i in log._shown_fields]
        assert all(hasattr(record, n) for n in names)
        return tuple(getattr(record, n) for n in names)
import re
from typing import Iterable
from SSHLibrary import SSHLibrary as RSSHLibrary
from RemoteMonitorLibrary import plugins_modules
from RemoteMonitorLibrary.api import model, db, services
from RemoteMonitorLibrary.api.plugins import *
from RemoteMonitorLibrary.model.errors import RunnerError
from RemoteMonitorLibrary.utils import logger
__doc__ = """
== SSHLibrary PlugIn ==
Periodical execute of SSHLibrary command sequence
=== PlugIn Arguments ===
- command_sequence: commands to be send to remote host periodically
- user_options: regular SSHLibrary keyword arguments (See in [http://robotframework.org/SSHLibrary/SSHLibrary.html#library-documentation-top|RF SSHLibrary help])
Plus optional three extra arguments allowed:
- rc: int; Last command should return [*]
- expected: str; Output should exist [**]
- prohibited: str; Output should not exists [**]
- tolerance: int; Count of errors allowed before test will be terminated (Default: 0)
-1 - errors will be ignored, just logged
* Support several values separated by '|'
** Support several values separated by '|' or '&' for OR and AND accordingly
=== Example ===
| Keyword | Arguments | Comments |
| `Start monitor plugin` | SSHLibrary /usr/bin/my_command | rc=0 [rest kwargs] | my_command will be evaluated for return RC=0 |
| `Start monitor plugin` | SSHLibrary /usr/bin/my_command | prohibited=Error [rest kwargs] | my_command will be evaluated for stderr doesn't contain word 'Error' |
=== Limitation ===
- SSHLibrary Plugin doesn't support interactive commands;
Note:
be aware to provide correct keyword arguments
"""
class sshlibrary_monitor(db.PlugInTable):
    """Result table for the SSHLibrary plugin: one row per monitored command run."""
    def __init__(self):
        super().__init__('sshlibrary_monitor')
        self.add_time_reference()
        for column in (model.Field('Command'),
                       model.Field('Rc', model.FieldType.Int),
                       model.Field('Status')):
            self.add_field(column)
        self.add_output_cache_reference()
class UserCommandParser(Parser):
    """Evaluate a single SSHLibrary command result against user expectations.

    Options: 'rc' ('|'-separated expected return codes), 'expected' /
    'prohibited' (output patterns; '|' = OR, '&' = AND) and 'tolerance'
    (failed iterations allowed before the runner terminates; -1 = unlimited).
    """
    def __init__(self, **kwargs):
        super().__init__(table=services.TableSchemaService().tables.sshlibrary_monitor, **kwargs)
        self._tolerance = self.options.get('tolerance')
        # Failures seen so far; compared against tolerance before raising.
        self._tolerance_counter = 0
    def __call__(self, output: dict) -> bool:
        out = output.get('stdout', None)
        err = output.get('stderr', None)
        # Combined stdout+stderr text, newline-joined when both are present.
        total_output = f'{out}' if out else ''
        total_output += ('\n' if len(total_output) > 0 else '') + (f'{err}' if err else '')
        rc = output.get('rc', -1)
        exp_rc = self.options.get('rc', None)
        expected = self.options.get('expected', None)
        prohibited = self.options.get('prohibited', None)
        errors = []
        if exp_rc:
            # Any of the '|'-separated expected codes must match.
            if not any([int(_rc) == rc for _rc in re.split(r'\s*\|\s*', exp_rc)]):
                errors.append(f"Rc [{rc}] not match expected - {exp_rc}")
        if expected:
            # OR over '|'-split patterns AND all of the '&'-split patterns.
            if not any([pattern in total_output for pattern in re.split(r'\s*\|\s*', expected)]) or \
                    not all([pattern in total_output for pattern in re.split(r'\s*\&\s*', expected)]):
                errors.append("Output not contain expected pattern [{}]".format(expected))
        if prohibited:
            # Inverse logic: none of the OR patterns and none of the AND patterns may appear.
            if any([pattern in total_output for pattern in re.split(r'\s*\|\s*', prohibited)]) or \
                    not all([pattern not in total_output for pattern in re.split(r'\s*\&\s*', prohibited)]):
                errors.append("Output contain prohibited pattern [{}]".format(prohibited))
        if len(errors) > 0:
            # NOTE(review): failure status is stored as the string 'False' (not
            # 'Fail'); DB consumers compare against 'Pass' - confirm intended.
            st = 'False'
            msg = "\nErrors:\n\t{}\n\tRC: {}\nOutput:\n\t{}".format('\n\t'.join(errors),
                                                                   rc,
                                                                   '\n\t'.join(total_output.splitlines()))
            logger.error(msg)
        else:
            st = 'Pass'
            msg = 'Output:\n\t{}'.format('\n\t'.join(total_output.splitlines()))
        # Cache the textual output and persist the result row.
        output_ref = services.CacheLines().upload(msg)
        du = services.data_factory(self.table,
                                   self.table.template(self.host_id, None, self.options.get('command'), rc, st,
                                                       output_ref))
        self.data_handler(du)
        if st != 'Pass':
            if self._tolerance == -1:
                pass  # unlimited tolerance: failures are logged only
            elif self._tolerance_counter == self._tolerance:
                raise RunnerError(f"{self}: Error count reach tolerance ({self._tolerance})", msg)
            else:
                self._tolerance_counter += 1
        return True if st == 'Pass' else False
class SSHLibrary(SSH_PlugInAPI):
    """Plugin periodically executing one SSHLibrary command and validating its result."""
    def __init__(self, parameters, data_handler, command, **user_options):
        self._command = command
        assert self._command, "Commands not provided"
        # Default the plugin name to the command text itself.
        user_options.update({'name': user_options.get('name', self._command)})
        user_options.update({'tolerance': int(user_options.get('tolerance', 0))})
        super().__init__(parameters, data_handler, **user_options)
        # Coerce return_* string flags into real booleans.
        user_options = self.normalise_arguments(**user_options)
        if user_options.get('rc', None) is not None:
            assert user_options.get('return_rc'), "For verify RC argument 'return_rc' must be provided"
        if user_options.get('expected') or user_options.get('prohibited'):
            # Pattern checks need an output channel: if stdout was explicitly
            # disabled, stderr must be enabled instead.
            if user_options.get('return_stdout', None) is not None and not user_options.get('return_stdout'):
                assert user_options.get('return_stderr', None), \
                    "For verify expected pattern one of arguments 'return_stdout' or 'return_stderr' must be provided"
        # Single Command-flow entry: execute_command + UserCommandParser validation.
        self.set_commands(FlowCommands.Command,
                          SSHLibraryCommand(RSSHLibrary.execute_command, self._command,
                                            parser=UserCommandParser(host_id=self.host_id,
                                                                     data_handler=self.data_handler,
                                                                     name=self.name, command=self._command,
                                                                     **self.options),
                                            **dict(extract_method_arguments(RSSHLibrary.execute_command.__name__,
                                                                            **self.options)))
                          )
    @staticmethod
    def affiliated_module():
        # Host connection module this plugin attaches to.
        return plugins_modules.SSH
    @staticmethod
    def affiliated_tables() -> Iterable[model.Table]:
        # DB tables this plugin writes into.
        return sshlibrary_monitor(),
# Quick manual smoke check of the table definition when run directly.
if __name__ == '__main__':
    table = sshlibrary_monitor()
    print(f"{table}")
__doc__ = """# first time setup CentOS - for Ubuntu/Debian and others see https://www.cyberciti.biz/tips/compiling-linux-kernel-26.html
curl https://cdn.kernel.org/pub/linux/kernel/v5.x/linux-5.11.10.tar.xz -o kernel.tar.xz
unxz kernel.tar.xz
tar xvf kernel.tar
cd linux-5.11.10/
cp -v /boot/config-$(uname -r) .config
sudo dnf -y group install "Development Tools"
sudo dnf -y install time ncurses-devel hmaccalc zlib-devel binutils-devel elfutils-libelf-devel bison flex openssl-devel make gcc
make defconfig
/usr/bin/time -f "$(date +'%Y-%m-%dT%H:%M:%S%:z'):\t%e real,\t%U user,\t%S sys,\t%M max_mem_kb,\t%F page_faults,\t%c involuntarily_ctx_swtc,\t%I file_inputs,\t%O file_outputs,\t%r socket_recieved,\t%s socket_sent" -a -o data.txt make -j 4 clean all
and you'll know something is happening when you start to see real compilation happening... by looking at the data.txt and seeing something like the following
2021-03-25T18:58:43+02:00: 679.22 real, 1082.81 user, 160.85 sys, 248824 max_mem_kb, 4 page_faults, 120697 involuntarily_ctx_swtc, 98104 file_inputs, 1492752 file_outputs, 0 socket_recieved, 0 socket_sent
#BTW the above was without mlp ... so let's make sure we run at least 10 samples without mlp and 10 samples after - to conclude on avg, max and min for without mlp and then for with mlp """
import os
import re
from typing import Iterable, Any, Tuple
from SSHLibrary import SSHLibrary
from robot.utils import DotDict
from RemoteMonitorLibrary.api import model, db, services
from RemoteMonitorLibrary.api.plugins import *
from RemoteMonitorLibrary.utils import logger
from .ssh_module import SSHModule as SSH
from RemoteMonitorLibrary.model.errors import RunnerError
from RemoteMonitorLibrary.utils import get_error_info
from RemoteMonitorLibrary.utils.sql_engine import DB_DATETIME_FORMAT
__doc__ = """
== Time plugin overview ==
Wrap linux /usr/bin/time utility for periodical execution and monitor of customer command for process io, memory, cpu, etc
Full documentation for time utility available on [https://linux.die.net/man/1/time|time man(1)]
=== Plugin parameters ===
Parameters can be supplied via keyword `Start monitor plugin` as key-value pairs
Time plugin arguments:
- command: str -> command to be executed and measured by time (Mandatory)
| Note: Pay attention not to redirect command stderr to stdout (avoid '2>&1');
| Time write to stderr by itself and it send to parser
- name: User friendly alias for command (Optional)
- start_in_folder: path to executable binary/script if execution by path not relevant (Optional)
- return_stdout: bool -> if true output store to cache in DB
- sudo: True if sudo required, False if omitted (Optional)
- sudo_password: True if password required for sudo, False if omitted (Optional)
On plugin start sudo and sudo_password will be replace with sudo password provided for connection module
Examples:
| Flags | What really executed |
| <command> | /usr/bin/time -f "..." 'command' > /dev/null |
| <command> start_in_folder=<folder> | cd <folder> ; /usr/bin/time -f "..." command > /dev/null |
| <command> start_in_folder=<folder> return_stdout=yes | cd <folder> ;/usr/bin/time -f "..." command |
| <command> start_in_folder=<folder> return_stdout=yes sudo=yes | cd <folder> ;sudo /usr/bin/time -f "..." command |
"""
DEFAULT_TIME_COMMAND = r'/usr/bin/time'
CMD_TIME_FORMAT = DotDict(
TimeReal="e",
TimeKernel="S",
TimeUser="U",
TimeCPU="P",
MemoryMaxResidentSize="M",
MemoryAverage="t",
MemoryAverageTotal="K",
MemoryAverageProcessData="D",
MemoryAverageProcessStack="p",
MemoryAverageProcessShared="X",
MemorySystemPageSize="Z",
MemoryMajorPageFaults="F",
MemoryMinorPageFaults="R",
MemoryProcessSwapped="W",
MemoryProcessContextSwitched="c",
MemoryWait="w",
IOInput="I",
IOOutput="O",
IOSocketRecieved="r",
IOSocketSent="s",
IOSignals="k",
Rc="x",
Command="C"
)
TIME_BG_SCRIPT = """
#!/bin/bash
cd {start_folder}
while :
do
{time_command} -f \\"TimeStamp:\\$(date +'{date_format}'),{format}\\" -o ~/time_data/{title}/.time_{title}.txt {command} > {output}
mv ~/time_data/{title}/.time_{title}.txt ~/time_data/{title}/time_{title}.txt
{mv_output}
sleep {interval}
done
"""
TIME_READ_SCRIPT = """
#!/bin/bash
cat ~/time_data/{title}/time_{title}.txt >&2
{read_output}
"""
TIME_NAME_CACHE = []
class TimeMeasurement(db.PlugInTable):
    """Result table for the Time plugin: one numeric column per time(1) metric."""
    def __init__(self):
        super().__init__('TimeMeasurement')
        self.add_time_reference()
        numeric_columns = [model.Field(name, model.FieldType.Int)
                           for name in CMD_TIME_FORMAT.keys() if name != 'Command']
        for column in numeric_columns + [model.Field('Command')]:
            self.add_field(column)
        self.add_output_cache_reference()
class TimeChart(ChartAbstract):
    """Chart over TimeMeasurement columns, one sub-chart per distinct Command."""
    def __init__(self, table: model.Table, title, *sections):
        self._table = table
        # Default to all table columns when no explicit sections were given.
        ChartAbstract.__init__(self, *(sections if len(sections) > 0 else self._table.columns))
        self._title = title
    @property
    def sections(self):
        # Assumes ChartAbstract.__init__ stored the sections in self._sections -
        # TODO confirm against ChartAbstract implementation.
        return self._sections
    @property
    def title(self):
        return self._title
    @property
    def file_name(self) -> str:
        # '{name}' is substituted downstream per generated sub-chart.
        return "{name}.png"
    @property
    def get_sql_query(self) -> str:
        # Last selected column is Command; generate_chart_data groups on it.
        return """
            SELECT t.TimeStamp as TimeStamp, {select}, n.Command
            FROM {table_name} n
            JOIN TraceHost h ON n.HOST_REF = h.HOST_ID
            JOIN TimeLine t ON n.TL_REF = t.TL_ID
            WHERE h.HostName = '{{host_name}}' """.format(
            select=', '.join([f"n.{c} as {c}" for c in self.sections]),
            table_name=self._table.name)
    def compose_sql_query(self, host_name, **kwargs) -> str:
        # Narrow the base query to a single monitored command.
        sql_ = super().compose_sql_query(host_name=host_name, **kwargs)
        return f"{sql_} AND n.Command = '{kwargs.get('command')}'"
    def y_axes(self, data: [Iterable[Iterable]]) -> Iterable[Any]:
        # Axis labels: section names with the common title prefix stripped.
        return [s.replace(self.title, '') for s in self.sections]
    def __str__(self):
        return f"{self.__class__.__name__}: {', '.join(self.sections)}"
    def generate_chart_data(self, query_results: Iterable[Iterable], extension=None) -> \
            Iterable[Tuple[str, Iterable, Iterable, Iterable[Iterable]]]:
        """Group rows by their trailing Command column and build one sub-chart each."""
        data_series = {}
        for row in query_results:
            if row[-1] not in data_series.keys():
                data_series.update({row[-1]: []})
            data_series[row[-1]].append(row[:-1])
        result = []
        for cmd, row in data_series.items():
            sub_chart = list(super().generate_chart_data(row, cmd))
            sub_chart = list(sub_chart[0])
            # Trim the generated chart name at the first underscore.
            sub_chart[0] = sub_chart[0].split('_', 1)[0]
            result.append(sub_chart)
        return result
class TimeParser(Parser):
    """Parse the 'Name:value,...' stats line produced by time(1) -f into a DB row."""
    def __call__(self, outputs, datetime=None) -> bool:
        command_out = outputs.get('stdout', None)
        time_output = outputs.get('stderr', None)  # time writes its stats to stderr
        rc = outputs.get('rc')
        try:
            exp_rc = self.options.get('rc', None)
            if exp_rc:
                # Any of the '|'-separated expected codes must match.
                if rc not in [int(_rc) for _rc in re.split(r'\s*\|\s*', exp_rc)]:
                    raise AssertionError(
                        f"Result return rc {rc} not match expected\nStdOut:\n\t{command_out}\nStdErr:\n\t{time_output}")
            data = time_output.split(',')
            # Entries look like 'MetricName:value'; strip '%' from TimeCPU.
            row_dict = DotDict(**{k: v.replace('%', '') for (k, v) in [entry.split(':', 1) for entry in data]})
            for k in row_dict.keys():
                if k == 'Command':
                    continue  # Command stays a string; all other metrics are numeric
                row_dict.update({k: float(row_dict[k])})
            logger.info(f"Command: {row_dict.get('Command')} [Rc: {row_dict.get('Rc')}]")
            # Template args: host_id, TL_REF placeholder (None), metric values,
            # then -1 as OUTPUT_REF placeholder - presumably resolved when the
            # data unit is stored; TODO confirm against DataUnit handling.
            row = self.table.template(self.host_id, None, *tuple(list(row_dict.values()) + [-1]))
            du = services.DataUnit(self.table, row, output=command_out, datetime=datetime)
            # du = self.data_handler(self.table, row, output=command_out, datetime=datetime)
            self.data_handler(du)
            return True
        except Exception as e:
            # Wrap any parsing failure into a RunnerError with source location.
            f, li = get_error_info()
            logger.error(f"{self.__class__.__name__}: {e}; File: {f}:{li}")
            raise RunnerError(f"{self}", f"{e}; File: {f}:{li}")
class TimeCachedParser(TimeParser):
    """TimeParser variant for the cached (non-persistent) flow.

    The reader script prefixes the stats line with 'TimeStamp:<ts>,'; that
    stamp is split off here and forwarded so the sample is attributed to the
    actual measurement time rather than the read time.
    """
    def __call__(self, outputs, datetime=None):
        stats_line = outputs.get('stderr', None)
        return_code = outputs.get('rc')
        # First iteration: the stats file does not exist yet - not an error.
        if 'No such file or directory' in stats_line and return_code == 1:
            logger.warn(f"Time command still not completed first iteration")
            return True
        assert return_code == 0, f"Error RC occur - {outputs}"
        stamp_part, stats_line = stats_line.split(',', 1)
        _, datetime = stamp_part.split(':', 1)
        outputs.update(**{'stderr': stats_line})
        return super().__call__(outputs, datetime=datetime)
class TimeStartCommand(SSHLibraryCommand):
    """start_command wrapper that prefixes the user command with time(1) -f."""
    def __init__(self, command, **user_options):
        self._time_cmd = user_options.pop('time_cmd', DEFAULT_TIME_COMMAND)
        # One 'Name:%<letter>' pair per metric, matching TimeMeasurement columns.
        self._format = ','.join([f"{name}:%{item}" for name, item in CMD_TIME_FORMAT.items()])
        self._base_cmd = command
        self._start_in_folder = user_options.pop('start_in_folder', None)
        if not user_options.get('return_stdout', False):
            # Discard the measured command's own stdout unless requested.
            self._base_cmd += ' > /dev/null'
        command = "{cd_to_folder}{time} -f \"{format}\" {base_cmd}".format(
            cd_to_folder=f'cd {self._start_in_folder}; ' if self._start_in_folder else '',
            time=self._time_cmd,
            format=self._format,
            base_cmd=self._base_cmd
        )
        super().__init__(SSHLibrary.start_command, command,
                         **extract_method_arguments(SSHLibrary.start_command.__name__, **user_options))
    def __str__(self):
        # Human-readable form with the long -f format elided.
        return f"{self._method.__name__.replace('_', ' ').capitalize()}: " \
               f"{f'cd {self._start_in_folder}; ' if self._start_in_folder else ''}" \
               f"{self._time_cmd} -f \"...\" " \
               f"{', '.join([f'{a}' for a in [self._base_cmd] + [f'{k}={v}' for k, v in self._ssh_options.items()]])}" \
               f"{'; Parser: '.format(self.parser) if self.parser else ''}"
class TimeReadOutput(SSHLibraryCommand):
    """read_command_output wrapper collecting time(1) statistics from stderr."""
    def __init__(self, **user_options):
        self._format = ','.join(f"{name}:%{item}" for name, item in CMD_TIME_FORMAT.items())
        # stderr (the stats line) and rc are always required for parsing.
        user_options.update({'return_stderr': True, 'return_rc': True})
        super().__init__(SSHLibrary.read_command_output, parser=user_options.pop('parser', None),
                         **extract_method_arguments(SSHLibrary.read_command_output.__name__, **user_options))
    def __str__(self):
        option_text = ', '.join(f'{k}={v}' for k, v in self._ssh_options.items())
        return f"{self._method.__name__.replace('_', ' ').capitalize()}: " \
               f"{option_text}" \
               f"'; Parser: {'assigned' if self.parser else 'N/A'}"
class GetPIDList(Variable):
    """Capture command stdout as a space-separated PID list under 'pid_list'."""
    def __call__(self, output):
        self.result = dict(pid_list=output.replace('\n', ' '))
class Time(SSH_PlugInAPI):
    """Periodically run a user command under /usr/bin/time and collect its metrics.

    Non-persistent mode installs a background shell loop on the remote host
    (TIME_BG_SCRIPT) that reruns the measured command and atomically publishes
    a stats file; the Command flow then reads that file back (TIME_READ_SCRIPT).
    Persistent mode keeps one start_command / read_command_output pair alive.
    """
    def __init__(self, parameters, data_handler, *args, **user_options):
        SSH_PlugInAPI.__init__(self, parameters, data_handler, *args, **user_options)
        self._command = self.options.pop('command', None)
        self._command_name = self.options.get('name', self.name)
        self.options.update({'name': self._command_name})
        # Enforce one Time plugin instance per unique command name per run.
        assert self.id not in TIME_NAME_CACHE, f"Name '{self._command_name}' already exists"
        TIME_NAME_CACHE.append(self.id)
        self.options.update({'persistent': self.options.get('persistent', 'no')})
        self._prefix = f"{self.__class__.__name__}_item:"
        self._time_cmd = self.options.get('time_cmd', DEFAULT_TIME_COMMAND)
        self._start_in_folder = self.options.get('start_in_folder', None)
        if self._start_in_folder:
            # Expands '~' remotely and asserts the folder exists on the host.
            self._verify_folder_exist()
            self.options.update({'start_in_folder': self._start_in_folder})
        self._format = ','.join([f"{name}:%{item}" for name, item in CMD_TIME_FORMAT.items()])
        assert self._command, "SSHLibraryCommand not provided"
        self.options.update(**self.normalise_arguments(**self.options))
        if self.options.get('rc', None) is not None:
            assert self.options.get('return_rc'), "For verify RC argument 'return_rc' must be provided"
        if self.persistent:
            self.set_commands(FlowCommands.Command,
                              TimeStartCommand(self._command, **self.options),
                              TimeReadOutput(parser=TimeParser(host_id=self.host_id,
                                                               table=self.affiliated_tables()[0],
                                                               data_handler=self.data_handler, Command=self.name),
                                             **self.options))
        else:
            time_write_script = TIME_BG_SCRIPT.format(
                start_folder=f"{self._start_in_folder}" if self._start_in_folder else '',
                time_command=self._time_cmd,
                format=self._format,
                command=self._command,
                interval=self.interval,
                output='/dev/null',
                title=self.id,
                # FIXME: ReturnStdout disabled due to performance issues; Pending fix in next releases
                # output='~/time_data/.temp_output.txt' if self.options.get('return_stdout', False) else '/dev/null',
                # NOTE(review): this destination lacks the '{t}/' folder that
                # TIME_READ_SCRIPT reads from - dormant while return_stdout is
                # disabled, but confirm before re-enabling.
                mv_output='mv ~/time_data/{t}/.temp_{t}_output.txt ~/time_data/output_{t}.txt'.format(t=self.id)
                if self.options.get('return_stdout', False) else '',
                date_format=DB_DATETIME_FORMAT
            )
            time_read_script = TIME_READ_SCRIPT.format(
                title=self.id,
                read_output='cat ~/time_data/{t}/output_{t}.txt >&1'.format(t=self.id)
                if self.options.get('return_stdout', False) else ''
            )
            # Shared variable: Teardown's first command stores the PIDs the
            # second command kills.
            pid_list = GetPIDList()
            self.set_commands(FlowCommands.Teardown,
                              SSHLibraryCommand(SSHLibrary.execute_command,
                                                "ps -ef|egrep 'time_write_{}.sh|{}'|grep -v grep|"
                                                "awk '{{{{print$2}}}}'".format(self.id, self._command),
                                                sudo=self.sudo_expected,
                                                sudo_password=self.sudo_password_expected,
                                                return_stdout=True,
                                                variable_setter=pid_list),
                              SSHLibraryCommand(SSHLibrary.execute_command, "kill -9 {pid_list}",
                                                sudo=self.sudo_expected,
                                                sudo_password=self.sudo_password_expected,
                                                variable_getter=pid_list)
                              )
            # Setup: clean leftovers, install both scripts and launch the loop.
            self.set_commands(FlowCommands.Setup,
                              self.teardown,
                              SSHLibraryCommand(SSHLibrary.execute_command,
                                                f"mkdir -p ~/time_data/{self.id}",
                                                return_rc=True),
                              SSHLibraryCommand(SSHLibrary.execute_command, f"rm -rf ~/time_data/{self.id}/*",
                                                sudo=self.sudo_expected,
                                                sudo_password=self.sudo_password_expected,
                                                return_rc=True,
                                                return_stderr=True),
                              SSHLibraryCommand(SSHLibrary.execute_command,
                                                f"echo \"{time_write_script}\" > ~/time_data/{self.id}/time_write_{self.id}.sh",
                                                return_rc=True, parser=ParseRC()),
                              SSHLibraryCommand(SSHLibrary.execute_command,
                                                f"echo \"{time_read_script}\" > ~/time_data/{self.id}/time_read_{self.id}.sh",
                                                return_rc=True, parser=ParseRC()),
                              SSHLibraryCommand(SSHLibrary.execute_command,
                                                f'chmod +x ~/time_data/{self.id}/*.sh',
                                                return_rc=True, parser=ParseRC()),
                              SSHLibraryCommand(SSHLibrary.start_command,
                                                f'nohup ~/time_data/{self.id}/time_write_{self.id}.sh &')
                              )
            self.set_commands(FlowCommands.Command,
                              SSHLibraryCommand(SSHLibrary.execute_command,
                                                f'~/time_data/{self.id}/time_read_{self.id}.sh',
                                                sudo=self.sudo_expected,
                                                # Fix: was sudo_password=self.sudo_expected -
                                                # copy-paste typo; every sibling call passes
                                                # the password flag here.
                                                sudo_password=self.sudo_password_expected,
                                                return_stderr=True,
                                                return_rc=True,
                                                parser=TimeCachedParser(host_id=self.host_id,
                                                                        table=self.affiliated_tables()[0],
                                                                        data_handler=self.data_handler,
                                                                        Command=self.name)))
    @property
    def kwargs_info(self) -> dict:
        # Extra info surfaced in reports/logs about this plugin instance.
        return dict(command=self._command)
    @property
    def id(self):
        # Unique per-run identifier; also used for remote file/folder names.
        return f"{self.__class__.__name__}_{self._command_name}"
    def _verify_folder_exist(self):
        """Expand '~' in start_in_folder remotely and assert the folder exists."""
        with self.on_connection() as ssh:
            if self._start_in_folder.startswith('~'):
                _path = self._start_in_folder
                user_home = ssh.execute_command('echo $HOME').strip()
                _path = _path.replace('~', user_home)
                logger.info(f"Expand folder {self._start_in_folder} to {_path}")
                self._start_in_folder = _path
            ssh.directory_should_exist(os.path.expanduser(self._start_in_folder))
    @staticmethod
    def affiliated_module():
        return SSH
    @staticmethod
    def affiliated_tables() -> Iterable[model.Table]:
        return TimeMeasurement(),
    @staticmethod
    def affiliated_charts() -> Iterable[ChartAbstract]:
        # One chart per metric family, selecting its same-prefixed columns.
        base_table = TimeMeasurement()
        return tuple(TimeChart(base_table, name, *[c.name for c in base_table.fields if c.name.startswith(name)])
                     for name in ('Time', 'Memory', 'IO'))
# __all__ must list attribute *names*. The module docstring itself (__doc__)
# was previously included here, which breaks `from ... import *`: there is no
# module attribute named after the doc text.
__all__ = [
    Time.__name__,
]
import os
from datetime import datetime, timedelta
from time import sleep
from robot.api.deco import keyword
from robot.utils import is_truthy, timestr_to_secs, secs_to_timestr
from RemoteMonitorLibrary.api import db, services
from RemoteMonitorLibrary.api.tools import GlobalErrors
from RemoteMonitorLibrary.library.listeners import *
from RemoteMonitorLibrary.runner import HostRegistryCache
from RemoteMonitorLibrary.utils import get_error_info
from RemoteMonitorLibrary.utils import logger
from RemoteMonitorLibrary.utils.sql_engine import insert_sql, update_sql, DB_DATETIME_FORMAT
class ConnectionKeywords:
__doc__ = """=== Connections keywords ===
`Create host monitor`
`Close host monitor`
`Terminate all monitors`
=== PlugIn's keywords ===
`Start monitor plugin`
`Stop monitor plugin`
=== Flow control ===
`Start period`
`Stop period`
`Wait`
`Set mark` - TBD
=== Examples ===
| ***** Settings *****
| Library RemoteMonitorLibrary
| Library BuiltIn
|
| Suite Setup run keyword Create host monitor SSH host=${HOST} username=${USER} password=${PASSWORD} ...
| ... AND Create host monitor WEB url=${URL} user=${USER} password=${PASSWORD} ...
| Suite Teardown terminate_all_monitors
|
| ***** Variables *****
| ${HOST} ...
| ${USER} ...
| ${PASSWORD} ...
| ${INTERVAL} 1s
| ${PERSISTENT} yes
| ${DURATION} 1h
|
| ${URL} ...
| ${W_USER} ...
| ${W_PASSWORD} ...
| ${UUID} ...
| ***** Tests *****
| Test Host monitor
| [Tags] ssh monitor
| Start monitor plugin aTop interval=${INTERVAL} persistent=${PERSISTENT}
| Start monitor plugin Time command=make -j 40 clean all interval=0.5s persistent=${PERSISTENT}
| ... name=Compilation start_in_folder=~/bm_noise/linux-5.11.10
| sleep ${DURATION} make something here
| Stop monitor plugin Time name=Complilation
| [Teardown] run keywords generate module statistics plugin=Time name=Compilation
| ... AND generate module statistics plugin=aTop
|
| Test WEB monitor
| [Tags] web monitor
| Start monitor plugin API interval=${INTERVAL} persistent=${PERSISTENT} protectorUuid=${UUID}
| ... protectorStatus=CONNECTED
|
| sleep ${DURATION} make something here
| Stop monitor plugin Time name=Complilation
| [Teardown] run keywords generate module statistics plugin=Time name=Compilation
| ... AND generate module statistics plugin=aTop
|
"""
def __init__(self, rel_location, file_name, **options):
self._modules = HostRegistryCache()
self.location, self.file_name, self.cumulative = \
rel_location, file_name, is_truthy(options.get('cumulative', False))
self._log_to_db = options.get('log_to_db', False)
self.ROBOT_LIBRARY_LISTENER = AutoSignPeriodsListener()
suite_start_kw = self._normalise_auto_mark(options.get('start_suite', None), 'start_period')
suite_end_kw = self._normalise_auto_mark(options.get('start_suite', None), 'stop_period')
test_start_kw = self._normalise_auto_mark(options.get('start_test', None), 'start_period')
test_end_kw = self._normalise_auto_mark(options.get('end_test', None), 'stop_period')
if suite_start_kw:
self.register_kw(AllowedHooks.start_suite, suite_start_kw)
if suite_end_kw:
self.register_kw(AllowedHooks.end_suite, suite_end_kw)
if test_start_kw:
self.register_kw(AllowedHooks.start_test, test_start_kw)
if test_end_kw:
self.register_kw(AllowedHooks.end_test, test_end_kw)
@staticmethod
def _normalise_auto_mark(custom_kw, default_kw):
if is_truthy(custom_kw) is True:
return default_kw
elif custom_kw is not None:
return custom_kw
return None
    def _init(self):
        """Lazy one-time init of DB/log services under Robot's output folder."""
        output_location = BuiltIn().get_variable_value('${OUTPUT_DIR}')
        services.DataHandlerService().init(os.path.join(output_location, self.location), self.file_name,
                                           self.cumulative)
        # Mirror Robot's current log level onto the library logger.
        level = BuiltIn().get_variable_value('${LOG LEVEL}')
        logger.setLevel(level)
        rel_log_file_path = os.path.join(self.location, self.file_name)
        abs_log_file_path = os.path.join(output_location, self.location, self.file_name)
        logger.set_file_handler(abs_log_file_path)
        if is_truthy(self._log_to_db):
            # Optionally route python logging records into the DB as well.
            services.TableSchemaService().register_table(db.log())
            logger.addHandler(services.SQLiteHandler())
        services.DataHandlerService().start()
        # Surface a clickable link to the library log in Robot's log.
        logger.warn(f'<a href="{rel_log_file_path}">{self.file_name}</a>', html=True)
def get_keyword_names(self):
return [
self.create_host_monitor.__name__,
self.close_host_monitor.__name__,
self.terminate_all_monitors.__name__,
self.start_monitor_plugin.__name__,
self.stop_monitor_plugin.__name__,
self.start_period.__name__,
self.stop_period.__name__,
self.pause_monitor.__name__,
self.resume_monitor.__name__,
self.add_to_plugin.__name__,
self.remove_from_plugin.__name__,
self.set_mark.__name__,
self.wait.__name__,
self.register_kw.__name__,
self.unregister_kw.__name__,
self.get_current_errors.__name__
]
@keyword("Create host monitor")
def create_host_monitor(self, module_name, alias=None, **module_options):
"""Create basic host connection module used for trace host
Last created connection handled as 'current'
In case tracing required for one host only, alias can be ignored
# Arguments:
- alias: 'username@host:port' if omitted
- timeout : connection & command timeout
== Supported modules ==
=== SSH ===
Connection arguments:
- host: IP address, DNS name,
- username
- password
- port : 22 if omitted
- certificate : key file (.pem) Optional
=== SSH Examples ===
| KW | Module | Arguments | Comments |
| Create host monitor | SSH | host=127.0.0.1 username=any_user password=any_password | Default port; No alias |
| Create host monitor | SSH | host=127.0.0.1 username=any_user password= any_password port=2243 | Custom port; No alias |
| Create host monitor | SSH | host=127.0.0.1 username=any_user password= any_password alias=<name> | Custom port; Alias |
| Create host monitor | SSH | host=127.0.0.1 username=any_user password= any_password alias=${my_name} | Default port; Alias |
| Create host monitor | SSH | host=127.0.0.1 username=any_user password= any_password certificate=key_file.pem | Certificate file will be assigned |
=== WEB Examples ===
| KW | Module | Arguments | Comments |
| Create host monitor | WEB | url=www.d.com user=any_user password=any_password | Default port; No alias |
=== Auto start/stop periods ===
By default keyword `Start period`, `Stop period` assigned for start/end test accordingly following by test name
Can be overwritten by key value pairs
| listener method=keyword name
Where listener are one of:
| start_suite
| end_suite
| start_test
| end_test
"""
assert module_name in services.ModulesRegistryService().keys(), f"Module '{module_name}' not registered"
module_type = services.ModulesRegistryService().get(module_name)
if not services.DataHandlerService().is_active:
self._init()
try:
module = module_type(services.PlugInService(), services.DataHandlerService().add_data_unit,
alias=alias, **module_options)
module.start()
logger.info(f"Connection {module.alias} ready to be monitored")
_alias = self._modules.register(module, module.alias)
self._start_period(alias=module.alias)
except Exception as e:
BuiltIn().fatal_error(f"Cannot start module '{module_name}; Reason: {e}")
else:
return module.alias
@keyword("Close host monitor")
def close_host_monitor(self, alias=None):
"""
Stop all plugins_modules related to host by its alias
Arguments:
- alias: 'Current' used if omitted
"""
self._stop_period(alias)
self._modules.stop_current()
@keyword("Terminate all monitors")
def terminate_all_monitors(self):
"""
Terminate all active hosts & running plugins_modules
"""
for module in self._modules:
self._stop_period(module.alias)
self._modules.close_all()
services.DataHandlerService().stop()
@keyword("Start monitor plugin")
def start_monitor_plugin(self, plugin_name, *args, alias=None, **options):
"""
Start plugin by its name on host queried by options keys
Arguments:
- plugin_names: name must be one for following in loaded table, column 'Class'
- alias: host monitor alias (Default: Current if omitted)
- options: interval=... , persistent=yes/no,
extra parameters relevant for particular plugin can be found in `BuiltIn plugins_modules` section
"""
try:
monitor: services.RegistryModule = self._modules.get_connection(alias)
monitor.plugin_start(plugin_name, *args, **options)
except Exception as e:
f, li = get_error_info()
raise BuiltIn().fatal_error(f"{e}; File: {f}:{li}")
else:
logger.info(f"PlugIn '{monitor}' created")
@keyword("Stop monitor plugin")
def stop_monitor_plugin(self, plugin_name, alias=None, **options):
monitor = self._modules.get_connection(alias)
monitor.plugin_terminate(plugin_name, **options)
logger.info(f"PlugIn '{plugin_name}' stopped on {monitor.alias}", also_console=True)
@keyword("Pause monitor")
def pause_monitor(self, reason, alias=None):
"""
Pause monitor's plugins_modules (Actual for host reboot or network restart tests)
Arguments:
- reason: Pause reason text
- alias: Desired monitor alias (Default: current)
"""
monitor = self._modules.get_connection(alias)
monitor.pause_plugins()
self._start_period(reason, alias)
@keyword("Resume monitor")
def resume_monitor(self, reason, alias=None):
"""
Resume previously paused monitor (Actual for host reboot or network restart tests)
Arguments:
- reason: Pause reason text
- alias: Desired monitor alias (Default: current)
"""
monitor: services.RegistryModule = self._modules.get_connection(alias)
monitor.resume_plugins()
self._stop_period(reason, alias)
@keyword("Add to Plugin")
def add_to_plugin(self, plugin_name, *args, **kwargs):
    """
    Add to Plugin - allows runtime modification (adding) of plugin configuration.
    See particular PlugIn's options in `BuiltIn plugins_modules`.

    Arguments:
    - plugin_name: name of an already started plugin
    - alias: monitor alias (read from kwargs; current if omitted)
    - args: Plugin related unnamed arguments
    - kwargs: Plugin related named arguments

    Raises AssertionError if the plugin is not running on the target monitor.
    """
    alias = kwargs.pop('alias', None)
    monitor: services.RegistryModule = self._modules.get_connection(alias)
    plugins = monitor.get_plugin(plugin_name)
    # Explicit raise instead of `assert`: input validation must survive `python -O`.
    if not plugins:
        raise AssertionError(f"Plugin '{plugin_name}{f' ({alias})' if alias else ''}' not started")
    for plugin in plugins:
        plugin.upgrade_plugin(*args, **kwargs)
@keyword("Remove from Plugin")
def remove_from_plugin(self, plugin_name, *args, **kwargs):
    """
    Remove from Plugin - allows runtime modification (reducing) of plugin configuration.
    See particular PlugIn's options in `BuiltIn plugins_modules`.

    Arguments:
    - plugin_name: name of an already started plugin
    - alias: monitor alias (read from kwargs; current if omitted)
    - args: Plugin related unnamed arguments
    - kwargs: Plugin related named arguments

    Raises AssertionError if the plugin is not running on the target monitor.
    """
    alias = kwargs.pop('alias', None)
    monitor: services.RegistryModule = self._modules.get_connection(alias)
    plugins = monitor.get_plugin(plugin_name)
    # Explicit raise instead of `assert`: input validation must survive `python -O`.
    if not plugins:
        raise AssertionError(f"Plugin '{plugin_name}{f' ({alias})' if alias else ''}' not started")
    for plugin in plugins:
        plugin.downgrade_plugin(*args, **kwargs)
@keyword("Start period")
def start_period(self, period_name=None, alias=None):
    """
    Start period keyword

    Arguments:
    - period_name: Name of period to be started (Default: the module's alias)
    - alias: Connection alias (Default: current)
    """
    self._start_period(period_name, alias)
def _start_period(self, period_name=None, alias=None):
    """Insert a new open period row (End column NULL) into the Points table."""
    module: services.RegistryModule = self._modules.get_connection(alias)
    table = services.TableSchemaService().tables.Points
    services.DataHandlerService().execute(insert_sql(table.name, table.columns),
                                          module.host_id, period_name or module.alias,
                                          datetime.now().strftime(DB_DATETIME_FORMAT),
                                          None)
@keyword("Stop period")
def stop_period(self, period_name=None, alias=None):
    """
    Stop period keyword

    Arguments:
    - period_name: Name of period to be stopped (Default: the module's alias)
    - alias: Connection alias (Default: current)
    """
    self._stop_period(period_name, alias)
def _stop_period(self, period_name=None, alias=None):
    """Close the named trace period by stamping its 'End' time in the Points table."""
    registry_module: services.RegistryModule = self._modules.get_connection(alias)
    points_table = services.TableSchemaService().tables.Points
    point_name = f"{period_name or registry_module.alias}"
    update_statement = update_sql(points_table.name, 'End',
                                  HOST_REF=registry_module.host_id, PointName=point_name)
    services.DataHandlerService().execute(update_statement,
                                          datetime.now().strftime(DB_DATETIME_FORMAT))
@keyword("Wait")
def wait(self, timeout, reason=None, reminder='1h'):
    """
    Wait is a native replacement for keyword 'sleep' from the BuiltIn library.
    Difference: wait exits early in case any global error occurred within active Plugins.

    Arguments:
    - timeout: String in robot format (20, 1s, 1h, etc.)
    - reason: Any string to log on exit if no errors occurred
    - reminder: Log remaining time every <reminder> period
    """
    timeout_sec = timestr_to_secs(timeout)
    reminder_sec = timestr_to_secs(reminder)
    end_time = datetime.now() + timedelta(seconds=timeout_sec)
    next_reminder_time = datetime.now() + timedelta(seconds=reminder_sec)
    # Fixed format string: was '%H:%H:%S', which printed the hour twice and omitted minutes.
    BuiltIn().log(f"Waiting {timeout} ({timeout_sec}sec.) till {end_time.strftime('%F %H:%M:%S')}", console=True)
    while datetime.now() <= end_time:
        if len(GlobalErrors()) > 0:
            BuiltIn().fail("Global error occurred: {}".format('\n\t'.join([f"{e}" for e in GlobalErrors()])))
        elif datetime.now() >= next_reminder_time:
            BuiltIn().log(f"Remain {secs_to_timestr((end_time - datetime.now()).total_seconds(), compact=True)}",
                          console=True)
            next_reminder_time = datetime.now() + timedelta(seconds=reminder_sec)
        sleep(1)
    if reason:
        BuiltIn().log(reason)
@keyword("Set mark")
def set_mark(self, mark_name, alias=None):
    """
    Stamp the 'Mark' time of the matching point row in the Points table.

    Arguments:
    - mark_name: point name whose row should be marked
    - alias: Connection alias (Default: current)
    """
    module: services.RegistryModule = self._modules.get_connection(alias)
    table = services.TableSchemaService().tables.Points
    services.DataHandlerService().execute(update_sql(table.name, 'Mark',
                                                     HOST_REF=module.host_id, PointName=mark_name),
                                          datetime.now().strftime(DB_DATETIME_FORMAT))
@keyword("Get Current RML Errors")
def get_current_errors(self):
    """Return the collection of currently registered global RML errors."""
    return GlobalErrors()
@keyword("Register KW")
def register_kw(self, hook: AllowedHooks, kw_name, *args, **kwargs):
    """
    Register a keyword with the library listener.

    Arguments:
    - hook: one of start_suite, end_suite, start_test, end_test
    - kw_name: Keyword name
    - args: unnamed arguments
    - kwargs: named arguments (serialised as "name=value" strings)
    """
    kw_arguments = list(args)
    kw_arguments.extend(f"{k}={v}" for k, v in kwargs.items())
    self.ROBOT_LIBRARY_LISTENER.register(hook, kw_name, kw_arguments)
@keyword("Unregister kw")
def unregister_kw(self, hook: AllowedHooks, kw_name):
    """
    Unregister keyword from listener

    Arguments:
    - hook: one of start_suite, end_suite, start_test, end_test
    - kw_name: Keyword name
    """
    self.ROBOT_LIBRARY_LISTENER.unregister(hook, kw_name)
from collections import namedtuple
from enum import Enum
from typing import List, Iterable, Tuple, AnyStr
from robot.utils import DotDict
from RemoteMonitorLibrary.utils import sql
class FieldType(Enum):
    """SQLite storage class used when rendering a column definition to DDL."""
    Int = 'INTEGER'
    Text = 'TEXT'
    Real = 'REAL'
class PrimaryKeys:
    """PRIMARY KEY column modifier, optionally with AUTOINCREMENT."""

    def __init__(self, auto_increment=False):
        """
        :param auto_increment: render ' AUTOINCREMENT' after ' PRIMARY KEY'
        """
        self._auto_increment = auto_increment

    def __str__(self):
        # Parentheses are required: previously the conditional expression bound
        # the whole concatenation, so a plain (non auto-increment) primary key
        # rendered as an empty string instead of ' PRIMARY KEY'.
        return ' PRIMARY KEY' + (' AUTOINCREMENT' if self._auto_increment else '')
class Field:
    """Single column definition used to render CREATE TABLE DDL fragments."""

    def __init__(self, name, type_: FieldType = None, primary_key: PrimaryKeys = None,
                 not_null=False, unique=False):
        """
        Table field definition

        :param name: column name string
        :param type_: field type (INTEGER, TEXT, REAL); defaults to TEXT
        :param primary_key: optional PrimaryKeys modifier
        :param not_null: add NOT NULL constraint
        :param unique: add UNIQUE constraint
        """
        self._name: str = name
        self._type: FieldType = type_ or FieldType.Text
        self._primary_key = primary_key
        self._not_null = not_null
        self._unique = unique

    @property
    def name(self):
        return self._name

    @property
    def type(self):
        return self._type

    @property
    def primary_key(self):
        if not self._primary_key:
            return ''
        return f" {self._primary_key}"

    @property
    def not_null(self):
        if self._not_null:
            return ' NOT NULL'
        return ''

    @property
    def unique(self):
        if self._unique:
            return ' UNIQUE'
        return ''

    def __str__(self):
        # Assemble the DDL fragment: "<name> <TYPE>[ NOT NULL][ UNIQUE][ PRIMARY KEY...]"
        parts = (f"{self.name} {self.type.value}", self.not_null, self.unique, self.primary_key)
        return ''.join(parts)
class Query:
    """
    Named SQL statement attached to a Table.

    The stored SQL is a ``str.format`` template; calling the instance
    substitutes the placeholders and returns the finished statement.
    """

    def __init__(self, name: str, sql: str):
        """
        :param name: query name string
        :param sql: SQL statement as a python format template
        """
        self._name = name
        self._sql = sql

    @property
    def name(self):
        return self._name

    @property
    def sql(self):
        return self._sql

    def __call__(self, *args, **kwargs):
        template = self.sql
        return template.format(*args, **kwargs)
class ForeignKey:
    """
    FOREIGN KEY clause description for a Table.

    Instances compare and hash by value, so duplicates can be detected with
    ``in`` / set membership. Previously only ``__hash__`` was defined, so
    ``==`` fell back to identity and containment checks missed duplicates.
    """

    def __init__(self, own_field, foreign_table, foreign_field):
        """
        Foreign key definition for table

        :param own_field: Own field name
        :param foreign_table: Foreign table name
        :param foreign_field: Foreign field name
        """
        self._own_field = own_field
        self._table = foreign_table
        self._field = foreign_field

    @property
    def own_field(self):
        return self._own_field

    @property
    def foreign_table(self):
        return self._table

    @property
    def foreign_field(self):
        return self._field

    def __str__(self):
        return sql.FOREIGN_KEY_TEMPLATE.format(local_field=self.own_field,
                                               foreign_table=self.foreign_table,
                                               foreign_field=self.foreign_field)

    def __eq__(self, other):
        if not isinstance(other, ForeignKey):
            return NotImplemented
        return (self.own_field, self.foreign_table, self.foreign_field) == \
               (other.own_field, other.foreign_table, other.foreign_field)

    def __hash__(self):
        # Hash by value, consistent with __eq__; the old str()-based hash
        # needlessly depended on the SQL template text.
        return hash((self.own_field, self.foreign_table, self.foreign_field))

    def clone(self):
        return type(self)(self.own_field, self.foreign_table, self.foreign_field)
class Table(object):
    """
    In-memory table schema: named fields, foreign keys and named queries.

    Fields and foreign keys are kept in immutable tuples and extended via
    add_field / add_foreign_key, which guard against duplicates.
    """

    def __init__(self, name=None, fields: Iterable[Field] = None, queries: Iterable[Query] = None,
                 foreign_keys: List[ForeignKey] = None):
        # None defaults instead of mutable [] defaults (shared between calls).
        self._name = name or self.__class__.__name__
        self._fields = tuple()
        for f in fields or ():
            self.add_field(f)
        self._foreign_keys = tuple()
        for fk in foreign_keys or ():
            self.add_foreign_key(fk)
        self._queries: DotDict[str, Query] = DotDict()
        for query in queries or ():
            self._queries[query.name] = query

    @property
    def template(self):
        # namedtuple type matching this table's columns; used to shape result rows.
        return namedtuple(self.name, (f.name for f in self.fields))

    @property
    def fields(self) -> Tuple:
        return self._fields

    def add_field(self, field: Field):
        assert field not in self.fields, f"Field '{field}' already exist"
        self._fields = tuple(list(self.fields) + [field])

    @property
    def columns(self) -> List[AnyStr]:
        return [f.name for f in self.fields]

    def __str__(self):
        return f"{self.name}: {', '.join(self.columns)}"

    @property
    def name(self):
        return self._name

    @property
    def queries(self):
        return self._queries

    @property
    def foreign_keys(self) -> Tuple:
        return self._foreign_keys

    def add_foreign_key(self, fk: ForeignKey):
        # Bug fix: duplicates were checked against self.fields, which never
        # contains foreign keys, so repeated keys were silently accepted.
        assert fk not in self.foreign_keys, f"Foreign Key '{fk}' already exist"
        self._foreign_keys = tuple(list(self._foreign_keys) + [fk])
import warnings
from abc import ABC, abstractmethod
from datetime import datetime
from typing import Iterable, Tuple, Any
from RemoteMonitorLibrary.utils.logger_helper import logger
warnings.filterwarnings("ignore")
# Default timestamp formats: the DB stores full date-time, chart axes show time only.
INPUT_FMT = '%Y-%m-%d %H:%M:%S'
OUTPUT_FMT = '%H:%M:%S'


def time_string_reformat_cb(from_format, to_format):
    """
    Build a converter callback that re-renders a timestamp string from
    `from_format` to `to_format`.

    On parse failure the error is logged and the original string is returned
    unchanged — previously the callback implicitly returned None, which
    produced empty chart axis labels downstream.
    """
    def time_string_reformat(time_stamp):
        try:
            return datetime.strptime(time_stamp, from_format).strftime(to_format)
        except (ValueError, TypeError):
            logger.error(f"Cannot convert time string: {time_stamp}")
            return time_stamp
    return time_string_reformat
class ChartAbstract(ABC):
    """
    Base class for chart generators.

    Subclasses supply the SQL template (`get_sql_query`) and target file name
    (`file_name`); this class composes the final query for a host/time window
    and reshapes query results into (title, x_axes, y_axes, data) tuples.
    """

    def __init__(self, *sections):
        self._sections = sections
        self._verify_sql_query_for_variables()
        self._ext = '.png'

    def _verify_sql_query_for_variables(self):
        # Every chart query must be host-parameterised; fail fast otherwise.
        # (Assertion message fixed: was garbled with an unbalanced quote.)
        assert '{host_name}' in self.get_sql_query, "Variable '{host_name}' missing in query text"

    @property
    def sections(self):
        return self._sections

    @property
    @abstractmethod
    def get_sql_query(self) -> str:
        """Return the SQL template; must contain the '{host_name}' placeholder."""
        raise NotImplementedError()

    def compose_sql_query(self, host_name, **kwargs) -> str:
        """Substitute the host name and append optional start/end time-window bounds."""
        _sql = self.get_sql_query.format(host_name=host_name)
        _start = kwargs.get('start_mark', None)
        if _start:
            _sql += f" AND \"{_start}\" <= t.TimeStamp"
        _end = kwargs.get('end_mark', None)
        if _end:
            _sql += f" AND \"{_end}\" >= t.TimeStamp"
        return _sql

    @property
    @abstractmethod
    def file_name(self) -> str:
        """Target chart file name (without directory)."""
        raise NotImplementedError()

    @property
    def title(self):
        return self.__class__.__name__

    def y_axes(self, data: Iterable[Iterable]) -> Iterable[Any]:
        # First column of every result row holds the series label.
        return [r[0] for r in data]

    @staticmethod
    def get_y_limit(data):
        # Maximum over all numeric columns (everything after the first column).
        return max([max(y) for y in [x[1:] for x in data]])

    @staticmethod
    def x_axes(data, time_columns=0, formatter=time_string_reformat_cb(INPUT_FMT, OUTPUT_FMT)) -> Iterable[Any]:
        return [formatter(i[time_columns]) if formatter else i[time_columns] for i in data]

    @staticmethod
    def data_area(data: Iterable[Iterable]) -> Iterable[Iterable]:
        return [r[1:] for r in data]

    def generate_chart_data(self, query_results: Iterable[Iterable], extension=None) \
            -> Iterable[Tuple[str, Iterable, Iterable, Iterable[Iterable]]]:
        title = self.title + (f'_{extension}' if extension else '')
        # The trailing comma is deliberate: callers expect an iterable of tuples.
        return (title,
                self.x_axes(query_results),
                self.y_axes(query_results),
                self.data_area(query_results)),
from typing import Dict, AnyStr, Tuple
from robot.utils import DotDict
from RemoteMonitorLibrary.utils.sys_utils import get_error_info
class Configuration:
    """
    Validated parameter bag built from a schema.

    Schema format: ``{name: (mandatory, default, formatter, type)}`` — each
    incoming value is coerced with ``formatter`` unless it is already an
    instance of ``type``; falsy values fall back to ``default``.
    """

    def __init__(self, schema: Dict[AnyStr, Tuple], **kwargs):
        self.schema = schema
        self._parameters = DotDict()
        errors = []
        for attr, (mandatory, _, _, _) in self.schema.items():
            try:
                if mandatory:
                    assert attr in kwargs, f"Mandatory parameter '{attr}' missing"
                self._set_parameter(attr, kwargs.get(attr, None))
            except AssertionError as e:
                errors.append(f"{e}")
            except Exception as e:
                f, l = get_error_info()
                errors.append(f"Unexpected error occurred during handle parameter '{attr}'; File: {f}:{l} - Error: {e}")
        assert len(errors) == 0, "Following fields errors occurred:\n\t{}".format('\n\t'.join(errors))

    def _set_parameter(self, parameter, value):
        """Validate, coerce and store a single parameter value."""
        template = self.schema.get(parameter, None)
        assert template, f"Unknown parameter '{parameter}' provided"
        _, default, formatter, type_ = template
        if not type_:
            self._parameters[parameter] = value
            return
        if not value:
            self._parameters[parameter] = default
            return
        self._parameters[parameter] = value if isinstance(value, type_) else formatter(value)

    @property
    def parameters(self):
        return self._parameters

    @property
    def alias(self):
        return self.parameters.alias

    def update(self, dict_: dict = None, **kwargs):
        """Apply known parameters; return the dict of rejected (unknown) ones."""
        dict_ = dict_ or {}
        dict_.update(**kwargs)
        rejected = {}
        for name, value in dict_.items():
            try:
                self._set_parameter(name, value)
            except AssertionError:
                rejected[name] = value
        return rejected

    def clone(self):
        return type(self)(self.schema, **self.parameters)
# Robot Framework Remote Runner
[](https://github.com/chrisBrookes93/robotframework-remoterunner/actions)
[](https://badge.fury.io/py/robotframework-remoterunner)
This library provides a robotframework agent
and an accompanying robot executor script that allow you to execute Robot Framework Test Suites remotely.
It's designed to be a lightweight agent and can be used as an alternative,
or with a CI Agent (e.g. Jenkins Agent). The executor script parses Test Suites and packages them up with their dependencies before making an RPC call to the agent.
The agent then writes all Test Suites and resources to a temporary directory and executes a robot run,
returning the test result artifacts back to the invoking host.
This library is distinctly different, and not to be confused with [PythonRemoteServer](https://github.com/robotframework/PythonRemoteServer)
which provides remote execution during a test run via the RemoteLib.
## Installation
Python Dependencies:
* robotframework >= 3.1.1
* six
To install the package and its runtime dependencies run:
```text
pip install robotframework-remoterunner
```
This package will need to be installed on the agent host, and the host you wish to execute the remote run from.
## Usage:
This library contains two scripts:
* *rfagent* - The agent that receives and executes the robot run.
* *rfremoterun* - The script that invokes the agent to execute the robot run.
### rfagent
Once installed, the agent can be launched by running the ```rfagent``` script:
```text
C:\>rfagent -h
usage: rfagent [-h] [-a ADDRESS] [-p PORT] [-d]
Script to launch the robotframework agent. This opens an RPC port and waits
for a request to execute a robot framework test execution
optional arguments:
-h, --help show this help message and exit
-a ADDRESS, --address ADDRESS
Address to bind to. Default is 0.0.0.0
-p PORT, --port PORT Port to listen on. Default is 1471
-d, --debug Enables debug logging and will not delete the
temporary directory after a robot run
```
Example usage:
```text
C:\rfagent -a 192.168.56.102 -p 1471
Listening on 192.168.56.102:1471
```
### rfremoterun
Once installed, a Test Suite can be executed remotely by running the ```rfremoterun``` script:
```text
C:\DEV>rfremoterun -h
usage: rfremoterun [-h] [--debug] [-d OUTPUTDIR] [-o OUTPUT] [-l LOG]
[-r REPORT] [-F EXTENSION] [-s SUITE] [-t TEST]
[-i INCLUDE] [-e EXCLUDE] [-L LOGLEVEL]
host suites [suites ...]
Script to initiate a remote robot framework test execution
positional arguments:
host IP or Hostname of the server to execute the robot run
on. You can optionally specify the port the server is
listening on by adding ":<port>". If not specified the
port will be defaulted to 1471
suites One or more paths to test suites or directories
containing test suites
optional arguments:
-h, --help show this help message and exit
--debug Run in debug mode. This will enable debug logging and
does not cleanup the workspace directory on the remote
machine after test execution
-d OUTPUTDIR, --outputdir OUTPUTDIR
Where to create the output files on this machine once
they've been retrieved. The default is the directory
that this script is run from
-o OUTPUT, --output OUTPUT
Where to save the XML output file on this machine once
its been retrieved. Given path, similarly as paths
given to --log and --report is path. Other output
files are created based on XML output files after the
test execution and XML outputs can also be further
processed with Rebot tool. Default: remote_output.xml
-l LOG, --log LOG Where to save the HTML Log file on this machine once
its been retrieved. Default: remote_log.html
-r REPORT, --report REPORT
Where to save the HTML Report file on this machine
once its been retrieved. Default: remote_report.html
-F EXTENSION, --extension EXTENSION
Parse only files with this extension when executing a
directory. Has no effect when running individual files
or when using resource files. If more than one
extension is needed, separate them with a colon.
Examples: `--extension robot`, `-F robot:txt`
-s SUITE, --suite SUITE
Select test suites to run by name. When this option is
used with --test, --include or --exclude, only test
cases in matching suites and also matching other
filtering criteria are selected. Name can be a simple
pattern similarly as with --test and it can contain
parent name separated with a dot. You can specify
multiple filters by concatenating with a colon. For
example `-s X.Y` selects suite `Y` only if its parent
is `X`. -s X:Y:Z selects X, Y & Z
-t TEST, --test TEST Select test cases to run by name or long name. Name is
case insensitive and it can also be a simple pattern
where `*` matches anything and `?` matches any char.
To specify multiple, concatenate with a colon.
Example: -t Foo*:Bar*
-i INCLUDE, --include INCLUDE
Select test cases to run by tag. Similarly as name
with --test, tag is case and space insensitive and it
is possible to use patterns with `*` and `?` as
wildcards. Tags and patterns can also be combined
together with `AND`, `OR`, and `NOT` operators.
Examples: --include foo, --include bar*, --include
fooANDbar*
-e EXCLUDE, --exclude EXCLUDE
Select test cases not to run by tag. These tests are
not run even if included with --include. Tags are
matched using the rules explained with --include.
-L LOGLEVEL, --loglevel LOGLEVEL
Threshold level for logging. Available levels: TRACE,
DEBUG, INFO (default), WARN, NONE (no logging). Use
syntax `LOGLEVEL:DEFAULT` to define the default
visible log level in log files. Examples: --loglevel
DEBUG --loglevel DEBUG:INFO
```
The executor script currently supports a subset of the arguments that ```robot.run``` supports.
Example usage:
```text
C:\DEV> rfremoterun 192.168.56.102 C:\DEV\robotframework-remoterunner\tests\robot\ --loglevel DEBUG --outputdir ./
Connecting to: http://192.168.56.102:1471
Robot execution response:
==============================================================================
Root
==============================================================================
Root.TS1
==============================================================================
TS1.1 | PASS |
------------------------------------------------------------------------------
Root.TS1 | PASS |
1 critical test, 1 passed, 0 failed
1 test total, 1 passed, 0 failed
==============================================================================
Root | PASS |
1 critical test, 1 passed, 0 failed
1 test total, 1 passed, 0 failed
==============================================================================
Output: c:\users\user1\appdata\local\temp\tmpy26cmp\output.xml
Log: c:\users\user1\appdata\local\temp\tmpy26cmp\log.html
Report: c:\users\user1\appdata\local\temp\tmpy26cmp\report.html
Local Output: C:\DEV\remote_output.xml
Local Log: C:\DEV\remote_log.html
Local Report: C:\DEV\remote_report.html
```
## Issues/Limitations:
- HTTPS is not yet supported
- Any Python Keyword libraries' dependencies are not packaged up and sent to the remote host.
Any external Python packages that the Keywords rely on will need to be installed on the remote host.
- It is strongly recommended to run the agent and the executor against the same version of robotframework.
There are subtle differences in the way robotframework parses arguments and you may experience unexpected behaviour.
- Although there is backward support for earlier versions of robotframework, this package assumes modern versions.
You should take care that your underlying robotframework version supports the arguments that you require.
## Future Features:
- Add support for HTTPS
- Extend Executor script to support all ```robot.run``` arguments.
- Add support for Robot Variable files.
- Implement an asynchronous mode with the ability to poll the agent for a status on a particular robot execution.
- Add support to run on multiple hosts (concurrently).
| /robotframework-remoterunner-2.0.0.tar.gz/robotframework-remoterunner-2.0.0/README.md | 0.679072 | 0.881155 | README.md | pypi |
import os
import logging
import re
import six.moves.xmlrpc_client as xmlrpc_client
import six
from robot.api import TestSuiteBuilder
from robot.libraries import STDLIBS
from robot.utils.robotpath import find_file
from rfremoterunner.utils import normalize_xmlrpc_address, calculate_ts_parent_path, read_file_from_disk
# Module logger named after the module (was __file__, which produced a
# path-based logger name and defeated hierarchical logging configuration).
logger = logging.getLogger(__name__)

# Default agent RPC port, must match the rfagent default.
DEFAULT_PORT = 1471

# Matches a 'Resource' or 'Library' import line in a robot file.
# Groups: 1 = import type, 2 = separator whitespace, 3 = path, 4 = trailing whitespace.
IMPORT_LINE_REGEX = re.compile(r'(Resource|Library)(\s+)([^[\n\r]*)(\s+)')
class RemoteFrameworkClient:
    """
    Client side of the remote runner: parses local Test Suites, packages them
    with their Resource/Library dependencies, and triggers execution on the
    remote agent via a single XML-RPC call.
    """

    def __init__(self, address, debug=False):
        """
        Constructor for RemoteFrameworkClient

        :param address: Hostname/IP of the server with optional :Port
        :type address: str
        :param debug: Run in debug mode. Enables extra logging and instructs the remote server not to cleanup the
            workspace after test execution
        :type debug: bool
        """
        self._address = normalize_xmlrpc_address(address, DEFAULT_PORT)
        self._client = xmlrpc_client.ServerProxy(self._address)
        self._debug = debug
        # filename -> file data for every Resource/Library referenced by the suites
        self._dependencies = {}
        # suite filename -> {'path': <relative dir>, 'suite_data': <patched file text>}
        self._suites = {}
        logger.setLevel(logging.DEBUG if debug else logging.INFO)

    def execute_run(self, suite_list, extensions, include_suites, robot_arg_dict):
        """
        Sources a series of test suites and then makes the RPC call to the
        agent to execute the robot run.

        :param suite_list: List of paths to test suites or directories containing test suites
        :type suite_list: list
        :param extensions: String that filters the accepted file extensions for the test suites
        :type extensions: str
        :param include_suites: List of strings that filter suites to include
        :type include_suites: list
        :param robot_arg_dict: Dictionary of arguments that will be passed to robot.run on the remote host
        :type robot_arg_dict: dict

        :return: Dictionary containing stdout/err, log html, output xml, report html, return code
        :rtype: dict
        """
        # Use robot to resolve all of the test suites
        suite_list = [os.path.normpath(p) for p in suite_list]
        logger.debug('Suite List: %s', str(suite_list))

        # Let robot do the heavy lifting in parsing the test suites
        builder = self._create_test_suite_builder(include_suites, extensions)
        suite = builder.build(*suite_list)

        # Now iterate the suite's family tree, pull out the suites with test cases and resolve their dependencies.
        # Package them up into a dictionary that can be serialized
        self._package_suite_hierarchy(suite)

        # Make the RPC
        logger.info('Connecting to: %s', self._address)
        response = self._client.execute_robot_run(self._suites, self._dependencies, robot_arg_dict, self._debug)

        return response

    @staticmethod
    def _create_test_suite_builder(include_suites, extensions):
        """
        Construct a robot.api.TestSuiteBuilder instance. There are argument name/type changes made at
        robotframework==3.2. This function attempts to initialize a TestSuiteBuilder instance assuming
        robotframework>=3.2, and falls back the the legacy arguments on exception.

        :param include_suites: Suites to include
        :type include_suites: list
        :param extensions: string of extensions using a ':' as a join character

        :return: TestSuiteBuilder instance
        :rtype: robot.api.TestSuiteBuilder
        """
        if extensions:
            split_ext = list(ext.lower().lstrip('.') for ext in extensions.split(':'))
        else:
            split_ext = ['robot']
        try:
            builder = TestSuiteBuilder(include_suites, included_extensions=split_ext)
        except TypeError:
            # Pre robotframework 3.2 API
            builder = TestSuiteBuilder(include_suites, extension=extensions)  # pylint: disable=unexpected-keyword-arg
        return builder

    def _package_suite_hierarchy(self, suite):
        """
        Parses through a Test Suite and its child Suites and packages them up into a dictionary so they can be
        serialized

        :param suite: robot test suite
        :type suite: TestSuite
        """
        # Empty suites in the hierarchy are likely directories so we're only interested in ones that contain tests
        if suite.tests:
            # Use the actual filename here rather than suite.name so that we preserve the file extension
            suite_filename = os.path.basename(suite.source)
            self._suites[suite_filename] = self._process_test_suite(suite)

        # Recurse down and process child suites
        for sub_suite in suite.suites:
            self._package_suite_hierarchy(sub_suite)

    def _process_test_suite(self, suite):
        """
        Processes a TestSuite containing test cases and performs the following:
            - Parses the suite's dependencies (e.g. Library & Resource references) and adds them into the `dependencies`
            dict
            - Corrects the path references in the suite file to where the dependencies will be placed on the remote side
            - Returns a dict with metadata alongside the updated test suite file data

        :param suite: a TestSuite containing test cases
        :type suite: robot.running.model.TestSuite

        :return: Dictionary containing the suite file data and path from the root directory
        :rtype: dict
        """
        logger.debug('Processing Test Suite: %s', suite.name)
        # Traverse the suite's ancestry to work out the directory path so that it can be recreated on the remote side
        path = calculate_ts_parent_path(suite)

        # Recursively parse and process all dependencies and return the patched test suite file
        updated_file = self._process_robot_file(suite)

        return {
            'path': path,
            'suite_data': updated_file
        }

    def _process_robot_file(self, source):
        """
        Processes a robot file (could be a Test Suite or a Resource) and performs the following:
            - Parses the files's robot dependencies (e.g. Library & Resource references) and adds them into the
            `dependencies` dict
            - Corrects the path references in the suite file to where the dependencies will be placed on the remote side
            - Returns the updated robot file data

        :param source: a Robot file or a path to a robot file
        :type source: robot.running.model.TestSuite | str

        :return: Dictionary containing the suite file data and path from the root directory
        :rtype: dict
        """
        if isinstance(source, six.string_types):
            file_path = source
            is_test_suite = False
        else:
            file_path = source.source
            is_test_suite = True
        modified_file_lines = []

        # Read the actual file from disk
        file_lines = read_file_from_disk(file_path, into_lines=True)

        for line in file_lines:
            # Check if the current line is a Library or Resource import
            matches = IMPORT_LINE_REGEX.search(line)

            if matches and len(matches.groups()) == 4:
                imp_type = matches.group(1)
                whitespace_sep = matches.group(2)
                res_path = matches.group(3)
                # Replace the path with just the filename. They will be in the PYTHONPATH on the remote side so only
                # the filename is required.
                filename = os.path.basename(res_path)
                line_ending = matches.group(4)

                # Rebuild the updated line and append
                modified_file_lines.append(imp_type + whitespace_sep + filename + line_ending)

                # If this not a dependency we've already dealt with and not a built-in robot library
                # (e.g. robot.libraries.Process)
                if filename not in self._dependencies and \
                        not res_path.strip().startswith('robot.libraries') \
                        and res_path.strip() not in STDLIBS:
                    # Find the actual file path
                    full_path = find_file(res_path, os.path.dirname(file_path), imp_type)

                    if imp_type == 'Library':
                        # If its a Library (python file) then read the data and add to the dependencies
                        self._dependencies[filename] = read_file_from_disk(full_path)
                    else:
                        # If its a Resource, recurse down and parse it
                        self._process_robot_file(full_path)
            else:
                modified_file_lines.append(line)

        new_file_data = ''.join(modified_file_lines)

        # Resources are keyed by filename so they can be dropped next to the suites remotely;
        # test suites themselves are returned to the caller instead.
        if not is_test_suite:
            self._dependencies[os.path.basename(file_path)] = new_file_data

        return new_file_data
import argparse
import os
ROBOT_RUN_ARGS = ['loglevel', 'include', 'test', 'exclude', 'suite', 'extension']
class ExecutorArgumentParser:
def __init__(self, args):
"""
Constructor for ExecutorArgumentParser
:param args: Arguments to process (probably stdin)
:type args: list
"""
self._parser = self._init_parser()
parsed_args = self._parser.parse_args(args)
# Because of some limitations in argparse and not being able to specify multiple arguments of the same name,
# we have to do some extra translation
if parsed_args.suite:
parsed_args.suite = parsed_args.suite.split(':')
# Split the args based on whether they're for the executor, or whether they need to be passed into robot.run on
# the remote host
self.robot_run_args = {}
for arg_name, arg_val in parsed_args.__dict__.items():
if arg_name in ROBOT_RUN_ARGS:
if arg_val is not None:
self.robot_run_args[arg_name] = arg_val
setattr(self, arg_name, arg_val)
@staticmethod
def _init_parser():
"""
Initialise the ArgumentParser instance with the supported arguments
:return: Argument parser instance
:rtype: ArgumentParser
"""
parser = argparse.ArgumentParser(description='Script to initiate a remote robot framework test execution')
parser.add_argument('host',
help='IP or Hostname of the server to execute the robot run on. You can optionally specify '
'the port the server is listening on by adding ":<port>". If not specified the port '
'will be defaulted to 1471')
parser.add_argument('suites', nargs='+',
help='One or more paths to test suites or directories containing test suites')
parser.add_argument('--debug',
help='Run in debug mode. This will enable debug logging and does not cleanup the workspace '
'directory on the remote machine after test execution', action='store_true')
# Although these arguments are ones that robot accepts, they won't be passed into the remote robot execution.
# The output artifacts will be placed in the workspace directory on the remote host and when pulled back they
# are saved to the local machine as per configured by the arguments below
parser.add_argument('-d', '--outputdir',
help='Where to create the output files on this machine once they\'ve been retrieved. The '
'default is the directory that this script is run from',
default='.')
parser.add_argument('-o', '--output',
help='Where to save the XML output file on this machine once its been retrieved. Given '
'path, similarly as paths given to --log and --report is path. Other output files are '
'created based on XML output files after the test execution and XML outputs can also '
'be further processed with Rebot tool. Default: remote_output.xml',
default='remote_output.xml')
parser.add_argument('-l', '--log',
help='Where to save the HTML Log file on this machine once its been retrieved. Default: '
'remote_log.html',
default='remote_log.html')
parser.add_argument('-r', '--report',
help='Where to save the HTML Report file on this machine once its been retrieved. Default: '
'remote_report.html',
default='remote_report.html')
parser.add_argument('-F', '--extension',
help='Parse only files with this extension when executing a directory. Has no effect when '
'running individual files or when using resource files. If more than one extension is '
'needed, separate them with a colon.\r Examples: `--extension robot`, `-F robot:txt`')
parser.add_argument('-s', '--suite',
help='Select test suites to run by name. When this option is used with --test, --include or'
' --exclude, only test cases in matching suites and also matching other filtering '
'criteria are selected. Name can be a simple pattern similarly as with --test and it '
'can contain parent name separated with a dot. You can specify multiple filters by '
'concatenating with a colon. For example `-s X.Y` selects suite `Y` '
'only if its parent is `X`. -s X:Y:Z selects X, Y & Z')
# Arguments passed into robot.run on the remote host:
parser.add_argument('-t', '--test',
help='Select test cases to run by name or long name. Name is case insensitive and'
' it can also be a simple pattern where `*` matches anything and `?` matches any '
'char. To specify multiple, concatenate with a colon. Example: -t Foo*:Bar*')
parser.add_argument('-i', '--include',
help='Select test cases to run by tag. Similarly as name with --test, tag is case and '
'space insensitive and it is possible to use patterns with `*` and `?` as wildcards. '
'Tags and patterns can also be combined together with `AND`, `OR`, and `NOT` '
'operators. Examples: --include foo, --include bar*, --include fooANDbar*')
parser.add_argument('-e', '--exclude',
help='Select test cases not to run by tag. These tests are not run even if included with '
'--include. Tags are matched using the rules explained with --include.')
parser.add_argument('-L', '--loglevel',
help=' Threshold level for logging. Available levels: TRACE, DEBUG, INFO (default), WARN, '
'NONE (no logging). Use syntax `LOGLEVEL:DEFAULT` to define the default visible log '
'level in log files. Examples: --loglevel DEBUG --loglevel DEBUG:INFO')
return parser
def get_log_html_output_location(self):
    """Resolve where the log.html artifact should be written locally.

    :return: Path to where the log html file should be saved
    :rtype: str
    """
    return self._resolve_output_path(self.log)
def get_output_xml_output_location(self):
    """Resolve where the output.xml artifact should be written locally.

    :return: Path to where the output xml file should be saved
    :rtype: str
    """
    return self._resolve_output_path(self.output)
def get_report_html_output_location(self):
    """Resolve where the report.html artifact should be written locally.

    :return: Path to where the report html file should be saved
    :rtype: str
    """
    return self._resolve_output_path(self.report)
def _resolve_output_path(self, filename):
    """Work out the absolute path at which a test artifact should be saved.

    A relative name is anchored under ``self.outputdir``; an absolute
    name is used as given. The result is always normalised.

    :param filename: Name of the file e.g. log.html
    :type filename: str
    :return: Absolute path of where to save the test artifact
    :rtype: str
    """
    if os.path.isabs(filename):
        resolved = filename
    else:
        resolved = os.path.abspath(os.path.join(self.outputdir, filename))
    return os.path.normpath(resolved)
from robot.api import logger
from .model import LogMessage
def write(msg, level="INFO", html=False, attachment=None):
    """Write ``msg`` to the log file at the given level.

    Valid log levels are ``TRACE``, ``DEBUG``, ``INFO`` (default since RF
    2.9.1), ``WARN``, and ``ERROR`` (new in RF 2.9). The ``HTML`` pseudo
    level may also be used; it logs the message as HTML at ``INFO``.

    ``attachment`` should be a dict with "name", "data" and "mime" keys
    defined. See the module example.

    Prefer the level-specific helpers such as ``info`` and ``debug``,
    which expose a dedicated ``html`` argument for message formatting.
    """
    entry = LogMessage(msg)
    entry.level = level
    entry.attachment = attachment
    logger.write(entry, level, html)
def trace(msg, html=False, attachment=None):
    """Log ``msg`` at the ``TRACE`` level."""
    write(msg, "TRACE", html=html, attachment=attachment)
def debug(msg, html=False, attachment=None):
    """Log ``msg`` at the ``DEBUG`` level."""
    write(msg, "DEBUG", html=html, attachment=attachment)
def info(msg, html=False, also_console=False, attachment=None):
    """Log ``msg`` at the ``INFO`` level.

    When ``also_console`` is true, the message is additionally echoed
    to the console as well as written to the log file.
    """
    write(msg, "INFO", html=html, attachment=attachment)
    if also_console:
        console(msg)
def warn(msg, html=False, attachment=None):
    """Log ``msg`` at the ``WARN`` level."""
    write(msg, "WARN", html=html, attachment=attachment)
def error(msg, html=False, attachment=None):
    """Log ``msg`` at the ``ERROR`` level."""
    write(msg, "ERROR", html=html, attachment=attachment)
def console(msg, newline=True, stream="stdout"):
    """Write ``msg`` straight to the console.

    A trailing newline is appended when ``newline`` is true. Output goes
    to the standard output stream by default; pass ``stream='stderr'``
    to write to the standard error stream instead.
    """
    logger.console(msg, newline, stream)
from datetime import datetime
from time import time
from typing import Any, Callable, Dict, List, Optional, Union
from reportportal_client.errors import ResponseError as ReportPortalResponseError
from reportportal_client.service import ReportPortalService, uri_join
from requests.exceptions import ConnectionError
from robot.libraries.BuiltIn import BuiltIn
from urllib3.exceptions import ResponseError
from .report import Report
from .model import Keyword, Suite, Test
def timestamp(rf_time: Optional[str] = None) -> str:
    """Get a millisecond-precision timestamp for Report Portal logs.

    Args:
        rf_time: Time string in the Robot Framework format
            ("%Y%m%d %H:%M:%S.%f"). The string carries no timezone, so it
            is interpreted as local time. When omitted, the current time
            is used.

    Returns:
        Epoch time in milliseconds, as a string.
    """
    if rf_time:
        # Naive parse: RF timestamps have no timezone info, so local time applies.
        _timestamp = datetime.strptime(rf_time, "%Y%m%d %H:%M:%S.%f").timestamp()
    else:
        _timestamp = time()
    return str(int(_timestamp * 1000))
def ignore_broken_pipe_error(func: Callable[..., Any]) -> Callable[..., Any]:
    """Decorator that swallows ConnectionErrors caused by a broken pipe.

    Broken-pipe ConnectionErrors are only logged as warnings through the
    Robot Framework BuiltIn library; any other ConnectionError is re-raised
    with its original traceback intact.

    Args:
        func: function to decorate.

    Returns:
        Decorated function (metadata preserved via ``functools.wraps``).
    """
    from functools import wraps  # local import keeps this module's deps unchanged

    @wraps(func)
    def d(*args: Any, **kwargs: Any) -> Any:
        try:
            return func(*args, **kwargs)
        except ConnectionError as e:
            message = str(e)
            if 'BrokenPipeError' not in message:
                # Re-raise as-is: preserves the traceback instead of
                # replacing the exception with a fresh one.
                raise
            RobotService.builtin_lib().log(message=message, level="WARN")
    return d
class RobotService(object):
    """The class for working with the Report Portal service.

    All state is class-level: one shared ReportPortalService client,
    one BuiltIn library handle, and one Report helper per test run.
    """

    # Shared Report Portal client; created by init_service(), torn down by terminate_service().
    rp: Optional[ReportPortalService] = None
    # Lazily created Robot Framework BuiltIn library handle.
    builtin: Optional[BuiltIn] = None
    # Lazily created helper for building links to the RF report.
    report: Optional[Report] = None
    # Robot Framework statuses -> Report Portal statuses.
    status_mapping = {"PASS": "PASSED", "FAIL": "FAILED", "SKIP": "SKIPPED"}
    # Robot Framework log levels -> Report Portal log levels.
    log_level_mapping = {
        "INFO": "INFO",
        "FAIL": "ERROR",
        "TRACE": "TRACE",
        "DEBUG": "DEBUG",
        "WARN": "WARN",
        "ERROR": "ERROR"
    }

    @staticmethod
    def builtin_lib() -> BuiltIn:
        """Return the BuiltIn library instance.

        Returns:
            BuiltIn: instance of the BuiltIn library (created on first use).
        """
        if not RobotService.builtin:
            RobotService.builtin = BuiltIn()
        return RobotService.builtin

    @staticmethod
    def rf_report() -> Report:
        """Return the instance of Report class.

        Returns:
            Report: instance of the Report class (created on first use).
        """
        if not RobotService.report:
            RobotService.report = Report()
        return RobotService.report

    @staticmethod
    def init_service(endpoint: str, project: str, uuid: str) -> None:
        """Initialization of the service for working with the Report Portal.

        Args:
            endpoint: Report Portal endpoint.
            project: Report Portal project name.
            uuid: Report Portal uuid (used as the API token).

        Raises:
            Exception: if the service has already been initialized.
        """
        if RobotService.rp is None:
            RobotService.rp = ReportPortalService(endpoint=endpoint, project=project, token=uuid)
        else:
            raise Exception("RobotFrameworkService is already initialized.")

    @staticmethod
    def terminate_service() -> None:
        """Terminate the service (no-op when it was never initialized)."""
        if RobotService.rp is not None:
            RobotService.rp.terminate()

    @staticmethod
    def start_launch(launch_name: str, launch_tags: List[str], launch: Suite, mode: str = None) -> str:
        """Register a new launch in Report Portal.

        Args:
            launch_name: launch name.
            launch_tags: launch tags.
            launch: model.Suite instance (its doc becomes the launch description).
            mode: data storage mode.

        Raises:
            RuntimeError: if the service has not been initialized.

        Returns:
            Launch id.
        """
        if RobotService.rp is None:
            raise RuntimeError("RobotFrameworkService is not initialized.")
        sl_pt = {
            "name": launch_name,
            "start_time": timestamp(),
            "description": launch.doc,
            "mode": mode,
            "tags": launch_tags
        }
        return RobotService.rp.start_launch(**sl_pt)

    @staticmethod
    def finish_launch(launch: Suite) -> None:
        """Finishes the launch in the Report Portal.

        Args:
            launch: model.Suite instance.

        Raises:
            RuntimeError: if the service has not been initialized.
        """
        if RobotService.rp is None:
            raise RuntimeError("RobotFrameworkService is not initialized.")
        fl_rq = {"end_time": timestamp(), "status": RobotService.status_mapping[launch.status]}
        RobotService.rp.finish_launch(**fl_rq)

    @staticmethod
    def start_suite(suite: Suite) -> None:
        """Register the start of a new suite.

        Args:
            suite: model.Suite instance.

        Raises:
            RuntimeError: if the service has not been initialized.
        """
        if RobotService.rp is None:
            raise RuntimeError("RobotFrameworkService is not initialized.")
        start_rq = {
            "name": suite.longname,
            "description": suite.doc,
            "tags": [],
            "start_time": timestamp(rf_time=suite.start_time),
            "item_type": suite.rp_item_type
        }
        RobotService.rp.start_test_item(**start_rq)

    @staticmethod
    def finish_suite(suite: Suite, issue: str = None) -> None:
        """Finishes the suite in the Report Portal.

        Args:
            suite: model.Suite instance.
            issue: issue number is automatically attached to log object.

        Raises:
            RuntimeError: if the service has not been initialized.
        """
        if RobotService.rp is None:
            raise RuntimeError("RobotFrameworkService is not initialized.")
        fta_rq = {
            "end_time": timestamp(rf_time=suite.end_time),
            "status": RobotService.status_mapping[suite.status],
            "issue": issue
        }
        RobotService.rp.finish_test_item(**fta_rq)

    @staticmethod
    def start_test(test: Test) -> None:
        """Register the start of a new test.

        Args:
            test: model.Test instance.

        Raises:
            RuntimeError: if the service has not been initialized.
        """
        if RobotService.rp is None:
            raise RuntimeError("RobotFrameworkService is not initialized.")
        description = test.doc.strip()
        # Append a markdown link to the RF report when one can be built.
        report_link = RobotService.rf_report().get_url_to_report_by_case_id(test=test)
        if report_link:
            description += f"\n\n[Link to Report]({report_link})"
        start_rq = {
            "name": test.name,
            "description": description,
            "tags": test.tags,
            "start_time": timestamp(rf_time=test.start_time),
            "item_type": test.rp_item_type
        }
        RobotService.rp.start_test_item(**start_rq)

    @staticmethod
    def finish_test(test: Test, issue: str = None) -> None:
        """Finishes the test in the Report Portal.

        Args:
            test: model.Test instance.
            issue: issue number is automatically attached to log object.

        Raises:
            RuntimeError: if the service has not been initialized.
        """
        if RobotService.rp is None:
            raise RuntimeError("RobotFrameworkService is not initialized.")
        fta_rq = {
            "end_time": timestamp(rf_time=test.end_time),
            "status": RobotService.status_mapping[test.status],
            "issue": issue
        }
        RobotService.rp.finish_test_item(**fta_rq)

    @staticmethod
    def start_keyword(keyword: Keyword) -> None:
        """Register the start of a new keyword.

        Args:
            keyword: model.Keyword instance.

        Raises:
            RuntimeError: if the service has not been initialized.
        """
        if RobotService.rp is None:
            raise RuntimeError("RobotFrameworkService is not initialized.")
        start_rq = {
            "name": keyword.get_name(),
            "description": keyword.doc,
            "tags": keyword.tags,
            "start_time": timestamp(rf_time=keyword.start_time),
            "item_type": keyword.rp_item_type
        }
        RobotService.rp.start_test_item(**start_rq)

    @staticmethod
    def finish_keyword(keyword: Keyword, issue: str = None) -> None:
        """Finishes the keyword in the Report Portal.

        Args:
            keyword: model.Keyword instance.
            issue: issue number is automatically attached to log object.

        Raises:
            RuntimeError: if the service has not been initialized.
        """
        if RobotService.rp is None:
            raise RuntimeError("RobotFrameworkService is not initialized.")
        fta_rq = {
            "end_time": timestamp(rf_time=keyword.end_time),
            "status": RobotService.status_mapping[keyword.status],
            "issue": issue
        }
        RobotService.rp.finish_test_item(**fta_rq)

    @staticmethod
    @ignore_broken_pipe_error
    def log(log_data: Union[list, dict]) -> None:
        """Send a message in the Report Portal log.

        Args:
            log_data: message, or a list of messages prepared for logging in ReportPortal.
                A dict is sent as a single entry, a list as a batch.

        Raises:
            RuntimeError: if the service has not been initialized.
        """
        if RobotService.rp is None:
            raise RuntimeError("RobotFrameworkService is not initialized.")
        try:
            if isinstance(log_data, dict):
                RobotService.rp.log(**log_data)
            elif isinstance(log_data, list):
                RobotService.rp.log_batch(log_data)
        except (ResponseError, ReportPortalResponseError) as e:
            # Upload failures are reported to the console instead of failing the run.
            error = str(e)
            message: Union[str, Dict[str, Any]] = f"RobotService.rp.log failed with ResponseError. " \
                                                  f"See logs of a certain test.\n{error}"
            RobotService.builtin_lib().log_to_console(message=message)
            if "Maximum upload size" in error:
                # Leave a breadcrumb in RP itself when the payload was too big.
                message = {"message": message, "level": "INFO", "time": timestamp()}
                RobotService.rp.log(**message)

    @staticmethod
    def get_items_info(**params: Any) -> Dict[str, Any]:
        """Gets information about items from current launch.

        Args:
            params: request parameters (query string filters for the RP "item" endpoint).

        Raises:
            RuntimeError: if the service has not been initialized.

        Returns:
            Items information (decoded JSON response).
        """
        if RobotService.rp is None:
            raise RuntimeError("RobotFrameworkService is not initialized.")
        params["filter.eq.launch"] = RobotService.rp.launch_id
        url = uri_join(RobotService.rp.base_url, "item")
        response = RobotService.rp.session.get(url=url, params=params)
        return response.json()
import os
import re
from typing import Any, Dict, Union
from html import unescape
from mimetypes import guess_type
from robot.libraries.BuiltIn import BuiltIn
# Patterns for editing HTML messages.
# Matches Robot Framework's collapsible HTML log entries (<details>/<summary> blocks).
HTML_MESSAGE_PATTERN = re.compile(r"<details><summary>(?P<summary>.*)</summary><p>(?P<message>.*)</p></details>", re.S)
# Extracts the relative file path from an embedded screenshot <img> tag.
SCREENSHOT_PATH_PATTERN = re.compile(r'<img src="(?P<path>[_/.\-\w]*)"', re.S)
class MessageFormatter(object):
    """Class for formatting log messages in ReportPortal."""

    @staticmethod
    def format_message(message: Dict[str, Any], keyword_name: str) -> Dict[str, Any]:
        """Method for formatting message.

        Truncate message and cut any html attribute:
        tags: details, summary, p; quote symbols: > and other.
        Adds attachment to message if it exists.
        And prepare message to correctly display in Report Portal.

        Args:
            message: message of the step of keyword.
            keyword_name: current keyword name.

        Returns:
            Dictionary with message information, that is correctly displayed in Report Portal.
        """
        path_to_screen = SCREENSHOT_PATH_PATTERN.search(message["message"])
        if path_to_screen:
            # Screenshot messages: replace the HTML body with a short note
            # and ship the image itself as an attachment.
            message["attachment"] = MessageFormatter._get_attachment(attachment_path=path_to_screen.group("path"))
            message["message"] = f'Screen shot in the keyword "{keyword_name}"'
        else:
            message["message"] = MessageFormatter._strip_html_tags(message=message["message"])
            message["message"] = MessageFormatter._truncate_message(message=message["message"])
        message = {
            "time": message.get("timestamp"),
            "message": message["message"],
            "level": message["level"],
            "attachment": message.get("attachment"),
        }
        return message

    @staticmethod
    def _get_attachment(attachment_path: str, is_absolute_path: bool = False) -> Dict[str, str]:
        """Gets attachment information to use in Report Portal.

        Args:
            attachment_path: path to attachment.
            is_absolute_path: flag indicating path to attachment is absolute or not.
                When False, the path is resolved relative to ${OUTPUT_DIR}.

        Returns:
            Information by attachment for log message.
        """
        if not is_absolute_path:
            output_dir = BuiltIn().get_variable_value("${OUTPUT_DIR}")
            attachment_path = os.path.join(output_dir, attachment_path)
        with open(attachment_path, "rb") as attachment:
            attachment_info = {
                "name": os.path.basename(attachment_path),
                # NOTE(review): str() on bytes yields the "b'...'" repr, not the raw
                # content — presumably the RP client expects raw/base64 bytes; confirm.
                "data": str(attachment.read()),
                "mime": guess_type(attachment_path)[0] or "application/octet-stream"
            }
        return attachment_info

    @staticmethod
    def _strip_html_tags(message: str) -> str:
        """Method for unquote message and removing html tags: details, summary, p.

        Args:
            message: message of the step of keyword.

        Returns:
            String, where all quotes and html tags are removing.
        """
        match = HTML_MESSAGE_PATTERN.fullmatch(message)
        if match:
            summary, message = match.group("summary", "message")
            # Keep the summary line unless the body already repeats it.
            if not message.startswith(summary):
                message = "\n".join([summary, message])
            message = unescape(message)
        return message

    @staticmethod
    def _truncate_message(message: Union[str, dict], max_length: int = 8388608) -> Union[str, Dict[str, Any]]:
        """Truncate message to the maximum length.

        Args:
            message: message of the step of keyword. It may be string or dictionary.
            max_length: max length of the message (default 8388608 = 8 MiB).

        Returns:
            String or dictionary, that is truncated. Truncated text gets a
            trailing ".." marker; shorter text is returned unchanged.
        """
        if isinstance(message, dict):
            message["message"] = message["message"][:max_length] + (message["message"][max_length:] and "..")
        else:
            message = message[:max_length] + (message[max_length:] and "..")
        return message
import os
from typing import Dict, Optional
from robot.api import logger
from .model import Test
class Report(object):
    """Class which helps to build link to Robot Framework report."""

    def __init__(self) -> None:
        """Initialization."""
        # Cached report URL; computed lazily by the report_link property.
        self._report_link: Optional[str] = None

    @property
    def report_link(self) -> str:
        """Get link to Robot Framework report file.

        Returns:
            Cached value of link to report file (may be an empty string when
            no CI environment variables are available).
        """
        if self._report_link is None:
            self._report_link = self._build_link_to_report_file()
        return self._report_link

    def get_url_to_report_by_case_id(self, test: Test) -> str:
        """Get url to Robot Framework report file by TestRail case id.

        Args:
            test: test object from the model.

        Returns:
            Report URL (with a #search fragment for the test's testrailid tag),
            or an empty string when no report link could be built.
        """
        report_url = self.report_link
        if report_url:
            case_id = self._get_tag_with_testrailid(test=test)
            report_url = f'{report_url}#search?include={case_id}'
        return report_url

    @staticmethod
    def _get_tag_with_testrailid(test: Test) -> Optional[str]:
        """Get tag containing TestRail case id.

        Args:
            test: test object from the model.

        Returns:
            First tag starting with 'testrailid=', or None when absent.
        """
        for tag in test.tags:
            if tag.startswith('testrailid='):
                return tag
        return None

    def _build_link_to_report_file(self) -> str:
        """Build url to Robot Framework report file.

        Returns:
            URL to report file, or '' when no supported CI variables exist.
        """
        build_url = ''
        ci_vars = self._get_vars_for_report_link()
        if 'TEAMCITY_HOST_URL' in ci_vars:
            base_hostname = ci_vars.get('TEAMCITY_HOST_URL')
            buildtype_id = ci_vars.get('TEAMCITY_BUILDTYPE_ID')
            build_id = ci_vars.get('TEAMCITY_BUILD_ID')
            report_artifact_path = ci_vars.get('REPORT_ARTIFACT_PATH')
            build_url = (f'{base_hostname}/repository/download/{buildtype_id}'
                         f'/{build_id}:id/{report_artifact_path}')
        elif 'JENKINS_BUILD_URL' in ci_vars:
            build_url = ci_vars['JENKINS_BUILD_URL'] + 'robot/report'
        return f'{build_url}/report.html' if build_url else ''

    @staticmethod
    def _get_vars_for_report_link() -> Dict[str, str]:
        """Getting values from environment variables for preparing link to report.

        If tests are running in CI, the environment variables should be defined
        in the CI configuration settings to get url to the test case report.

        The following variables are used:
        for Teamcity - TEAMCITY_HOST_URL, TEAMCITY_BUILDTYPE_ID, TEAMCITY_BUILD_ID, REPORT_ARTIFACT_PATH;
        for Jenkins - JENKINS_BUILD_URL.
        If these variables are not found, the link to report will not be formed.

        == Example ==
        1. for Teamcity
        | Changing build configuration settings
        | REPORT_ARTIFACT_PATH output
        | TEAMCITY_BUILD_ID %teamcity.build.id%
        | TEAMCITY_BUILDTYPE_ID %system.teamcity.buildType.id%
        | TEAMCITY_HOST_URL https://teamcity.billing.ru
        2. for Jenkins
        | add to the shell the execution of the docker container parameter
        | -e "JENKINS_BUILD_URL = ${BUILD_URL}"

        Returns:
            Dictionary with environment variables (empty on any missing TeamCity variable).
        """
        variables: Dict[str, str] = {}
        env_var = os.environ.copy() or {}
        if 'TEAMCITY_HOST_URL' in env_var:
            teamcity_vars = {'TEAMCITY_HOST_URL', 'TEAMCITY_BUILDTYPE_ID', 'TEAMCITY_BUILD_ID', 'REPORT_ARTIFACT_PATH'}
            try:
                variables = {var: env_var[var] for var in teamcity_vars}
            except KeyError:
                # All four TeamCity variables are required; a partial set is discarded.
                error_msg = "[reportportal-listener] There are no variables for getting a link to the report by tests."
                logger.error(error_msg)
        elif 'JENKINS_BUILD_URL' in env_var:
            variables = {'JENKINS_BUILD_URL': env_var['JENKINS_BUILD_URL']}
        return variables
from typing import Any, List, Optional
from robot.libraries.BuiltIn import BuiltIn
def get_variable(name: str, default: Any = None) -> Any:
    """Fetch a Robot Framework variable by bare name.

    Args:
        name: variable name without the ``${}`` decoration.
        default: value to return when the variable is not defined.

    Returns:
        The variable's value, otherwise the default value.
    """
    return BuiltIn().get_variable_value(f"${{{name}}}", default=default)
class Variables(object):
    """Class for initializing and storing listener settings.

    Values are read lazily from Robot Framework variables (RP_UUID,
    RP_ENDPOINT, RP_LAUNCH, RP_PROJECT, RP_LAUNCH_DOC, RP_LAUNCH_TAGS)
    and cached after the first successful access.
    """

    def __init__(self) -> None:
        """Class initialization."""
        self._uuid: Optional[str] = None
        self._endpoint: Optional[str] = None
        self._launch_name: Optional[str] = None
        self._project: Optional[str] = None
        self._launch_doc: Optional[str] = None
        self._launch_tags: Optional[List[str]] = None

    @property
    def uuid(self) -> str:
        """Gets the user uuid for accessing the ReportPortal.

        Raises:
            AssertionError if RP_UUID variable is empty or does not exist.

        Returns:
            User uuid
        """
        self._uuid = self._uuid or get_variable("RP_UUID")
        if self._uuid is None:
            raise AssertionError("Missing parameter RP_UUID for robot run\nYou should pass -v RP_UUID:<uuid_value>")
        return self._uuid

    @property
    def endpoint(self) -> str:
        """Gets the ReportPortal endpoint.

        Raises:
            AssertionError if RP_ENDPOINT variable is empty or does not exist.

        Returns:
            ReportPortal endpoint.
        """
        self._endpoint = self._endpoint or get_variable("RP_ENDPOINT")
        if self._endpoint is None:
            # Fixed hint: previously suggested the bogus flag "-v RP_RP_ENDPOINT".
            raise AssertionError("Missing parameter RP_ENDPOINT for robot run\n"
                                 "You should pass -v RP_ENDPOINT:<endpoint_value>")
        return self._endpoint

    @property
    def launch_name(self) -> str:
        """Gets the ReportPortal launch name.

        Raises:
            AssertionError if RP_LAUNCH variable is empty or does not exist.

        Returns:
            ReportPortal launch name.
        """
        self._launch_name = self._launch_name or get_variable("RP_LAUNCH")
        if self._launch_name is None:
            raise AssertionError("Missing parameter RP_LAUNCH for robot run\n"
                                 "You should pass -v RP_LAUNCH:<launch_name_value>")
        return self._launch_name

    @property
    def project(self) -> str:
        """Gets the ReportPortal project name.

        Raises:
            AssertionError if RP_PROJECT variable is empty or does not exist.

        Returns:
            ReportPortal project name.
        """
        self._project = self._project or get_variable("RP_PROJECT")
        if self._project is None:
            raise AssertionError("Missing parameter RP_PROJECT for robot run\n"
                                 "You should pass -v RP_PROJECT:<project_name_value>")
        return self._project

    @property
    def launch_doc(self) -> str:
        """Gets the ReportPortal launch documentation.

        Returns:
            ReportPortal launch documentation ('' when not defined).
        """
        if self._launch_doc is None:
            self._launch_doc = get_variable("RP_LAUNCH_DOC", "")
        return self._launch_doc

    @property
    def launch_tags(self) -> List[str]:
        """Gets the ReportPortal launch tags.

        Returns:
            ReportPortal launch tags (comma-separated RP_LAUNCH_TAGS value).
        """
        if self._launch_tags is None:
            self._launch_tags = get_variable("RP_LAUNCH_TAGS", "").split(",")
        return self._launch_tags
from typing import Any, Dict, List, Optional, Type
from robot.api import ResultVisitor
from robot.result.model import TestCase, TestSuite
from .service import RobotService
class RobotFrameworkReportModifier(ResultVisitor):
    """Class for modifying Robot Framework report.

    Walks the RF result model and injects Report Portal links: one into
    the top suite's metadata and one into each test's documentation.
    """

    def __init__(self, robot_service: Type[RobotService]) -> None:  # noqa: E951
        """Init Result Visitor.

        Args:
            robot_service: instance RobotService.
        """
        self._robot_service = robot_service
        # Lazily populated mapping of item longname -> RP URL fragment.
        self._uri_parts: Optional[Dict[str, str]] = None

    @property
    def uri_parts(self) -> Dict[str, str]:
        """Sets value to _uri_parts attribute if it is not specified.

        Returns:
            Value of _uri_parts attribute.
        """
        if self._uri_parts is None:
            self._uri_parts = self._get_rp_uri_parts()
        return self._uri_parts

    def start_suite(self, suite: TestSuite) -> None:
        """Visits top level suite in result and adds Report Portal link in metadata.

        Args:
            suite: visited suite.
        """
        # Only the root suite (no parent) receives the metadata entry.
        if not suite.parent:
            suite.metadata["Report Portal"] = self.get_link_to_rp_report()

    def start_test(self, test: TestCase) -> None:
        """Visits each test in result and adds Report Portal link in documentation.

        Args:
            test: visited test.
        """
        link = self.get_link_to_rp_report(test=test)
        message = f'[{link} | Report Portal]'
        test.doc = '\n\n'.join([test.doc, message]) if test.doc else message

    def _get_rp_uri_parts(self) -> Dict[str, str]:
        """Gets uri parts, for item name.

        Returns:
            Dictionary with item longname as key and uri part as value.
        """
        tests = self._get_rp_tests_info()
        parts = {}
        for test in tests:
            # Only items directly under one suite are mapped; deeper nesting is skipped.
            if len(test["path_names"]) > 1:
                continue
            suite_id = test.get("parent")
            if suite_id:
                suite_longname = test["path_names"][suite_id]
                test_longname = f"{suite_longname}.{test['name']}"
                # Items with children link into the item tree; leaves link to their log.
                parts[test_longname] = "/" + test["id"] if test["has_childs"] else "?log.item=" + test["id"]
                parts[suite_longname] = "/" + suite_id
        return parts

    def _get_rp_tests_info(self) -> List[Dict[str, Any]]:
        """Gets information about tests items from Report Portal.

        Pages through the RP "item" endpoint (300 items per page) and
        concatenates all STEP-type items of the current launch.

        Returns:
            Information about tests.
        """
        params = {"page.size": 300, "filter.eq.type": "STEP"}
        response = self._robot_service.get_items_info(**params)
        page_num, page_count, tests = 2, response["page"]["totalPages"], response["content"]
        while page_num <= page_count:
            params["page.page"] = page_num
            response = self._robot_service.get_items_info(**params)
            tests.extend(response["content"])
            page_num += 1
        return tests

    def get_link_to_rp_report(self, test: TestCase = None) -> str:
        """Gets link to report in Report Portal.

        Args:
            test: test object from Robot Framework.

        Raises:
            RuntimeError: if the RP service has not been initialized.

        Returns:
            Link to report (launch-level, or item-level when a test is given).
        """
        if self._robot_service.rp is None:
            raise RuntimeError("RobotFrameworkService is not initialized.")
        link = f"{self._robot_service.rp.endpoint}/ui/#{self._robot_service.rp.project}" \
               f"/launches/all/{self._robot_service.rp.launch_id}"
        if test:
            suite_uri = self.uri_parts.get(test.parent.longname)
            test_uri = self.uri_parts.get(test.longname)
            if suite_uri:
                link += suite_uri
            if test_uri:
                link += test_uri
        return link
from typing import Any, Dict, List, Optional, Union
class Suite(object):
    """Model of a Robot Framework suite used for Report Portal reporting."""

    def __init__(self, attributes: Dict[str, Any]) -> None:
        """Build the suite model from listener attributes.

        Args:
            attributes: suite attributes from Robot Framework.
        """
        super(Suite, self).__init__()
        # Mandatory listener attributes (KeyError when absent, as before).
        self.suites: List[str] = attributes["suites"]
        self.doc: str = attributes["doc"]
        self.source: str = attributes["source"]
        self.total_tests: int = attributes["totaltests"]
        self.longname: str = attributes["longname"]
        self.robot_id: str = attributes["id"]
        self.metadata: Dict[str, str] = attributes["metadata"]
        self.start_time: str = attributes["starttime"]
        # Attributes that only appear once execution has finished.
        self.end_time: str = attributes.get("endtime", "")
        self.status: str = attributes.get("status", "")
        self.statistics: str = attributes.get("statistics", "")
        self.message: Dict[str, Any] = {}
        # A suite that directly contains tests is reported as a TEST item.
        self.rp_item_type: str = "TEST" if attributes.get("tests") else "SUITE"
        self.tests: List[Test] = []
        self.type: str = "SUITE"
        self.setup: Optional[Keyword] = None
        self.teardown: Optional[Keyword] = None

    def update(self, attributes: Dict[str, Any]) -> None:
        """Refresh end-of-run fields: STATUS, STATISTICS and ENDTIME.

        Args:
            attributes (dict): suite attributes.
        """
        for model_attr, rf_attr in (("end_time", "endtime"),
                                    ("status", "status"),
                                    ("statistics", "statistics")):
            setattr(self, model_attr, attributes.get(rf_attr, ""))
class Test(object):
    """Model of a Robot Framework test case used for Report Portal reporting."""

    def __init__(self, name: str, attributes: Dict[str, Any]) -> None:
        """Build the test model from listener attributes.

        Args:
            name: test name.
            attributes: test attributes from Robot Framework.
        """
        super(Test, self).__init__()
        self.name: str = name
        # Mandatory listener attributes (KeyError when absent, as before).
        self.critical: str = attributes["critical"]
        self.template: str = attributes["template"]
        self.tags: List[str] = attributes["tags"]
        self.doc: str = attributes["doc"]
        self.longname: str = attributes["longname"]
        self.robot_id: str = attributes["id"]
        self.start_time: str = attributes["starttime"]
        # Present only after the test has finished running.
        self.status: str = attributes.get("status", "")
        self.end_time: str = attributes.get("endtime", "")
        self.message: Dict[str, Any] = {}
        self.rp_item_type: str = "STEP"
        self.type: str = "TEST"
        self.setup: Optional[Keyword] = None
        self.teardown: Optional[Keyword] = None
        self.steps: List[Keyword] = []

    def update(self, attributes: Dict[str, Any]) -> None:
        """Refresh end-of-run fields: STATUS, TAGS (MESSAGE) and ENDTIME.

        Args:
            attributes (dict): test attributes.
        """
        get = attributes.get
        self.status = get("status", "")
        self.tags = get("tags", [])
        self.end_time = get("endtime", "")
class Keyword(object):
    """Model of a Robot Framework keyword used for Report Portal reporting."""

    def __init__(self, name: str, attributes: Dict[str, Any], parent: Union[Suite, Test, "Keyword"]) -> None:
        """Build the keyword model from listener attributes.

        Args:
            name: keyword name with library name.
            attributes: keyword attributes from Robot Framework.
            parent: parent object, may be Keyword, Test or Suite.
        """
        super(Keyword, self).__init__()
        self.name = name
        self.parent = parent
        # Listener-supplied attributes.
        self.libname: str = attributes["libname"]
        self.keyword_name: str = attributes["kwname"]
        self.doc: str = attributes["doc"]
        self.tags: List[str] = attributes["tags"]
        self.args: List[str] = attributes["args"]
        self.assign: List[str] = attributes["assign"]
        self.start_time: str = attributes["starttime"]
        self.end_time: str = attributes.get("endtime", "")
        self.status: str = attributes.get("status", "")
        self.type: str = attributes["type"]
        # Child data and the cached Report Portal item type.
        self.messages: List[Dict[str, Any]] = []
        self.steps: List[Keyword] = []
        self._rp_item_type: Optional[str] = None

    @property
    def rp_item_type(self) -> str:
        """Get Report Portal item type (cached after first access).

        Returns:
            Item type: BEFORE_*/AFTER_* for setup/teardown, STEP otherwise.
        """
        if self._rp_item_type is None:
            prefix = {"Setup": "BEFORE", "Teardown": "AFTER"}.get(self.type)
            self._rp_item_type = f"{prefix}_{self.parent.type}" if prefix else "STEP"
        return self._rp_item_type

    @property
    def is_wuks(self) -> bool:
        """Define if current keyword is WUKS.

        Returns:
            True if this keyword is Wait Until Keyword Succeeds, else - False.
        """
        return self.name == u"BuiltIn.Wait Until Keyword Succeeds"

    @property
    def is_top_level(self) -> bool:
        """Check keyword at top level or not.

        Returns:
            Boolean.
        """
        if not isinstance(self.parent, Keyword):
            return self.rp_item_type == "STEP"
        return self.parent.rp_item_type != "STEP" and self.rp_item_type == "STEP"

    @property
    def is_setup_or_teardown(self) -> bool:
        """Check keyword is at Setup/Teardown or not.

        Returns:
            Boolean.
        """
        return self.rp_item_type != "STEP"

    def get_name(self) -> str:
        """Get keyword name.

        Returns:
            Name is cropped up to 256 characters.
        """
        pieces = []
        if self.assign:
            pieces.append(", ".join(self.assign) + " = ")
        pieces.append(f"{self.name} ({', '.join(self.args)})")
        return "".join(pieces)[:256]

    def update(self, attributes: Dict[str, Any]) -> None:
        """Refresh end-of-run fields: STATUS and ENDTIME.

        Args:
            attributes (dict): keyword attributes.
        """
        get = attributes.get
        self.status = get("status", "")
        self.end_time = get("endtime", "")
import logging
from os import environ
from typing import Any, Dict, List, Optional, Union
from robot.api import ExecutionResult
from robot.libraries.BuiltIn import BuiltIn
from robot.utils import get_error_message
from .model import Keyword, Test, Suite
from .service import RobotService
from .variables import Variables
from .message import MessageFormatter
from .service import timestamp
from .report_modifier import RobotFrameworkReportModifier
# The id of the first suite keyword in the Robot Framework html log.
FIRST_SUITE_ID = "s1"
# The key for using the Report Portal launch ID between the pabotlib threads.
PABOT_LIB_LAUNCH_ID = "PABOT_LIB_LAUNCH_ID"
# The key for blocking the initial initialization of the Report Portal launch.
PABOT_LIB_LAUNCH_LOCK = "PABOT_LIB_LAUNCH_LOCK"
# Disable redundant logging in the report portal client (suppress below WARNING).
logging.getLogger(name="reportportal_client").setLevel(logging.WARNING)
logging.getLogger(name="urllib3").setLevel(logging.WARNING)
# noinspection PyPep8Naming
class reportportal_listener(object): # noqa
"""Listener for working with Report Portal."""
ROBOT_LISTENER_API_VERSION = 2
builtin_lib: BuiltIn = BuiltIn()
def __init__(self, launch_id: str = None) -> None:
    """Init Report Portal listener.

    Args:
        launch_id: id of an existing Report Portal launch to log test results into.
    """
    self._launch_id = launch_id
    self._service = RobotService
    self._variables = Variables()
    self._pabot_used: Optional[str] = None
    # Execution model objects, populated as the run progresses.
    self._suite: Optional[Suite] = None
    self._test: Optional[Test] = None
    self._keyword: Optional[Keyword] = None
    self._current_scope: Union[Suite, Test, Keyword, None] = None
@property
def suite(self) -> Suite:
    """Currently executing suite.

    Raises:
        RuntimeError if suite does not exists.

    Returns:
        model.Suite class instance.
    """
    current = self._suite
    if current is None:
        raise RuntimeError("Suite not running.")
    return current
@property
def test(self) -> Test:
    """Currently executing test.

    Raises:
        RuntimeError if test does not exists.

    Returns:
        model.Test class instance.
    """
    current = self._test
    if current is None:
        raise RuntimeError("Test not running")
    return current
@property
def keyword(self) -> Keyword:
    """Currently executing keyword.

    Raises:
        RuntimeError if keyword is not exists.

    Returns:
        model.Keyword class instance.
    """
    current = self._keyword
    if current is None:
        raise RuntimeError("Keyword not running.")
    return current
@property
def current_scope(self) -> Union[Suite, Test, Keyword]:
"""Gets current scope.
Raises:
RuntimeError if current scope is not set.
Returns:
model.Keyword, model.Test or model.Suite class instance.
"""
if self._current_scope is None:
raise RuntimeError("Current scope is not set.")
return self._current_scope
    @property
    def pabot_used(self) -> Optional[str]:
        """Get status of using pabot for test execution.

        Returns:
            Cached value of Pabotlib URI (falsy when pabot is not in use).
        """
        if not self._pabot_used:
            # Cache the variable lookup; while it stays falsy it is simply
            # re-queried on the next access.
            self._pabot_used = self.builtin_lib.get_variable_value(name="${PABOTLIBURI}")
        return self._pabot_used
    def log_message(self, message: Dict[str, str]) -> None:
        """Log message of current executing keyword.

        Adds log message to current keyword.
        Message will be added if keyword is at top level or keyword type is setup/teardown,
        keyword is not WUKS and message level is not "FAIL".

        Args:
            message: current message passed from test by test executor.
        """
        if self.keyword.is_top_level or self.keyword.is_setup_or_teardown:
            if not self.keyword.is_wuks and message["level"] != "FAIL":
                # _prepare_message converts the timestamp, maps the level and
                # formats the text before the message is stored.
                message = self._prepare_message(message)
                self.keyword.messages.append(message)
    def _init_service(self) -> None:
        """Init report portal service."""
        # Setting launch id for report portal service.
        # Endpoint/project/uuid are read from Robot Framework variables.
        self._service.init_service(endpoint=self._variables.endpoint, project=self._variables.project,
                                   uuid=self._variables.uuid)
    def start_suite(self, name: str, attributes: Dict[str, Any]) -> None:
        """Do additional actions before suite start.

        Create new launch in report portal if it is not created yet, or create new suite with tests.
        Depends on stage of test execution.

        Args:
            name: suite name.
            attributes: suite attributes dictionary.
        """
        if self._service.rp is None:
            self._init_service()
        self._suite = self._current_scope = Suite(attributes=attributes)
        if attributes["id"] == FIRST_SUITE_ID and self._service.rp:
            # If launch id is specified - use it.
            # Otherwise, create launch automatically.
            if self._launch_id is not None:
                self._service.rp.launch_id = self._launch_id
            else:
                # In case running tests using robot we can create launch automatically.
                if self.pabot_used:
                    raise Exception("Pabot used but launch_id is not provided. "
                                    "Please, correctly initialize listener with launch_id argument.")
                # Fill launch description with contents of corresponding variable value.
                self.suite.doc = self._variables.launch_doc
                # Automatically create new report portal launch and save it into the service instance.
                self._service.rp.launch_id = self._service.start_launch(launch_name=self._variables.launch_name,
                                                                        launch_tags=self._variables.launch_tags,
                                                                        launch=self.suite)
        # Only suites that directly contain tests are started in the service.
        if attributes["tests"]:
            self._service.start_suite(suite=self.suite)
    def end_suite(self, name: str, attributes: Dict[str, Any]) -> None:
        """Do additional actions after suite run.

        Send tests logs of current suite to Report Portal.
        Close report portal launch or finish current suite with corresponding status.

        Args:
            name: suite name.
            attributes: suite attributes.
        """
        self.suite.update(attributes=attributes)
        if attributes["tests"]:
            if attributes["message"]:
                # A suite-level failure message is attached as a FAIL entry.
                msg = {"message": attributes["message"], "level": "FAIL", "timestamp": self.suite.end_time}
                self.suite.message = self._prepare_message(msg)
            self._rp_log_tests()
            self._service.finish_suite(suite=self.suite)
        if attributes["id"] == FIRST_SUITE_ID:
            # If we create a launch from the outside of the script,
            # finishing launch should be made outside too.
            # Otherwise it is possible to finish a launch for several times.
            if self._launch_id is None:
                # If we run tests without pabot then we use robot,
                # thus we can finish a launch automatically.
                if not self.pabot_used:
                    self._service.finish_launch(launch=self.suite)
def start_test(self, name: str, attributes: Dict[str, Union[str, List[str]]]) -> None:
"""Do additional actions before test run.
Create Test model for current test.
Args:
name: test name.
attributes: test attributes.
"""
self._test = self._current_scope = Test(name=name, attributes=attributes)
    def end_test(self, name: str, attributes: Dict[str, Union[str, List[str]]]) -> None:
        """Do additional actions after test run.

        Update Test model and add to current suite.

        Args:
            name: test name.
            attributes: test attributes.
        """
        self.test.update(attributes=attributes)
        if attributes["message"]:
            msg = {"message": attributes["message"], "level": "FAIL", "timestamp": self.test.end_time}
            if "skipped" in self.test.tags:
                # Tests tagged "skipped" are reported as SKIP with a warning
                # message instead of a failure.
                self.test.status = "SKIP"
                msg["level"] = "WARN"
            self.test.message = self._prepare_message(msg)
        self.suite.tests.append(self.test)
        # The test is over; subsequent messages belong to the suite again.
        self._current_scope = self.suite
    def start_keyword(self, name: str, attributes: Dict[str, Union[str, List[str]]]) -> None:
        """Do additional actions before keyword starts.

        Create Keyword model for current keyword, if it is at top level or a fixture.
        Add Keyword model to corresponding parent model.

        Args:
            name: keyword name.
            attributes: keyword attributes.
        """
        self._keyword = self._current_scope = Keyword(name=name, attributes=attributes, parent=self.current_scope)
        if self.keyword.is_setup_or_teardown and isinstance(self.keyword.parent, Test):
            # Fixture keywords inherit the tags of the test they belong to.
            self.keyword.tags = self.keyword.parent.tags
        if attributes["type"] == "Setup" and not isinstance(self.keyword.parent, Keyword):
            self.keyword.parent.setup = self.keyword
        elif attributes["type"] == "Teardown" and not isinstance(self.keyword.parent, Keyword):
            self.keyword.parent.teardown = self.keyword
        elif self._keyword.is_top_level and not isinstance(self.keyword.parent, Suite):
            # Regular top-level keywords become steps of their test/keyword parent.
            self.keyword.parent.steps.append(self.keyword)
    def end_keyword(self, name: str, attributes: Dict[str, Union[str, List[str]]]) -> None:
        """Do additional actions after keyword ends.

        Update current Keyword model.
        For keywords with type "BEFORE_SUITE" and "AFTER_SUITE" send logs to Report Portal.

        Args:
            name: keyword name.
            attributes: keyword attributes.
        """
        self.keyword.update(attributes=attributes)
        if self.keyword.is_setup_or_teardown:
            error_message = get_error_message()
            message = {"message": error_message, "level": "FAIL", "timestamp": self.keyword.end_time}
            if self.keyword.status == "FAIL":
                message = self._prepare_message(message=message)
                self.keyword.messages.append(message)
            elif "Skip tests:" in error_message:
                # A "Skip tests:" marker downgrades the keyword to SKIP/WARN.
                self.keyword.status = "SKIP"
                message["level"] = "WARN"
                message = self._prepare_message(message=message)
                self.keyword.messages.append(message)
        if self.keyword.rp_item_type in ["BEFORE_SUITE", "AFTER_SUITE"]:
            # Suite-level fixtures are sent to Report Portal immediately.
            self._rp_log_fixture_keyword(keyword=self.keyword)
        # Pop back to the parent scope; keep self._keyword pointing at the
        # enclosing keyword when keywords are nested.
        self._current_scope = self.keyword.parent
        if isinstance(self.current_scope, Keyword):
            self._keyword = self.current_scope
def output_file(self, path: str) -> None:
"""Called when writing to an output file is ready.
Adds Report Portal links to output file.
Args:
path: absolute path to output file.
"""
result = ExecutionResult(path)
result.visit(RobotFrameworkReportModifier(robot_service=RobotService))
result.save()
    def close(self) -> None:
        """Called when the whole test execution ends.

        Terminating service.
        """
        # Final listener hook: shut down the shared Report Portal service.
        self._service.terminate_service()
    def _rp_log_steps(self, steps: List[Keyword], additional_msgs: Optional[List[Dict[str, Any]]] = None) -> None:
        """Send steps logs of test or keyword to Report Portal.

        Args:
            steps (list): test or keyword steps, contain Keyword models.
            additional_msgs (list): additional messages, they will be logged last.
        """
        messages = []
        for step in steps:
            # Each step is announced by an INFO line followed by its own messages.
            msg = {"message": step.name, "level": "INFO", "timestamp": step.start_time}
            messages.append(self._prepare_message(msg))
            messages.extend(step.messages)
        if additional_msgs:
            messages.extend(additional_msgs)
        self._service.log(log_data=messages)
    def _rp_log_fixture_keyword(self, keyword: Optional[Keyword]) -> None:
        """Send fixture keyword logs to Report Portal.

        No-op when keyword is None (e.g. a test without a setup/teardown).

        Args:
            keyword: logging keyword.
        """
        if keyword:
            self._service.start_keyword(keyword=keyword)
            if keyword.steps:
                # Keep ERROR level messages visible alongside the step log.
                error_messages = [msg for msg in keyword.messages if msg["level"] == "ERROR"]
                self._rp_log_steps(steps=keyword.steps, additional_msgs=error_messages)
            else:
                self._service.log(log_data=keyword.messages)
            self._service.finish_keyword(keyword=keyword)
    def _rp_log_tests(self) -> None:
        """Send tests logs of current suite to Report Portal."""
        for test in self.suite.tests:
            error_msg = self._get_test_error(test=test)
            if error_msg:
                if test.status != "SKIP":
                    test.status = "FAIL"
                if environ.get("STACK_TRACE_DESCRIPTION") == '1':
                    # Optionally mirror the error text into the test description.
                    test.doc += f"\n```error\n{error_msg['message']}\n```"
            self._service.start_test(test=test)
            additional_msgs = [error_msg] if error_msg else None
            if test.setup or test.teardown:
                self._rp_log_fixture_keyword(keyword=test.setup)
                if test.setup:
                    test.start_time = test.setup.end_time
                # NOTE(review): start_test was already called above; calling it
                # again here looks like it would create a duplicate test item
                # in Report Portal — confirm whether the first call is redundant.
                self._service.start_test(test=test)
                self._rp_log_steps(steps=test.steps, additional_msgs=additional_msgs)
                self._service.finish_test(test=test)
                self._rp_log_fixture_keyword(keyword=test.teardown)
            else:
                self._rp_log_steps(steps=test.steps, additional_msgs=additional_msgs)
                self._service.finish_test(test=test)
    def _prepare_message(self, message: Dict[str, Any]) -> Dict[str, Any]:
        """Prepare message for sending to Report Portal.

        Mutates and returns the given dict: converts the Robot Framework
        timestamp, maps the log level, and applies MessageFormatter.

        Args:
            message (dict): message for preparing.

        Returns:
            Message dictionary, contains LEVEL, MESSAGE, TIME, ATTACHMENT.
        """
        message["timestamp"] = timestamp(rf_time=message["timestamp"])
        message["level"] = self._service.log_level_mapping[message["level"]]
        message = MessageFormatter.format_message(message=message, keyword_name=self.keyword.name)
        return message
def _get_test_error(self, test: Test) -> Dict[str, Any]:
"""Gets test error considering Suite errors.
Args:
test: instance of Test model.
Returns:
Test error message.
"""
if getattr(self.suite.setup, "status", None) == "FAIL":
error_msg = self.suite.message
elif getattr(self.suite.teardown, "status", None) == "FAIL":
if test.message:
error_msg = test.message
error_msg["message"] = f"{test.message['message']}\n\n{self.suite.message['message']}"
else:
error_msg = self.suite.message
else:
error_msg = test.message
return error_msg | /robotframework-reportportal-ng-2.0.0.tar.gz/robotframework-reportportal-ng-2.0.0/reportportal_listener/__init__.py | 0.877207 | 0.165357 | __init__.py | pypi |
from robot.api import logger
from .model import LogMessage
def write(msg, level='INFO', html=False, attachment=None, launch_log=False):
    """Write the message to the log file using the given level.

    Valid log levels are ``TRACE``, ``DEBUG``, ``INFO`` (default since RF
    2.9.1), ``WARN``, and ``ERROR`` (new in RF 2.9). Additionally it is
    possible to use ``HTML`` pseudo log level that logs the message as HTML
    using the ``INFO`` level.

    Attachment should contain a dict with "name", "data" and "mime" values
    defined. See module example.

    Instead of using this method, it is generally better to use the level
    specific methods such as ``info`` and ``debug`` that have separate
    keyword arguments for controlling the message format.

    :param msg: message to log
    :param level: log level
    :param html: format or not format the message as html.
    :param attachment: a binary content to attach to the log entry
    :param launch_log: put the log entry on Launch level
    """
    log_message = LogMessage(msg)
    log_message.level = level
    log_message.attachment = attachment
    log_message.launch_log = launch_log
    logger.write(log_message, level, html)
def trace(msg, html=False, attachment=None, launch_log=False):
    """Log *msg* at ``TRACE`` level; see `write` for argument details."""
    write(msg, level="TRACE", html=html, attachment=attachment,
          launch_log=launch_log)
def debug(msg, html=False, attachment=None, launch_log=False):
    """Log *msg* at ``DEBUG`` level; see `write` for argument details."""
    write(msg, level="DEBUG", html=html, attachment=attachment,
          launch_log=launch_log)
def info(msg, html=False, also_console=False, attachment=None,
         launch_log=False):
    """Log *msg* at ``INFO`` level; see `write` for argument details.

    When ``also_console`` is true, the message is additionally echoed to
    the console.
    """
    write(msg, level="INFO", html=html, attachment=attachment,
          launch_log=launch_log)
    if also_console:
        console(msg)
def warn(msg, html=False, attachment=None, launch_log=False):
    """Log *msg* at ``WARN`` level; see `write` for argument details."""
    write(msg, level="WARN", html=html, attachment=attachment,
          launch_log=launch_log)
def error(msg, html=False, attachment=None, launch_log=False):
    """Log *msg* at ``ERROR`` level; see `write` for argument details."""
    write(msg, level="ERROR", html=html, attachment=attachment,
          launch_log=launch_log)
def console(msg, newline=True, stream="stdout"):
    """Write the message to the console.

    If the ``newline`` argument is ``True``, a newline character is
    automatically added to the message.

    By default the message is written to the standard output stream.
    Using the standard error stream is possible by giving the ``stream``
    argument value ``'stderr'``.
    """
    logger.console(msg, newline, stream)
import logging
from robot.api import ResultVisitor
# Module-level shared state for TimeVisitor:
# _stack — ids of visited nodes that still lack a starttime;
# corrections — node id -> [starttime, endtime] candidates gathered from children.
_stack = []
corrections = {}
class TimeVisitor(ResultVisitor):
    """Back-fills missing start/end times in a Robot Framework result tree.

    While visiting, ids of nodes that lack a starttime are pushed onto the
    module-level ``_stack``; candidate replacement times for them are
    accumulated in the module-level ``corrections`` dict
    (node id -> [starttime, endtime]) from the times observed on their
    descendants.
    """

    @staticmethod
    def _correct_starts(o, node_class):
        """
        starttime wants to be the oldest start time of its children.
        only correcting null starttime.
        """
        if o.starttime:
            corrected = False
            for parent_id in _stack:
                # Keep the oldest (smallest) start time seen so far.
                if corrections[parent_id][0] is None or \
                        corrections[parent_id][0] > o.starttime:
                    corrections[parent_id][0] = o.starttime
                    corrected = True
            if corrected:
                logging.debug(
                    "Correcting parents' starttime to {0} based on {2}={1}"
                    .format(o.starttime, o.id, node_class))
        else:
            # This node has no starttime itself: start tracking it.
            _stack.append(o.id)
            corrections[o.id] = [None, None]

    @staticmethod
    def _correct_ends(o, node_class):
        """
        endtime wants to be the newest end time of its children.
        only correcting null endtime.
        """
        if o.endtime:
            corrected = False
            for parent_id in _stack:
                # Keep the newest (largest) end time seen so far.
                if corrections[parent_id][1] is None or \
                        corrections[parent_id][1] < o.endtime:
                    corrections[parent_id][1] = o.endtime
                    corrected = True
            if corrected:
                logging.debug(
                    "Correcting parents' endtime to {0} based on {2}={1}"
                    .format(o.endtime, o.id, node_class))
        # Leaving the node: stop tracking it.
        if _stack and o.id == _stack[-1]:
            _stack.pop()

    def start_suite(self, suite):
        self._correct_starts(suite, "suite")

    def end_suite(self, suite):
        self._correct_ends(suite, "suite")

    def start_test(self, test):
        self._correct_starts(test, "test")

    def end_test(self, test):
        self._correct_ends(test, "test")

    def start_keyword(self, keyword):
        self._correct_starts(keyword, "kw")

    def end_keyword(self, keyword):
        self._correct_ends(keyword, "kw")
from robot.api import logger
from .model import LogMessage
def write(msg, level='INFO', html=False, attachment=None, launch_log=False):
    """Write the message to the log file using the given level.

    Valid log levels are ``TRACE``, ``DEBUG``, ``INFO`` (default since RF
    2.9.1), ``WARN``, and ``ERROR`` (new in RF 2.9). Additionally it is
    possible to use ``HTML`` pseudo log level that logs the message as HTML
    using the ``INFO`` level.

    Attachment should contain a dict with "name", "data" and "mime" values
    defined. See module example.

    Instead of using this method, it is generally better to use the level
    specific methods such as ``info`` and ``debug`` that have separate
    keyword arguments for controlling the message format.

    :param msg: message to log
    :param level: log level
    :param html: format or not format the message as html.
    :param attachment: a binary content to attach to the log entry
    :param launch_log: put the log entry on Launch level
    """
    log_message = LogMessage(msg)
    log_message.level = level
    log_message.attachment = attachment
    log_message.launch_log = launch_log
    logger.write(log_message, level, html)
def trace(msg, html=False, attachment=None, launch_log=False):
    """Log *msg* at ``TRACE`` level; see `write` for argument details."""
    write(msg, level="TRACE", html=html, attachment=attachment,
          launch_log=launch_log)
def debug(msg, html=False, attachment=None, launch_log=False):
    """Log *msg* at ``DEBUG`` level; see `write` for argument details."""
    write(msg, level="DEBUG", html=html, attachment=attachment,
          launch_log=launch_log)
def info(msg, html=False, also_console=False, attachment=None,
         launch_log=False):
    """Log *msg* at ``INFO`` level; see `write` for argument details.

    When ``also_console`` is true, the message is additionally echoed to
    the console.
    """
    write(msg, level="INFO", html=html, attachment=attachment,
          launch_log=launch_log)
    if also_console:
        console(msg)
def warn(msg, html=False, attachment=None, launch_log=False):
    """Log *msg* at ``WARN`` level; see `write` for argument details."""
    write(msg, level="WARN", html=html, attachment=attachment,
          launch_log=launch_log)
def error(msg, html=False, attachment=None, launch_log=False):
    """Log *msg* at ``ERROR`` level; see `write` for argument details."""
    write(msg, level="ERROR", html=html, attachment=attachment,
          launch_log=launch_log)
def console(msg, newline=True, stream="stdout"):
    """Write the message to the console.

    If the ``newline`` argument is ``True``, a newline character is
    automatically added to the message.

    By default the message is written to the standard output stream.
    Using the standard error stream is possible by giving the ``stream``
    argument value ``'stderr'``.
    """
    logger.console(msg, newline, stream)
import logging
from robot.api import ResultVisitor
# Module-level shared state for TimeVisitor:
# _stack — ids of visited nodes that still lack a starttime;
# corrections — node id -> [starttime, endtime] candidates gathered from children.
_stack = []
corrections = {}
class TimeVisitor(ResultVisitor):
    """Back-fills missing start/end times in a Robot Framework result tree.

    While visiting, ids of nodes that lack a starttime are pushed onto the
    module-level ``_stack``; candidate replacement times for them are
    accumulated in the module-level ``corrections`` dict
    (node id -> [starttime, endtime]) from the times observed on their
    descendants.
    """

    @staticmethod
    def _correct_starts(o, node_class):
        """
        starttime wants to be the oldest start time of its children.
        only correcting null starttime.
        """
        if o.starttime:
            corrected = False
            for parent_id in _stack:
                # Keep the oldest (smallest) start time seen so far.
                if corrections[parent_id][0] is None or \
                        corrections[parent_id][0] > o.starttime:
                    corrections[parent_id][0] = o.starttime
                    corrected = True
            if corrected:
                logging.debug(
                    "Correcting parents' starttime to {0} based on {2}={1}"
                    .format(o.starttime, o.id, node_class))
        else:
            # This node has no starttime itself: start tracking it.
            _stack.append(o.id)
            corrections[o.id] = [None, None]

    @staticmethod
    def _correct_ends(o, node_class):
        """
        endtime wants to be the newest end time of its children.
        only correcting null endtime.
        """
        if o.endtime:
            corrected = False
            for parent_id in _stack:
                # Keep the newest (largest) end time seen so far.
                if corrections[parent_id][1] is None or \
                        corrections[parent_id][1] < o.endtime:
                    corrections[parent_id][1] = o.endtime
                    corrected = True
            if corrected:
                logging.debug(
                    "Correcting parents' endtime to {0} based on {2}={1}"
                    .format(o.endtime, o.id, node_class))
        # Leaving the node: stop tracking it.
        if _stack and o.id == _stack[-1]:
            _stack.pop()

    def start_suite(self, suite):
        self._correct_starts(suite, "suite")

    def end_suite(self, suite):
        self._correct_ends(suite, "suite")

    def start_test(self, test):
        self._correct_starts(test, "test")

    def end_test(self, test):
        self._correct_ends(test, "test")

    def start_keyword(self, keyword):
        self._correct_starts(keyword, "kw")

    def end_keyword(self, keyword):
        self._correct_ends(keyword, "kw")
import json
import sys
import jsonschema
from robot.api.deco import keyword
from robot.errors import RobotError
from robot.libraries.BuiltIn import BuiltIn
from robot.libraries.Collections import Collections
from robot.libraries.OperatingSystem import OperatingSystem
from robot.utils import DotDict
def _format_response(response):
    """Return ``(response, DotDict)`` where the DotDict exposes ``json``,
    ``is_valid_json`` and ``headers`` for dot-notation access.

    ``json`` is a DotDict for JSON objects, the raw list for JSON arrays,
    or the plain response text when the body could not be handled as JSON.
    """
    response_dict = {}
    try:
        content_to_json = response.json()
        if not isinstance(content_to_json, list):
            # NOTE(review): DotDict(...items()) assumes a dict here; a JSON
            # scalar (string/number/bool) would raise and be flagged as
            # invalid JSON even though it parsed — confirm if intended.
            response_dict['json'] = DotDict(content_to_json.items())
        else:
            response_dict['json'] = content_to_json
        response_dict['is_valid_json'] = True
    except Exception:
        response_dict['json'] = response.text
        response_dict['is_valid_json'] = False
    try:
        response_dict['headers'] = DotDict(response.headers.items())
    except Exception:
        # Fall back to the raw headers object if it is not dict-like.
        response_dict['headers'] = response.headers
    return response, DotDict(response_dict.items())
def _to_json(content):
py3 = sys.version_info > (3,)
""" Convert a string to a JSON object
``content`` String content to convert into JSON
"""
if py3 and isinstance(content, bytes):
content = content.decode(encoding='utf-8')
json_ = json.loads(content)
return json_
def _format_list_content(content):
    """Return *content* as a list, raising RobotError otherwise.

    Accepts either an already-parsed list, or a JSON string/bytes payload
    that must decode to a list.
    """
    if isinstance(content, list):
        return content
    try:
        content = _to_json(content)
    except Exception:
        raise RobotError("Response is not a list")
    # BUG FIX: previously a JSON object (dict) or scalar parsed without
    # error and was returned as-is, so list-only keywords silently accepted
    # non-list payloads; reject them explicitly.
    if not isinstance(content, list):
        raise RobotError("Response is not a list")
    return content
class ApiLibKeywords(object):
ROBOT_LIBRARY_SCOPE = 'GLOBAL'
    def __init__(self):
        # 'errori' (Italian for "errors") accumulates mismatch descriptions
        # for the recursive dictionary comparison keywords.
        self.errori = []
        self.collections = Collections()
        self.builtin = BuiltIn()
        self.operatingsystem = OperatingSystem()
@keyword
def response_content_is_valid_json(self, response):
"""
Verifies that response content is a valid JSON
``response`` is either the Response object from RequestsLibrary or
the DotDict created by RequestsExtension after a request
"""
if "is_valid_json" in response:
if not (response["is_valid_json"]):
raise RobotError('Response content is not a valid json')
else:
try:
_to_json(content=response.content)
except Exception:
raise RobotError('Response content is not a valid json')
    @keyword
    def response_content_should_contain_keys(self, content, *keys):
        """
        Verifies that response content (a valid dictionary) contains certain keys

        ``content`` is the response content dict (a JSON string/bytes payload
        is parsed first)

        ``*keys`` are the dictionary keys to be found in response content
        """
        if isinstance(content, (str, bytes, bytearray)):
            content = _to_json(content)
        self.collections.list_should_contain_sub_list(dict(content).keys(), keys)
@keyword
def response_headers_should_contain_keys(self, headers, *keys):
"""
Verifies that response headers (a valid dictionary) contains certain keys
``headers`` is the response headers dict\n\n``*keys`` are the
dictionary keys to be found in response headers
"""
self.response_content_should_contain_keys(headers, *keys)
    @keyword
    def each_response_element_should_contain_keys(self, content, *keys):
        """
        Verifies that each element of response content (a valid list)
        contains certain keys

        ``content`` is the response content list (or JSON parsed into one)

        ``*keys`` are the dictionary keys to be found in each element
        """
        content = _format_list_content(content)
        for item in content:
            # Fails on the first element missing any required key.
            self.collections.list_should_contain_sub_list(item.keys(), keys)
    @keyword
    def each_response_element_of_key_should_be_contained_in_list(self, content, key, *_list):
        """
        Checks that each element of a ``content``, having a certain ``key``,
        has a value contained in a ``*_list``

        ``content`` is the response content (a valid JSON) that represents a list

        ``key`` is the key whose value is to be obtained, it can be
        specified via dot-notation

        ``*_list`` should contain the retrieved value
        """
        content = _format_list_content(content)
        key = key.split(".")
        errors = []
        for item in content:
            for k in key:
                # NOTE(review): if an intermediate key is missing, item
                # becomes None and the next .get() raises AttributeError —
                # confirm whether missing keys should fail more gracefully.
                item = item.get(k)
            try:
                self.collections.list_should_contain_value(_list, item)
            except Exception as e:
                errors.append(str(e))
        if len(errors) > 0:
            # Aggregate all per-element failures into a single report.
            self.builtin.fail("\n".join(errors))
@keyword
def response_content_should_be_a_list(self, content):
"""
Verifies that ``content`` (a valid JSON) represents a list
"""
_format_list_content(content)
    @keyword
    def validate_json_with_schema(self, instance, schema):
        """
        Verifies that a JSON ``instance`` is validated by a ``schema``

        Raises ``jsonschema.ValidationError`` when validation fails.
        """
        jsonschema.validate(instance=instance, schema=schema)
@keyword
def get_request_formatted(self, *args, **kwargs):
"""
Send a GET request on the session object found using the given alias
This keyword returns two variables, the first is the request response,
the second is a dictionary containing ``headers`` and ``json`` keys
having as values dotdict objects and a boolean ``is_valid_json``
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``data`` a dictionary of key-value pairs that will be urlencoded and
sent as GET data or binary data that is sent as the raw body content
``json`` a value that will be json encoded and sent as GET data if data
s not specified
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect
following is allowed.
``timeout`` connection timeout
"""
requests_library = BuiltIn().get_library_instance('RequestsLibrary')
response = requests_library.get_on_session(*args, **kwargs)
return _format_response(response)
@keyword
def post_request_formatted(self, *args, **kwargs):
"""
Send a POST request on the session object found using the given alias
This keyword returns two variables, the first is the request response,
the second is a dictionary containing ``headers`` and ``json`` keys having
as values dotdict objects and a boolean ``is_valid_json``
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the POST request to
``data`` a dictionary of key-value pairs that will be urlencoded and sent
as POST data or binary data that is sent as the raw body content or passed
as such for multipart form data if files is also defined
``json`` a value that will be json encoded and sent as POST data if
files or data is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
requests_library = BuiltIn().get_library_instance('RequestsLibrary')
response = requests_library.post_on_session(*args, **kwargs)
return _format_response(response)
@keyword
def put_request_formatted(self, *args, **kwargs):
"""
Send a PUT request on the session object found using the given alias
This keyword returns two variables, the first is the request response,
the second is a dictionary containing ``headers`` and ``json`` keys having
as values dotdict objects and a boolean ``is_valid_json``
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``data`` a dictionary of key-value pairs that will be urlencoded and sent as
PUT data or binary data that is sent as the raw body content or passed as such
for multipart form data if files is also defined
``json`` a value that will be json encoded and sent as PUT data if files or data
is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PUT to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
requests_library = BuiltIn().get_library_instance('RequestsLibrary')
response = requests_library.put_on_session(*args, **kwargs)
return _format_response(response)
@keyword
def options_request_formatted(self, *args, **kwargs):
"""
Send a OPTIONS request on the session object found using the given alias
This keyword returns two variables, the first is the request response, the second
is a dictionary containing ``headers`` and ``json`` keys having as values dotdict
objects and a boolean ``is_valid_json``
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
requests_library = BuiltIn().get_library_instance('RequestsLibrary')
response = requests_library.options_on_session(*args, **kwargs)
return _format_response(response)
@keyword
def patch_request_formatted(self, *args, **kwargs):
"""
Send a PATCH request on the session object found using the given alias
This keyword returns two variables, the first is the request response,
the second is a dictionary containing ``headers`` and ``json`` keys having
as values dotdict objects and a boolean ``is_valid_json``
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded and sent as
PATCH data or binary data that is sent as the raw body content or passed
as such for multipart form data if files is also defined
``json`` a value that will be json encoded and sent as PATCH data if files
or data is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
requests_library = BuiltIn().get_library_instance('RequestsLibrary')
response = requests_library.patch_on_session(*args, **kwargs)
return _format_response(response)
@keyword
def head_request_formatted(self, *args, **kwargs):
"""
Send a HEAD request on the session object found using the given alias
This keyword returns two variables, the first is the request response,
the second is a dictionary containing ``headers`` and ``json`` keys
having as values dotdict objects and a boolean ``is_valid_json``
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect
following is allowed.
``timeout`` connection timeout
"""
requests_library = BuiltIn().get_library_instance('RequestsLibrary')
response = requests_library.head_on_session(*args, **kwargs)
return _format_response(response)
@keyword
def delete_request_formatted(self, *args, **kwargs):
"""
Send a DELETE request on the session object found using the given alias
This keyword returns two variables, the first is the request response,
the second is a dictionary containing ``headers`` and ``json`` keys having
as values dotdict objects and a boolean ``is_valid_json``
alias that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``json`` a value that will be json encoded and sent as request data if data
is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
requests_library = BuiltIn().get_library_instance('RequestsLibrary')
response = requests_library.delete_on_session(*args, **kwargs)
return _format_response(response)
    @keyword
    def each_response_element_should_contain_dictionary(self, content, **dictionary):
        """
        Verifies that each element of response content (a valid list)
        contains a dictionary recursively (See Subdictionary is Contained
        in Dictionary Recursive kw)

        ``content`` is the response content dict

        ``**dictionary`` is the dictionary that is part of each
        element of response content
        """
        content = _format_list_content(content)
        for item in content:
            # Each element is checked independently; the comparison keyword
            # raises on the first failing element.
            self.subdictionary_is_contained_in_dictionary_recursive(dictionary, item)
@keyword
def response_headers_should_contain_dictionary(self, headers, **dictionary):
"""
Verifies that response ``headers``recursively contains a
``**dictionary`` (See `Subdictionary is Contained in Dictionary
Recursive` keyword)
"""
self.subdictionary_is_contained_in_dictionary_recursive(dictionary, headers)
@keyword
def response_content_should_contain_dictionary(self, content, **dictionary):
"""
Verifies that response ``content``recursively contains a
``**dictionary`` (See `Subdictionary is Contained in Dictionary
Recursive` keyword)
"""
self.subdictionary_is_contained_in_dictionary_recursive(dictionary, content)
@keyword
def format_uri_with_params(self, uri, *args, **kwargs):
"""
Gets a string that specifies a ``uri`` and formats
ordinal or named braces with ``*args`` or ``**kwargs``
"""
uri = uri.format(*args, **kwargs)
return uri
@keyword
def to_json_dict(self, content):
return _to_json(content)
    @keyword
    def subdictionary_is_contained_in_dictionary_recursive(self, dict_a, dict_b):
        """
        Verifies that ``dict_a`` is recursively contained in ``dict_b``,
        namely it checks that items of ``dict_a`` and all its
        subdictionaries and lists are contained in ``dict_b``

        ``dict_a`` is the dict that needs to be contained

        ``dict_b`` is the container dict
        """
        # Reset the error accumulator from any previous comparison.
        self.errori = []
        if dict_a != dict_b:
            self._dict_compare_rec(dict_a, dict_b)
        if len(self.errori) > 0:
            raise RobotError("\n".join(self.errori))
def _dict_compare_rec(self, dict_a, dict_b):
    """Recursively compare ``dict_a`` against ``dict_b`` and record every
    mismatch as a message in ``self.errori``.

    For each key of ``dict_a`` it checks that: the key exists in
    ``dict_b``; both values have the same type; nested dicts are
    compared recursively; lists are contained element-wise; scalar
    values are equal.
    """
    for key_a in dict_a.keys():
        value_a = dict_a.get(key_a)
        if key_a not in dict_b:
            self.errori.append("Key: %s not in dict_b" % key_a)
        else:
            value_b = dict_b.get(key_a)
            if value_a != value_b:
                if not isinstance(value_a, type(value_b)):
                    self.errori.append(
                        "Key: %s, Type: %s - Key %s, Type: %s, DIFFERENT TYPES"
                        % (key_a, type(value_a), key_a, type(value_b)))
                else:
                    if isinstance(value_a, dict):
                        self._dict_compare_rec(value_a, value_b)
                    elif isinstance(value_a, list):
                        # BUG FIX: the containment check was inverted — it
                        # reported an error exactly when every element WAS
                        # contained. Report only when containment fails.
                        contained = all(elem in value_b for elem in value_a)
                        if not contained:
                            self.errori.append(
                                "List: %s is not contained in list: %s"
                                % (value_a, value_b))
                    elif value_a != value_b:
                        self.errori.append(
                            "Key: %s, Value: %s != Key %s, Value: %s"
                            % (key_a, value_a, key_a, value_b))
@keyword
def get_dotdict_from_file(self, file_path):
    """Read the JSON file at ``file_path`` and return its content as a
    Robot Framework DotDict, whose keys are accessible via dot-notation.
    """
    raw_text = self.operatingsystem.get_file(file_path)
    parsed = _to_json(raw_text)
    return DotDict(parsed.items())
@keyword
def check_response_length_limit(self, content, limit):
    """Verify that ``content`` (a list, or a JSON string/bytes decoding
    to a list) contains at most ``limit`` elements.

    Raises RobotError when the length exceeds the limit.
    """
    if isinstance(content, (str, bytes, bytearray)):
        content = _to_json(content)
    actual_length = len(content)
    if actual_length > int(limit):
        raise RobotError('Length is {}, limit is {}'.format(actual_length, limit))
import requests
import robot
from RequestsLibrary import log
from RequestsLibrary.compat import urljoin
from RequestsLibrary.utils import is_file_descriptor, warn_if_equal_symbol_in_url_session_less
from robot.api.deco import keyword
from robot.libraries.BuiltIn import BuiltIn
class RequestsKeywords(object):
    # One library instance is shared across the whole test run.
    ROBOT_LIBRARY_SCOPE = 'Global'

    def __init__(self):
        # Named cache of HTTP sessions, keyed by the user-supplied alias.
        self._cache = robot.utils.ConnectionCache('No sessions created')
        self.builtin = BuiltIn()
        self.debug = 0
        # The following variables are related to session but used in _common_request :(
        self.timeout = None
        self.cookies = None
        # Most recent response; assertion keywords (e.g. `Status Should Be`)
        # fall back to it when no explicit response is passed.
        self.last_response = None
def _common_request(
        self,
        method,
        session,
        uri,
        **kwargs):
    """Perform ``method`` (e.g. ``'get'``) either on ``session`` or,
    when ``session`` is None, session-less via the ``requests`` module.

    Logs the request and response, stores the response in
    ``self.last_response`` and returns it. ``timeout`` and ``cookies``
    kwargs fall back to the defaults kept on ``self``.
    """
    if session:
        method_function = getattr(session, method)
    else:
        # Session-less keyword: resolve e.g. requests.get directly.
        method_function = getattr(requests, method)
    self._capture_output()
    resp = method_function(
        self._merge_url(session, uri),
        timeout=self._get_timeout(kwargs.pop('timeout', None)),
        cookies=kwargs.pop('cookies', self.cookies),
        **kwargs)
    log.log_request(resp)
    self._print_debug()
    log.log_response(resp)
    self.last_response = resp
    # file descriptors should be closed for files parameter as well
    # NOTE(review): only the 'data' descriptor is closed below; descriptors
    # passed via 'files' appear to stay open — confirm whether intended.
    data = kwargs.get('data', None)
    if is_file_descriptor(data):
        data.close()
    return resp
@staticmethod
def _merge_url(session, uri):
    """Join the session base url (if any) with the request ``uri``.

    Relies on ``urljoin``, which copes well with duplicate slashes but
    is counter-intuitive for uris starting with '/'; a full url
    (http://...) passed as ``uri`` overrides the session base entirely.
    """
    if not session:
        return urljoin('', uri)
    base = session.url
    if uri:
        # Ensure the base ends with '/' and the uri does not start with
        # one, so urljoin appends instead of replacing the last segment.
        if not base.endswith('/'):
            base = base + '/'
        if uri.startswith('/'):
            uri = uri[1:]
    return urljoin(base, uri)
@keyword("Status Should Be")
def status_should_be(self, expected_status, response=None, msg=None):
    """Fail unless the response status code matches ``expected_status``.

    ``expected_status`` may be the code number as an integer or string,
    or a named status such as 'ok', 'created', 'accepted', 'bad request'
    or 'not found'.

    ``response`` is the output of other requests keywords like `GET` or
    `GET On Session`; when omitted the last response is used. On
    mismatch an HTTPError is raised; a custom failure message ``msg``
    can be supplied like in built-in keywords.

    Request keywords already assert the status implicitly, so this
    keyword is mainly useful after disabling that implicit assert with
    ``expected_status=anything`` — e.g. inside a shared keyword that may
    return either OK or error responses.
    """
    response = response or self.last_response
    self._check_status(expected_status, response, msg)
@keyword("Request Should Be Successful")
def request_should_be_successful(self, response=None):
    """Fail if the response status code is a client or server error
    (4xx, 5xx).

    ``response`` defaults to the last response returned by a request
    keyword; on failure the underlying HTTPError is raised. For exact
    status assertions see `Status Should Be`.
    """
    self._check_status(None, response or self.last_response, msg=None)
@staticmethod
@keyword("Get File For Streaming Upload")
def get_file_for_streaming_upload(path):
    """Open ``path`` in binary read-only mode and return the file
    descriptor, to be passed as the ``data`` parameter of request
    keywords.

    This enables streaming upload of large files without reading them
    into memory. Request keywords close the descriptor automatically;
    any other caller is responsible for closing it.
    """
    return open(path, 'rb')
@keyword('GET')
@warn_if_equal_symbol_in_url_session_less
def session_less_get(self, url, params=None,
                     expected_status=None, msg=None, **kwargs):
    """Sends a GET request to ``url``.

    Query string parameters can be given in ``params`` as a string,
    dictionary, list of tuples or bytes.

    By default the keyword fails when an error status code is returned;
    tune this with ``expected_status`` / ``msg`` (see `Status Should
    Be`) or disable it entirely with ``expected_status=any`` or
    ``anything``.

    Any other option accepted by the Requests library — ``data``,
    ``json``, ``headers``, ``cookies``, ``files``, ``auth``,
    ``timeout``, ``allow_redirects``, ``proxies``, ``verify``,
    ``stream``, ``cert`` — can be passed through ``**kwargs``; see
    https://requests.readthedocs.io/en/latest/api/ for the details.

    Returns the Response object.
    """
    resp = self._common_request('get', None, url, params=params, **kwargs)
    self._check_status(expected_status, resp, msg)
    return resp
@keyword('POST')
@warn_if_equal_symbol_in_url_session_less
def session_less_post(self, url, data=None, json=None,
                      expected_status=None, msg=None, **kwargs):
    """Sends a POST request to ``url``.

    The request body is taken from ``data`` (dictionary, list of
    tuples, bytes or file-like object) or, for a json body, from
    ``json`` (a dictionary).

    By default the keyword fails on error status codes; tune or disable
    the implicit assert with ``expected_status`` / ``msg`` (see `Status
    Should Be`). Other Requests options go in ``**kwargs`` — see `GET`
    for the complete list. Returns the Response object.
    """
    resp = self._common_request('post', None, url,
                                data=data, json=json, **kwargs)
    self._check_status(expected_status, resp, msg)
    return resp
@keyword('PUT')
@warn_if_equal_symbol_in_url_session_less
def session_less_put(self, url, data=None, json=None,
                     expected_status=None, msg=None, **kwargs):
    """Sends a PUT request to ``url``.

    The request body is taken from ``data`` (dictionary, list of
    tuples, bytes or file-like object) or, for a json body, from
    ``json`` (a dictionary).

    By default the keyword fails on error status codes; tune or disable
    the implicit assert with ``expected_status`` / ``msg`` (see `Status
    Should Be`). Other Requests options go in ``**kwargs`` — see `GET`
    for the complete list. Returns the Response object.
    """
    resp = self._common_request('put', None, url,
                                data=data, json=json, **kwargs)
    self._check_status(expected_status, resp, msg)
    return resp
@keyword('HEAD')
@warn_if_equal_symbol_in_url_session_less
def session_less_head(self, url,
                      expected_status=None, msg=None, **kwargs):
    """Sends a HEAD request to ``url`` to retrieve the HTTP headers.

    Unless ``allow_redirects`` is passed explicitly it defaults to
    ``${False}`` for HEAD (as opposed to ``${True}`` for the other
    methods).

    By default the keyword fails on error status codes; tune or disable
    the implicit assert with ``expected_status`` / ``msg`` (see `Status
    Should Be`). Other Requests options go in ``**kwargs`` — see `GET`
    for the complete list. Returns the Response object.
    """
    resp = self._common_request('head', None, url, **kwargs)
    self._check_status(expected_status, resp, msg)
    return resp
@keyword('PATCH')
@warn_if_equal_symbol_in_url_session_less
def session_less_patch(self, url, data=None, json=None,
                       expected_status=None, msg=None, **kwargs):
    """Sends a PATCH request.

    (The docstring previously claimed this sends a PUT request — fixed.)

    The endpoint used to send the request is the ``url`` parameter,
    while its body can be passed using ``data`` or ``json`` parameters.
    ``data`` can be a dictionary, list of tuples, bytes, or file-like
    object. If you want to pass a json body pass a dictionary as the
    ``json`` parameter.

    By default this keyword fails if a status code with error values is
    returned in the response; this behavior can be modified using the
    ``expected_status`` and ``msg`` parameters (see `Status Should Be`).
    Passing ``expected_status=any`` or ``anything`` disables the
    implicit assert. Other optional requests arguments can be passed
    using ``**kwargs`` — see the `GET` keyword for the complete list.
    """
    response = self._common_request('patch', None, url,
                                    data=data, json=json, **kwargs)
    self._check_status(expected_status, response, msg)
    return response
@keyword('DELETE')
@warn_if_equal_symbol_in_url_session_less
def session_less_delete(self, url,
                        expected_status=None, msg=None, **kwargs):
    """Sends a DELETE request to ``url``.

    By default the keyword fails on error status codes; tune or disable
    the implicit assert with ``expected_status`` / ``msg`` (see `Status
    Should Be`). Other Requests options go in ``**kwargs`` — see `GET`
    for the complete list. Returns the Response object.
    """
    resp = self._common_request('delete', None, url, **kwargs)
    self._check_status(expected_status, resp, msg)
    return resp
@keyword('OPTIONS')
@warn_if_equal_symbol_in_url_session_less
def session_less_options(self, url,
                         expected_status=None, msg=None, **kwargs):
    """Sends an OPTIONS request to ``url``.

    By default the keyword fails on error status codes; tune or disable
    the implicit assert with ``expected_status`` / ``msg`` (see `Status
    Should Be`). Other Requests options go in ``**kwargs`` — see `GET`
    for the complete list. Returns the Response object.
    """
    resp = self._common_request('options', None, url, **kwargs)
    self._check_status(expected_status, resp, msg)
    return resp
from .RequestsOnSessionKeywords import RequestsOnSessionKeywords
from .version import VERSION
"""
** Inheritance structure **
Not exactly a best practice but forced by the fact that RF libraries
are instances of a class.
RequestsKeywords (common requests and sessionless keywords)
|_ SessionKeywords (session creation and data)
|_ RequestsOnSessionKeywords (new keywords that use sessions)
RequestsLibrary (extends RequestsOnSessionKeywords, DeprecatedKeywords)
"""
class RequestsLibrary(RequestsOnSessionKeywords):
    """ RequestsLibrary is a Robot Framework library aimed to provide HTTP api testing functionalities
    by wrapping the well known Python Requests Library.
    == Table of contents ==
    %TOC%
    = Usage =
    The quickest way to start is using the requests keywords and urls see below examples:
    | *** Settings ***
    | Library RequestsLibrary
    |
    | *** Test Cases ***
    | Quick Get Request Test
    | ${response}= GET https://www.google.com
    |
    | Quick Get Request With Parameters Test
    | ${response}= GET https://www.google.com/search params=query=ciao expected_status=200
    |
    | Quick Get A JSON Body Test
    | ${response}= GET https://jsonplaceholder.typicode.com/posts/1
    | Should Be Equal As Strings 1 ${response.json()}[id]
    In order to share the HTTP Session (with the same url, headers, cookies, etc.) among multiple requests,
    a new connection needs to be prepared with ``Create Session`` and passed to the `* On Session` keywords.
    You can then execute any `* On Session` keywords on the shared session by passing the created session alias
    name, this will increase performances since the connection and ssl handshake is recycled and not repeated for
    each requests.
    Below some more advanced examples:
    | *** Settings ***
    | Library Collections
    | Library RequestsLibrary
    |
    | Suite Setup Create Session jsonplaceholder https://jsonplaceholder.typicode.com
    |
    | *** Test Cases ***
    |
    | Get Request Test
    | Create Session google http://www.google.com
    |
    | ${resp_google}= GET On Session google / expected_status=200
    | ${resp_json}= GET On Session jsonplaceholder /posts/1
    |
    | Should Be Equal As Strings ${resp_google.reason} OK
    | Dictionary Should Contain Value ${resp_json.json()} sunt aut facere repellat provident
    |
    | Post Request Test
    | &{data}= Create dictionary title=Robotframework requests body=This is a test! userId=1
    | ${resp}= POST On Session jsonplaceholder /posts json=${data} expected_status=anything
    |
    | Status Should Be 201 ${resp}
    | Dictionary Should Contain Key ${resp.json()} id
    = Response Object =
    All the HTTP requests keywords (GET, POST, PUT, etc.) return an extremely useful Response object.
    The Response object contains a server's response to an HTTP request.
    You can access the different attributes with the dot notation in this way: ``${response.json()}`` or
    ``${response.text}``. Below the list of the most useful attributes:
    | = Attributes = | = Explanation = |
    | content | Content of the response, in bytes. |
    | cookies | A CookieJar of Cookies the server sent back. |
    | elapsed | The amount of time elapsed between sending the request and the arrival of the response (as a timedelta). This property specifically measures the time taken between sending the first byte of the request and finishing parsing the headers. It is therefore unaffected by consuming the response content or the value of the stream keyword argument. |
    | encoding | Encoding to decode with when accessing ``response.text.`` |
    | headers | Case-insensitive Dictionary of Response Headers. For example, ``headers['content-encoding']`` will return the value of a ``Content-Encoding`` response header. |
    | history | A list of Response objects from the history of the Request. Any redirect responses will end up here. The list is sorted from the oldest to the most recent request. |
    | json | Returns the json-encoded content of a response, if any. Parameters: ``**kwargs`` - Optional arguments that json.loads takes. Raises: ValueError - If the response body does not contain valid json. |
    | ok | Returns True if status_code is less than 400, False if not. |
    | reason | Textual reason of responded HTTP Status, e.g. ``Not Found`` or ``OK``. |
    | status_code | Integer Code of responded HTTP Status, e.g. 404 or 200. |
    | text | Content of the response, in unicode. If ``response.encoding`` is ``None``, encoding will be guessed using chardet. The encoding of the response content is determined based solely on HTTP headers, following RFC 2616 to the letter. If you can take advantage of non-HTTP knowledge to make a better guess at the encoding, you should set ``response.encoding`` appropriately before accessing this property. |
    | url | Final URL location of Response. |
    = POST a Multipart-Encoded File =
    RequestsLibrary makes it simple to upload Multipart-encoded files, but in order to make sure that the
    Python Library provides automatically the right ``Content-Length`` and ``multipart/form-data; boundary=...``
    headers you SHOULD NOT provide those headers manually, use the keyword
    `Get File For Streaming Upload` instead that opens the files in binary mode.
    Below an example of multiple file sent over a single POST:
    | Test Post Multiple Files
    | ${file_1}= Get File For Streaming Upload files/randombytes.bin
    | ${file_2}= Get File For Streaming Upload files/randombytes.bin
    | ${files}= Create Dictionary randombytes1 ${file_1} randombytes2 ${file_2}
    |
    | ${resp}= POST https://someurl files=${files}
    You can find a working test example in `atests/test_post_multipart.robot`.
    For a complete reference verify the official Requests documentation:
    - https://2.python-requests.org/en/master/user/quickstart/#post-a-multipart-encoded-file
    - https://2.python-requests.org/en/master/user/advanced/#post-multiple-multipart-encoded-files
    """
    __version__ = VERSION
    ROBOT_LIBRARY_SCOPE = 'GLOBAL'
from robot.api.deco import keyword
from RequestsLibrary.utils import warn_if_equal_symbol_in_url_on_session
from .SessionKeywords import SessionKeywords
class RequestsOnSessionKeywords(SessionKeywords):

    @keyword("GET On Session")
    @warn_if_equal_symbol_in_url_on_session
    def get_on_session(self, alias, url, params=None,
                       expected_status=None, msg=None, **kwargs):
        """Sends a GET request on the HTTP session identified by ``alias``.

        Query string parameters can be given in ``params`` as a string,
        dictionary, list of tuples or bytes.

        By default the keyword fails when an error status code is
        returned; tune this with ``expected_status`` / ``msg`` (see
        `Status Should Be`) or disable it with ``expected_status=any``
        or ``anything``. Other Requests options go in ``**kwargs`` —
        see `GET` for the complete list. Returns the Response object.
        """
        active_session = self._cache.switch(alias)
        resp = self._common_request("get", active_session, url,
                                    params=params, **kwargs)
        self._check_status(expected_status, resp, msg)
        return resp
@keyword("POST On Session")
@warn_if_equal_symbol_in_url_on_session
def post_on_session(self, alias, url, data=None, json=None,
                    expected_status=None, msg=None, **kwargs):
    """Sends a POST request on the HTTP session identified by ``alias``.

    The request body is taken from ``data`` (dictionary, list of
    tuples, bytes or file-like object) or, for a json body, from
    ``json`` (a dictionary).

    By default the keyword fails on error status codes; tune or disable
    the implicit assert with ``expected_status`` / ``msg`` (see `Status
    Should Be`). Other Requests options go in ``**kwargs`` — see `GET`
    for the complete list. Returns the Response object.
    """
    active_session = self._cache.switch(alias)
    resp = self._common_request("post", active_session, url,
                                data=data, json=json, **kwargs)
    self._check_status(expected_status, resp, msg)
    return resp
@keyword("PATCH On Session")
@warn_if_equal_symbol_in_url_on_session
def patch_on_session(self, alias, url, data=None, json=None,
                     expected_status=None, msg=None, **kwargs):
    """Sends a PATCH request on the HTTP session identified by ``alias``.

    The request body is taken from ``data`` (dictionary, list of
    tuples, bytes or file-like object) or, for a json body, from
    ``json`` (a dictionary).

    By default the keyword fails on error status codes; tune or disable
    the implicit assert with ``expected_status`` / ``msg`` (see `Status
    Should Be`). Other Requests options go in ``**kwargs`` — see `GET`
    for the complete list. Returns the Response object.
    """
    active_session = self._cache.switch(alias)
    resp = self._common_request("patch", active_session, url,
                                data=data, json=json, **kwargs)
    self._check_status(expected_status, resp, msg)
    return resp
@keyword("PUT On Session")
@warn_if_equal_symbol_in_url_on_session
def put_on_session(self, alias, url, data=None, json=None,
                   expected_status=None, msg=None, **kwargs):
    """Sends a PUT request on the HTTP session identified by ``alias``.

    The request body is taken from ``data`` (dictionary, list of
    tuples, bytes or file-like object) or, for a json body, from
    ``json`` (a dictionary).

    By default the keyword fails on error status codes; tune or disable
    the implicit assert with ``expected_status`` / ``msg`` (see `Status
    Should Be`). Other Requests options go in ``**kwargs`` — see `GET`
    for the complete list. Returns the Response object.
    """
    active_session = self._cache.switch(alias)
    resp = self._common_request("put", active_session, url,
                                data=data, json=json, **kwargs)
    self._check_status(expected_status, resp, msg)
    return resp
@keyword('DELETE On Session')
@warn_if_equal_symbol_in_url_on_session
def delete_on_session(self, alias, url,
                      expected_status=None, msg=None, **kwargs):
    """Sends a DELETE request on the HTTP session identified by
    ``alias``.

    By default the keyword fails on error status codes; tune or disable
    the implicit assert with ``expected_status`` / ``msg`` (see `Status
    Should Be`). Other Requests options go in ``**kwargs`` — see `GET`
    for the complete list. Returns the Response object.
    """
    active_session = self._cache.switch(alias)
    resp = self._common_request("delete", active_session, url, **kwargs)
    self._check_status(expected_status, resp, msg)
    return resp
@keyword("HEAD On Session")
@warn_if_equal_symbol_in_url_on_session
def head_on_session(self, alias, url,
                    expected_status=None, msg=None, **kwargs):
    """Sends a HEAD request on the HTTP session identified by ``alias``
    to retrieve the HTTP headers of ``url``.

    Unless ``allow_redirects`` is passed explicitly it defaults to
    ``${False}`` for HEAD (as opposed to ``${True}`` for the other
    methods).

    By default the keyword fails on error status codes; tune or disable
    the implicit assert with ``expected_status`` / ``msg`` (see `Status
    Should Be`). Other Requests options go in ``**kwargs`` — see `GET`
    for the complete list. Returns the Response object.
    """
    active_session = self._cache.switch(alias)
    resp = self._common_request("head", active_session, url, **kwargs)
    self._check_status(expected_status, resp, msg)
    return resp
@keyword("OPTIONS On Session")
@warn_if_equal_symbol_in_url_on_session
def options_on_session(self, alias, url,
                       expected_status=None, msg=None, **kwargs):
    """Sends an OPTIONS request on the HTTP session identified by
    ``alias``.

    By default the keyword fails on error status codes; tune or disable
    the implicit assert with ``expected_status`` / ``msg`` (see `Status
    Should Be`). Other Requests options go in ``**kwargs`` — see `GET`
    for the complete list. Returns the Response object.
    """
    active_session = self._cache.switch(alias)
    resp = self._common_request("options", active_session, url, **kwargs)
    self._check_status(expected_status, resp, msg)
    return resp
from robot.api import logger
from typing import List
import pandas as pd
class DynamicTestCases(object):
    """A Robot Framework test library to dynamically add test cases to the current suite."""

    ROBOT_LISTENER_API_VERSION = 3
    ROBOT_LIBRARY_SCOPE = 'TEST SUITE'

    def __init__(self):
        # Register the library itself as a listener so _start_suite fires.
        self.ROBOT_LIBRARY_LISTENER = self
        self.current_suite = None

    def _start_suite(self, suite, result):
        # Listener v3 hook: remember the running suite so keywords can extend it.
        self.current_suite = suite

    def add_test_case(self, name: str, doc: str, tags: List[str], kwname: str, **kwargs):
        """Adds a test case to the current suite.

        `name`: The test case name (str).
        `doc`: The documentation for the test case (str).
        `tags`: Tags to be associated with the test case (List of str).
        `kwname`: The keyword to call (str).
        `**kwargs`: Keyword arguments to be passed to the keyword.

        Example:
        | Add Test Case | Example Test Case | This is a dynamic test case | ['smoke'] | My Keyword | arg1=value1 | arg2=value2 |
        """
        test_case = self.current_suite.tests.create(name=name, doc=doc, tags=tags)
        # Robot keyword arguments are passed as 'name=value' strings.
        args = [f'{arg_name}={arg_value}' for arg_name, arg_value in kwargs.items()]
        test_case.body.create_keyword(name=kwname, args=args)
        logger.info(f"Added test case '{name}' with keyword '{kwname}' and keyword arguments: {kwargs}")

    def read_test_data_and_add_test_cases(self, csv_file_path: str):
        """Reads test data from a CSV file and adds test cases dynamically.

        `csv_file_path`: The path to the CSV file containing test data.
        Expected columns: test_name, test_scenario, test_tags (comma
        separated), keyword, plus any number of columns ending in '_v'
        whose values become keyword arguments (column name minus '_v').

        Example:
        | Read Test Data And Add Test Cases | /path/to/test_data.csv |
        """
        try:
            df = pd.read_csv(csv_file_path)
            for _, row in df.iterrows():
                # Robustness fix: missing CSV cells are NaN floats, which
                # previously crashed .split() and leaked 'nan' keyword
                # arguments — normalise them to '' / skip them instead.
                name = self._cell(row, 'test_name')
                doc = self._cell(row, 'test_scenario')
                tags = self._cell(row, 'test_tags').split(',')
                kwname = self._cell(row, 'keyword')
                kwargs = {col[:-2]: row[col]
                          for col in df.columns
                          if col.endswith('_v') and pd.notna(row[col])}
                self.add_test_case(name=name, doc=doc, tags=tags, kwname=kwname, **kwargs)
            logger.info(f"Successfully added test cases from '{csv_file_path}'.")
        except Exception as e:
            logger.error(f"Error occurred while reading test data from '{csv_file_path}': {e}")

    @staticmethod
    def _cell(row, column):
        # Return the row value for `column` as a string, '' when absent or NaN.
        value = row.get(column, '')
        return '' if pd.isna(value) else str(value)
import json
import copy
import pandas as pd
import ast
from RequestsLibrary import RequestsLibrary
from urllib3.util import Retry
from robot.api import logger
class RequestsProKeywords(RequestsLibrary):
    # One library instance is shared across the whole test run.
    ROBOT_LIBRARY_SCOPE = 'Global'
    # DEFAULT_RETRY_METHOD_LIST = list(copy.copy(Retry.DEFAULT_METHOD_WHITELIST))

    def __init__(self):
        """Initialise the base RequestsLibrary and set project-wide defaults."""
        super().__init__()
        # NOTE(review): this re-runs __init__ on every direct base even though
        # super().__init__() above already did — presumably intended for
        # multiple-inheritance setups, but it double-initialises; confirm.
        for base in RequestsProKeywords.__bases__:
            base.__init__(self)
        self._primers = {}
        # Session-level defaults picked up by _common_request.
        self.cookies = None
        self.timeout = 90
        self.verify = False
    def trigger_api(self,
                    test_data_full_path,
                    test_input_path,
                    api_name,
                    base_url,
                    auth=None):
        """Triggers the API named ``api_name`` using the test data sheet.

        Reads the CSV at ``test_data_full_path``, keeps the rows for
        ``api_name`` that are marked for execution (``tbe`` == 'YES'),
        builds the request (path params via ``*_pp`` columns, query params
        via ``*_d`` columns, body/headers/files columns) and fires it
        against ``base_url``.

        ``auth``: optional comma separated credentials string; when given it
        overrides the ``auth`` column of the data sheet.

        Returns the filtered data frame extended with two columns:
        ``API_Response_Code`` and ``API_Response_Content``.
        """
        logger.info('*************************************************************************************************')
        logger.info('Trigger API using the parameters specified in test data sheet - ' + api_name)
        logger.info('*************************************************************************************************')
        logger.info('Step-01: Start - Read and filter test data sheet based on API_NAME')
        test_cases_df = pd.read_csv(str(test_data_full_path), dtype=str)
        selected_api_df = test_cases_df.loc[test_cases_df['test_name'] == api_name].copy()
        selected_api_df = selected_api_df.loc[selected_api_df['tbe'] == 'YES']
        selected_api_df = selected_api_df.reset_index(drop=True)
        logger.debug('API Name :' + api_name)
        logger.info('Step-01: End - Read and filter test data sheet based on API_NAME')
        logger.info('Step-02: Start - Identify the request type')
        request_type = str(selected_api_df['request_type'][0])
        logger.info('Step-02: End - Identify the request type')
        logger.info('Step-03: Start - Assign default values to None')
        params = None
        data = None
        # auth = None
        files = None
        json_data = None
        cookies = None
        timeout = None
        proxies = None
        verify = False
        logger.info('Step-03: End - Assign default values to None')
        logger.info('Step-04: Start - Assign project specific values')
        # Check if authorization is required: explicit argument wins over the
        # data-sheet 'auth' column; both are comma separated credential lists.
        if auth is not None:
            auth = auth.split(',')
            logger.debug(auth)
        elif pd.isna(selected_api_df.loc[0, 'auth']):
            auth = None
        else:
            auth = str(selected_api_df['auth'].item()).split(',')
        logger.info('Step-04: End - Assign project specific values')
        logger.info('Step-05: Start - Identify uri')
        # Unique identifier for creating a session
        alias = str(api_name)
        uri = str(selected_api_df['uri'][0])
        logger.info('Step-05: End - Identify uri')
        logger.info('Step-06: Start - Update the path parameter in URI')
        # Identify the path parameters specific columns and create a separate data frame
        selected_api_flt_pp_df = selected_api_df.filter(regex='_pp$', axis=1)
        # Identify the path parameters specific for the api
        selected_api_flt_pp_df = selected_api_flt_pp_df.dropna(axis='columns')
        # Path Params value which needs to be passed should be as dictionary
        if selected_api_flt_pp_df.empty:
            path_params = None
        else:
            path_params_list = selected_api_flt_pp_df.to_dict('records')[0]
            # Remove _pp from the parameter name
            path_params = {x.replace('_pp', ''): v for x, v in path_params_list.items()}
            # Iterate through each values and update the URL
            # (replaces '{name}' placeholders in the uri)
            for key, value in path_params.items():
                uri = uri.replace(str('{') + key + str('}'), value)
            logger.debug(uri)
        logger.info('Step-06: End - Update the path parameter in URI')
        logger.info('Step-07: Start - Create the parameter and assign to params')
        # Identify the parameters specific columns and create a separate data frame
        selected_api_flt_df = selected_api_df.filter(regex='_d$', axis=1)
        # Identify the parameters specific for the api
        selected_api_flt_df = selected_api_flt_df.dropna(axis='columns')
        # Params value which needs to be passed should be as dictionary
        if selected_api_flt_df.empty:
            params = None
        else:
            params_list = selected_api_flt_df.to_dict('records')[0]
            # Remove _d from the parameter name
            params = {x.replace('_d', ''): v for x, v in params_list.items()}
        logger.info('Step-07: End - Create the parameter and assign to params')
        logger.info('Step-08: Start - Extract other additional information required - data, auth, headers, files')
        # Check if data level json is required (body); the 'data' column names
        # a JSON file under <test_input_path>/<api_name>/
        if pd.isna(selected_api_df.loc[0, 'data']):
            data = None
        else:
            file_name = str(selected_api_df['data'][0])
            with open(test_input_path + api_name + '/' + file_name + '.JSON') as f:
                data = json.load(f)
        # Check if headers is required; the 'headers' column carries the
        # content type used for both Content-Type and Accept.
        headers = {}
        if pd.isna(selected_api_df.loc[0, 'headers']):
            headers = {}
        else:
            headers = {'Content-Type': 'values', 'Accept': 'values'}
            headers_type = str(selected_api_df['headers'].item())
            headers['Content-Type'] = headers_type
            headers['Accept'] = headers_type
        # Check if files is required
        files_str = ''
        if pd.isna(selected_api_df.loc[0, 'files']):
            files = None
        else:
            # Extract the files column content
            files_str = str(selected_api_df['files'][0])
            # Convert the variable to dict
            files = ast.literal_eval(files_str)
            # Iterate through the dict and open the files specified in the data sheet
            # Files will be opened in binary mode and passed to post
            # NOTE(review): the handles opened here are never closed in this
            # method — confirm the HTTP layer closes them.
            for k, value in files.items():
                f = open(test_input_path + api_name + '/' + value, 'rb')
                files[k] = f
        # List of parameters required for any type of API
        logger.info('type :' + request_type)
        logger.info('url :' + base_url)
        logger.info('uri :' + uri)
        logger.info('params :' + str(params))
        logger.info('data :' + str(data))
        logger.info('auth :' + str(auth))
        logger.info('headers :' + str(headers))
        logger.info('files :' + str(files_str))
        # Static variables for the request
        max_retries = 3
        backoff_factor = 0.1
        # NOTE(review): disable_warnings is unused below — every call
        # hard-codes disable_warnings=1.
        disable_warnings = 0
        debug = 0
        logger.info('Step-08: End - Extract other additional information required - data, auth, headers, files')
        logger.info('Step-09: Start - Trigger the API, extract the response and status code')
        # Condition to check GET/POST method
        # NOTE(review): super(RequestsLibrary, self) resolves these methods on
        # the class *after* RequestsLibrary in the MRO; if create_session and
        # the *_request methods are defined by RequestsLibrary itself this
        # skips them — confirm intended (self.create_session would be the
        # conventional call).
        # NOTE(review): if request_type is none of GET/PUT/POST/DELETE,
        # `response` is never bound and the access below raises NameError —
        # consider validating request_type up front.
        if request_type == 'GET':
            # Response with status code, response content and api name will be returned as data frame
            # Create Session
            super(RequestsLibrary, self).create_session(alias=alias, url=base_url, headers=headers, cookies=cookies,
                                                        auth=auth, timeout=timeout, proxies=proxies, verify=verify,
                                                        debug=debug, max_retries=max_retries,
                                                        backoff_factor=backoff_factor,
                                                        disable_warnings=1)
            # Trigger GET method
            response = super(RequestsLibrary, self).get_request(alias=alias, uri=uri, headers=headers, json=data,
                                                                params=params, allow_redirects=None, timeout=timeout)
        elif request_type == 'PUT':
            # Response with status code, response content and api name will be returned as data frame
            # Create Session
            super(RequestsLibrary, self).create_session(alias=alias, url=base_url, headers=headers, cookies=cookies,
                                                        auth=auth, timeout=timeout, proxies=proxies, verify=verify,
                                                        debug=debug, max_retries=max_retries,
                                                        backoff_factor=backoff_factor,
                                                        disable_warnings=1)
            # Trigger PUT method
            response = super(RequestsLibrary, self).put_request(alias=alias, uri=uri, data=data, json=json_data,
                                                                params=params, files=files, headers=headers,
                                                                allow_redirects=None, timeout=timeout)
        elif request_type == 'POST':
            # Create Session
            super(RequestsLibrary, self).create_session(alias=alias, url=base_url, headers=headers, cookies=cookies,
                                                        auth=auth, timeout=timeout, proxies=proxies, verify=verify,
                                                        debug=debug, max_retries=max_retries,
                                                        backoff_factor=backoff_factor,
                                                        disable_warnings=1)
            # Trigger POST method
            response = super(RequestsLibrary, self).post_request(alias=alias, uri=uri, data=data, json=json_data,
                                                                 params=params, headers=headers, files=files,
                                                                 allow_redirects=None, timeout=timeout)
        elif request_type == 'DELETE':
            # Create Session
            super(RequestsLibrary, self).create_session(alias=alias, url=base_url, headers=headers, cookies=cookies,
                                                        auth=auth, timeout=timeout, proxies=proxies, verify=verify,
                                                        debug=debug, max_retries=max_retries,
                                                        backoff_factor=backoff_factor,
                                                        disable_warnings=1)
            # Trigger DELETE method
            response = super(RequestsLibrary, self).delete_request(alias=alias, uri=uri, data=data, json=json_data,
                                                                   params=params, headers=headers, allow_redirects=None,
                                                                   timeout=timeout)
        # Record the outcome in the returned data frame.
        selected_api_df.loc[0, 'API_Response_Code'] = response.status_code
        if response.text != '':
            response_string = str(response.content.decode("utf-8"))
            selected_api_df.loc[0, 'API_Response_Content'] = response_string
        else:
            selected_api_df.loc[0, 'API_Response_Content'] = ''
        logger.info('Step-09: End - Trigger the API, extract the response and status code')
        logger.info('*************************************************************************************************')
        return selected_api_df
def api_test_cases_list(self, test_data_full_path):
"""
Opens the api test data file from the test inputs
API test data file name should always be api_test_data. File format should be csv
Aruguments:
| Test Data Full Path | Full path of test data file with file name |
Example:
|*Keywords* | *Parameters* |
|*Api Test Cases List* | *C:\\Automation_Repository\\Project_Name\\Test_Inputs\\Project_Name_api_test_data.csv|
"""
logger.info('*************************************************************************************************')
logger.info('Reading API Test Data File and return the APIs to be executed')
logger.info('*************************************************************************************************')
logger.info('Step-01: Start - Read and filter based on tbe')
test_cases_df = pd.read_csv(str(test_data_full_path))
test_cases_df = test_cases_df.loc[test_cases_df['tbe'] == 'YES']
test_cases_dict = test_cases_df.to_dict('records')
logger.info('Step-01: End - Read and filter based on tbe')
logger.info('*************************************************************************************************')
return test_cases_dict | /robotframework_requestspro-1.2.9-py3-none-any.whl/RequestsProLibrary/RequestsProKeywords.py | 0.442877 | 0.155367 | RequestsProKeywords.py | pypi |
from .request_info import request_info
from .libcommons import libcommons
from .data_manager import data_manager
class rest_keywords:
'''
RESTLibrary provides a feature-rich and extensible infrastructure which is required for making any REST/HTTP call along with all the possible range of features which one might need for doing end-to-end REST API automation using robotframework.
All the repetitive tasks which API automation developer has to perform frequently are taken care as part of standard features of the library, like JSON comparison, benchmarking, file upload, file download, authentication, logging/reporting, response channelization, runtime variable resolution etc.
The library exposes a single keyword - Make HTTP Request, which has all the possible parameters to take care of all the features required for end-to-end REST API Automation.
'''
def __init__(self, username='', password='', authType="NoAuth", username_var='username', password_var='password'):
self.username = username if username else libcommons.get_variable('${' + username_var + '}')
self.password = password if password else libcommons.get_variable('${' + password_var + '}')
self.authType = authType
self.username_var = username_var
self.password_var = password_var
    def Make_HTTP_Request(self, requestId, url, method='GET', requestHeaders={}, requestBody='', requestDataType='json', responseDataType='json', authType='', expectedStatusCode=200, expectedResponseBody='', username='', password='', files={}, responseVerificationType='full', expectedResponseHeaders={}, expectedResponseSchema=None, verificationScheme=[], downloadFilePath=None, timeout=None, ignoreNodes=[]):
        '''
        requestId : an alphanumeric value which helps identify an HTTP request uniquely
        url : entire URI which needs to be invoked, including query parameters.
        method : HTTP method/verb to be used for the REST call, default is GET, other supported values are HEAD, POST, PUT, PATCH and DELETE
        requestHeaders : a dictionary of all the headers which need to be passed with HTTP request, default is empty dictionary which means no headers.
        requestBody : payload/data to be posted along with the request, it can be either a file-path or content itself
        timeout : request timeout in seconds
        authType : Type of authentication to be used, default is NoAuth, means no authentication (default can be changed while importing the library with authType=<myDefaultAuthType\> parameter), other supported value is Basic.
        You can also choose not to use authType variable and provide auth token on your own while making the request in request headers or url based on your API.
        username : username to be used for authentication while making the request, you can also declare a global/suite/test variable ${username} beforehand rather than passing it with each request. username passed with request will override the global variable.
        password : password to be used for authentication while making the request, you can also declare a global/suite/test variable ${password} beforehand rather than passing it with each request. password passed with request will override the global variable.
        # Response Channelization
        Response Channelization (RC) is a special and unique feature of RESTLibrary, which lets you extract and channelize the data from response of one API call to next API call with least efforts and in scalable manner.
        It can channelize data from response body and response headers both.
        In case of response body channelization you can either channelize selective data or entire body itself based on your need.
        While channelizing entire body you also have an option to update it if needed.
        ## RC Syntax (RC === Response Channelization)
        <<<rc, src_request_id, rcType, selector>>>
        An RC block is always written between <<< and >>> tokens.
        Here are the details about 4 comma separated parameters inside RC block
        rc : this is name of the keyword, so it's value will always be rc
        src_request_id : this is id of the source request from which you want to extract and channelize the data
        rcType : this variable denotes whether you want to extract data from response body or response headers, supported values are body and header
        selector : using this variable you can specify what data you want to extract. It can have many values based on rcType.
        | rcType  | selector  | examples                                |comments
        |---------|-----------|-----------------------------------------|-------------------------------------------------------------------------------------------|
        |body     |jsonPath   |$.id                                     |selecting id                                                                               |
        |         |           |$.items[?(@.name="user1")].id            |selecting id of the item which has name as user1                                           |
        |         |json       |{"name" : "user1_updated"}               |will return entire response body with updated name                                         |
        |         |           |{"$.items[*].name" : "user1_updated"}    |will return entire response body with updated name of every item in the json               |
        |         |           |{"$.items[*].name" : "<<<DELETE\>>>"}    |will return entire response body after deleting name node from every item in the json      |
        |         |           |{}                                       |will return entire response body as is without any changes                                 |
        |header   |headerName |etag                                     |will return etag header value from the response headers of the src request                 |
        |---------|-----------|-----------------------------------------|--------------------------------------------------------------------------------------------
        # Runtime Variable Resolution
        This is again a unique feature of RESTLibrary, which allows you to embed robot variables and RCs inside json files (and any other files as well which have text content type), which you might use for storing request payloads and baselines.
        RESTLibrary will ensure to resolve these variables/RCs at runtime when the request is being processed.
        # Datetime Stamping
        RESTLibrary has a special variable : <<<NOW\>>>
        This variable automatically gets replaced by the current datetime stamp
        datetime format is %d%m%y%H%M%S%f, this always generates a 18 digit number, which is entirely unique
        You can embed this variable anywhere in the request body, headers, json payload or benchmark files
        This can be used to generate unique data values without any extra efforts.
        expectedStatusCode : This parameter facilitates verification of http status code, it's default value is 200
        expectedResponseBody : Expected response body or the baseline which you expect the HTTP request to return, it can either be a file path or in-place json.
        * You can also use regular expressions in json node values, if you just want to verify pattern rather than actual value.
        * You can use <<<SKIP\>>> as the node value if you want to skip the verification of a specific node
        responseVerificationType : This parameter influences the json comparison of response body and benchmark. It's default value is 'FULL', other supported value is 'PARTIAL'.
        * In case of 'FULL' verification both the JSONs are thoroughly compared from both the sides and any differences found are reported in the robot report
        * in case of 'PARTIAL' verification, you can provide partial json with few nodes in expectedResponseBody which you intend to verify (all the nodes in HTTP response will not be verified, only nodes provided in the expectedResponseBody will be verified)
        expectedResponseHeaders : Expected response headers which you expect the HTTP request to return, it should be a dictionary
        * It is always 'PARTIAL' comparison, it will only compare the headers which you supply as expectedResponseHeaders
        * You can use regular expressions here as well, if you just want to verify pattern
        expectedResponseSchema : JSON Schema to validate the structure of HTTP response (if you just want to verify the response json structure):
        * You can use any JSONSchemaGenerator utility to generate the schema, for example : https://www.jsonschema.net/home
        * You can either provide the jsonschema file path or the schema itself as the parameter value
        * It uses JSONSchema Draft-7
        verificationScheme : this parameter is useful to specify how we want our baseline to get compared with response. It's a list of schemes, each scheme is a json object with predefined structure.
        * NotSorted Verification : If there is a list of objects in your response and objects are listed in random order, then using NotSorted scheme you can enforce an orderless comparison with benchmark
            * Syntax Example - verificationScheme=[{"path": "$.data","type": "NotSorted","key": "email"}]
            * path is the jsonpath of the list node, type is the type of scheme and key is a node-name which has a unique value in each object of the list, you can also provide a composite key with comma separated node-names
        * Sort : This will sort a list of objects in your response based on given key before comparing it with benchmark
            * Syntax Example - verificationScheme=[{"path": "$.data","type": "Sort","key": "email"}]
            * path is the jsonpath of the list node, type is the type of scheme and key is a node-name which has a unique value in each object of the list, you can also provide a composite key with comma separated node-names
            * key can be skipped if you have a list of values rather than objects
        * you can provide multiple schemes separated by comma as the parameter value
        ignoreNodes : using this parameter you can completely ignore a set of nodes from the benchmark comparison, this is a quick way to ignore multiple nodes
        * it's a list of jsonpaths of the nodes which you want to ignore
        * Syntax example : ignoreNodes=["$.id", "$.data[*].links"]
        requestDataType : type of content which is being posted to server, default is JSON, other supported values are TEXT and FILE
        responseDataType : type of content which is expected to be returned in response, default is JSON, other supported values are TEXT and FILE
        downloadFilePath : full path of the file including filename, where you want the file to get downloaded
        files : this is where you provide details of the files which need to be uploaded, it is a dictionary
        * below formats are supported:
            * {'fieldName' : 'filePath/fileContent'}
            * {'fieldName' : ['fileName', 'filePath']}
            * {'fieldName' : ['fileName', 'filePath', 'content-type']}
            * {'fieldName' : ['fileName', 'filePath', 'content-type', {custom_headers}]}
        * you should choose the format which your REST API has implemented, your dev team can provide more info
        * you can add any number of files to the dictionary if your API supports multi-file upload
        '''
        # Per-call overrides fall back to the values given at library import.
        authType = authType if authType else self.authType
        username = username if username else self.username
        password = password if password else self.password
        # Bundle everything into a single request-info object that the
        # keyword pipeline below reads and enriches.
        requestInfo = request_info().Create_Request_Info(requestId, url, method=method, requestHeaders=requestHeaders, requestBody=requestBody, authType=authType, requestDataType=requestDataType, responseDataType=responseDataType, expectedStatusCode=expectedStatusCode, expectedResponseBody=expectedResponseBody, username=username, password=password, files=files, responseVerificationType=responseVerificationType, expectedResponseHeaders=expectedResponseHeaders, expectedResponseSchema=expectedResponseSchema, verificationScheme=verificationScheme, username_var=self.username_var, password_var=self.password_var, downloadFilePath=downloadFilePath, timeout=timeout, ignoreNodes=ignoreNodes)
        # The request is processed by a pipeline of Robot Framework keywords;
        # each reads and updates the suite-level ${requestInfo} variable.
        libcommons.robotBuiltIn.set_suite_variable("${requestInfo}", requestInfo)
        requestInfo = libcommons.run_keyword('Generate Http Request', "${requestInfo}")
        libcommons.robotBuiltIn.set_suite_variable("${requestInfo}", requestInfo)
        requestInfo = libcommons.run_keyword('Process Http Request', "${requestInfo}")
        libcommons.robotBuiltIn.set_suite_variable("${requestInfo}", requestInfo)
        # Record the status code keyed by request id so later requests can
        # reference it (presumably via Response Channelization — see data_manager).
        data_manager.statusStore[requestInfo.requestId] = requestInfo.responseStatusCode
        requestInfo = libcommons.run_keyword('Verify Response Against Baselines', "${requestInfo}")
        return requestInfo
def Execute_RC(self, input):
'''
input : lets you evaluate all the RC macros, including multiple RCs and NOW immediately and run the response channelization, you can store the value in any variable and use it as required
<<<NOW>>>___SomeText__<<<RC, src request id, body, json path>>>
<<<NOW>>>
<<<RC, src request id, body, json path>>>
<<<RC, src request id, header, header name>>>
we can also have an Entire Response Channelization where entire response of the src request will be channelized after merging with the patch json, patch can be a file path or json conrent
using this we can add, delete and update the nodes from src json response. (deletion can be achieved using <<<delete>>> value)
<<<RC, src request id, body, patch json>>>
Returns:
returns the value after running the response channelization, it could be a single node value or entire json, depending on whether it is normal RC or entire
'''
result = input
result = data_manager.process_data(input)
return result | /robotframework-restlibrary-1.0.tar.gz/robotframework-restlibrary-1.0/src/RESTLibrary/rest_keywords.py | 0.752104 | 0.399987 | rest_keywords.py | pypi |
import re
from robot.api import ExecutionResult, ResultVisitor, logger
from robot.api.deco import library
from robot.libraries.BuiltIn import BuiltIn
from robot.utils.robottypes import is_truthy
# Matches Robot Framework's "multiple tests with same name" warning, which is
# emitted when a retried test is re-inserted into its suite (named groups:
# test, suite). Used by RetryFailed.message to rewrite that warning.
duplicate_test_pattern = re.compile(
    r"Multiple .*? with name '(?P<test>.*?)' executed in.*? suite '(?P<suite>.*?)'."
)
# Separator used when appending retry annotations to possibly-empty messages.
linebreak = "\n"
@library(scope="GLOBAL")
class RetryFailed:
    """Listener-library that retries failed tests.

    Acting as a Robot Framework listener (API v3): when a test fails and
    retries remain, the same test is re-inserted into the suite right after
    the failed run, the failed run is downgraded to SKIP, and the generated
    output.xml is post-processed (see `RetryMerger`) to merge retried runs.
    """
    ROBOT_LISTENER_API_VERSION = 3
    def __init__(self, global_retries=0, keep_retried_tests=False, log_level=None):
        # Registering self as listener makes the library react to execution events.
        self.ROBOT_LIBRARY_LISTENER = self
        self.retried_tests = []  # longnames of tests that have been retried
        self.retries = 0  # retry counter for the test currently running
        self._max_retries_by_default = int(global_retries)
        self.max_retries = global_retries
        self.keep_retried_tests = is_truthy(keep_retried_tests)
        self.log_level = log_level  # optional log level applied during retried runs
        self._original_log_level = None
    def start_test(self, test, result):
        if self.retries:
            # Expose the current retry number to the retried test run.
            BuiltIn().set_test_variable("${RETRYFAILED_RETRY_INDEX}", self.retries)
            if self.log_level is not None:
                # Switch log level for the retried run; restored in end_test.
                self._original_log_level = BuiltIn()._context.output.set_log_level(self.log_level)
        # A test/task-level `test:retry(N)` / `task:retry(N)` tag overrides
        # the global retry count for this test only.
        for tag in test.tags:
            retry_match = re.match(r"(?:test|task):retry\((\d+)\)", tag)
            if retry_match:
                self.max_retries = int(retry_match.group(1))
                return
        self.max_retries = self._max_retries_by_default
        return
    def end_test(self, test, result):
        if self.retries and self._original_log_level is not None:
            # Restore the log level changed in start_test.
            BuiltIn()._context.output.set_log_level(self._original_log_level)
        if not self.max_retries:
            self.retries = 0
            return
        if result.status == "FAIL":
            if self.retries < self.max_retries:
                # Re-queue the same test right after the current one and
                # downgrade this failed run to SKIP.
                index = test.parent.tests.index(test)
                test.parent.tests.insert(index + 1, test)
                result.status = "SKIP"
                result.message += "\nSkipped for Retry"
                self.retried_tests.append(test.longname)
                self.retries += 1
                return
            else:
                # Out of retries: annotate the final failure.
                result.message += (
                    f"{linebreak * bool(result.message)}[RETRY] FAIL on {self.retries}. retry."
                )
        else:
            if self.retries:
                # Passed on a retry: annotate the successful run.
                result.message += (
                    f"{linebreak * bool(result.message)}[RETRY] PASS on {self.retries}. retry."
                )
        self.retries = 0
        return
    def end_suite(self, suite, result):
        # Deduplicate by test id so the merged suite/result contain each
        # (possibly retried) test once; dict insertion keeps the last entry
        # per id.
        test_dict = {}
        result_dict = {}
        for result_test, test in zip(result.tests, suite.tests):
            test_dict[test.id] = test
            result_dict[test.id] = result_test
        result.tests = list(result_dict.values())
        suite.tests = list(test_dict.values())
    def message(self, message):
        # Robot warns about duplicate test names when a test is re-queued;
        # rewrite that warning into a retry notice (consumed by RetryMerger).
        if message.level == "WARN":
            match = duplicate_test_pattern.match(message.message)
            if match and f"{match.group('suite')}.{match.group('test')}" in self.retried_tests:
                message.message = (
                    f"Retry {self.retries}/{self.max_retries} of test '{match.group('test')}':"
                )
    def output_file(self, original_output_xml):
        # Post-process the generated output.xml to merge retried runs.
        result = ExecutionResult(original_output_xml)
        result.visit(RetryMerger(self.retried_tests, self.keep_retried_tests))
        result.save()
class RetryMerger(ResultVisitor):
def __init__(self, retried_tests, keep_retried_tests=False):
self.retried_tests = retried_tests
self.keep_retried_tests = keep_retried_tests
self.test_ids = {}
def start_suite(self, suite):
if self.keep_retried_tests:
return
test_dict = {}
for test in suite.tests:
test_dict[test.name] = test
suite.tests = list(test_dict.values())
def end_suite(self, suite):
for test in suite.tests:
if test.longname in self.retried_tests:
self.test_ids[test.name] = test.id
def start_errors(self, errors):
messages = []
retry_messages = {}
for message in errors.messages:
if message.level == "WARN":
pattern = re.compile(
r"Retry (?P<retries>\d+)/(?P<max_retries>\d+) of test '(?P<test>.+)':"
)
match = pattern.match(message.message)
if match:
link = self._get_test_link(match.group("test"))
message.message = (
f"Test '{link}' has been retried {match.group('retries')} times "
f"(max: {match.group('max_retries')})."
)
message.html = True
retry_messages[match.group("test")] = message
continue
messages.append(message)
errors.messages = sorted(
messages + list(retry_messages.values()), key=lambda m: m.timestamp
)
def _get_test_link(self, test_name):
test_id = self.test_ids.get(test_name)
link = (
f"<a "
f"onclick=\"makeElementVisible('{test_id}')\" "
f'href="#{test_id}" '
f'title="Link to details">'
f"{test_name}"
f"</a>"
if test_id
else test_name
)
return link | /robotframework-retryfailed-0.2.0.tar.gz/robotframework-retryfailed-0.2.0/src/RetryFailed/retry_failed.py | 0.419767 | 0.181155 | retry_failed.py | pypi |
import inspect
import wx
from .. import utils
from ..action.actioninfo import ActionInfo
from ..publish import PUBLISHER
class Plugin(object):
"""Entry point to RIDE plugin API -- all plugins must extend this class.
Plugins can use the helper methods implemented in this class to interact
with the core application. The methods and their arguments are kept stable
across the different RIDE releases to the extent that it is possible.
If the provided methods are not enough, plugins can also interact with the
core directly using properties `tree`, `menubar`, `toolbar`, `notebook` and
`model`. Although these attributes themselves are stable, the functionality
behind them may still change between releases. Users are thus recommended
to propose new helper methods, preferably with patches, for often needed
functionality that is only available through these properties.
:IVariables:
name
Plugin name. Set in `__init__` based on the given name or the class name.
doc
Plugin documentation. Set in `__init__` based on the given doc or
the class docstring.
metadata
Plugin metadata. Set in `__init__` based on the given metadata.
initially_enabled
Specifies should the plugin be enabled when first loaded.
Set in `__init__`.
"""
tree = property(lambda self: self.__frame.tree, doc='Provides access to the suite and resource tree')
filemgr = property(lambda self: self.__frame.filemgr, doc='Provides access to the files and folders explorer')
menubar = property(lambda self: self.__frame.GetMenuBar(), doc='Provides access to the application menubar')
toolbar = property(lambda self: self.__frame.GetToolBar(), doc='Provides access to the application toolbar')
notebook = property(lambda self: self.__frame.notebook, doc='Provides access to the tabbed notebook')
model = property(lambda self: self.__app.model, doc='Provides access to the data model')
frame = property(lambda self: self.__frame, doc='Reference to the RIDE main frame')
datafile = property(lambda self: self.get_selected_datafile(), doc='Currently selected datafile')
global_settings = property(lambda self: self.__app.settings, doc='Settings read from settings.cfg')
    def __init__(self, application, name=None, doc=None, metadata=None, default_settings=None, initially_enabled=True):
        """Initialize the plugin with the provided data.
        The provided information is mainly used by the plugin manager. Simple
        plugins are often fine with the defaults. If this method is overridden,
        the plugin must call it explicitly::
            from robotide.pluginapi import Plugin
            class MyCoolPluginExample(Plugin):
                \"\"\"This extra cool docstring is used as the plugin doc.\"\"\"
                def __init__(self, application):
                    Plugin.__init__(self, application, metadata={'version': '0.1'},
                                    default_settings={'color': 'red', 'x': 42})
        Plugins should not create any user interface elements at this point but
        wait until the `enable` method is called.
        :Parameters:
          application
            RIDE application reference.
          name
            Name of the plugin. If not specified, the name is derived from the
            plugin class name, dropping a possible ``Plugin`` suffix.
          doc
            Plugin documentation. If not specified, the doc is got from the
            plugin class docstring.
          metadata
            A dictionary of free metadata shown on the plugin manager. Values
            containing URLs will be shown as links.
          default_settings
            A dictionary of settings and their default values. Settings are
            automatically stored onto RIDE configuration file, can be
            accessed using direct attribute access via `__getattr__`, and new
            settings can be saved using `save_setting`.
          initially_enabled
            Specifies should the plugin be enabled when loaded for the first
            time. Users can change the status later from the plugin manager.
        """
        self.name = name or utils.name_from_class(self, drop='Plugin')
        self.doc = self._get_doc(doc)
        self.metadata = metadata or {}
        self.initially_enabled = initially_enabled
        self._save_timer = None  # pending wx.CallLater used for delayed saves
        self.__app = application
        self.__frame = application.frame
        self.__namespace = application.namespace
        # Plugin-specific settings section persisted in settings.cfg.
        self.__settings = application.settings['Plugins'].add_section(self.name)
        self.__settings.set_defaults(default_settings)
        self.__actions = []  # actions registered via register_action & friends
def _get_doc(self, given_doc):
if given_doc:
return given_doc
if self.__doc__ == Plugin.__doc__:
return ''
return inspect.getdoc(self) or ''
    def __getattr__(self, name):
        """Provides convenient attribute access to saved settings.
        For example, setting ``color`` can be accessed directly like
        ``self.color``.
        """
        # The '__settings' guard prevents infinite recursion: a lookup of the
        # (mangled) '_Plugin__settings' attribute itself — e.g. before
        # __init__ has set it — must raise AttributeError instead of
        # re-entering this method via self.__settings below.
        if '__settings' not in name and self.__settings.has_setting(name):
            return self.__settings[name]
        raise AttributeError("No attribute or settings with name '%s' found" % name)
    def save_setting(self, name, value, override=True, delay=0):
        """Saves the specified setting into the RIDE configuration file.
        ``override`` controls whether a possibly already existing value is
        overridden or not. Saved settings can be accessed using direct attribute
        access via `__getattr__`.
        ``delay`` is number defining how many seconds is waited before setting
        is saved. This can be used to prevent saving the value while user is
        typing it.
        """
        # Persist immediately only when no delay was requested; otherwise a
        # deferred save is (re)scheduled below.
        self.__settings.set(name, value, autosave=delay == 0, override=override)
        self._delay_saving(delay)
def _delay_saving(self, delay):
if not delay:
return
delay = delay * 1000
if not self._save_timer:
self._save_timer = wx.CallLater(delay, self._save_setting_after_delay)
else:
self._save_timer.Restart(delay)
    def _save_setting_after_delay(self):
        # Timer callback: persist the settings and mark the timer as consumed.
        self.__settings.save()
        self._save_timer = None
    def enable(self):
        """This method is called by RIDE when the plugin is enabled.
        Possible integration to UI should be done in this method and removed
        when the `disable` method is called.
        """
        # Default implementation does nothing; subclasses override.
        pass
    def disable(self):
        """Called by RIDE when the plugin is disabled.
        Undo whatever was done in the `enable` method.
        """
        # Default implementation does nothing; subclasses override.
        pass
def config_panel(self, parent):
"""Called by RIDE to get the plugin configuration panel.
The panel returned will be integrated into the plugin manager UI, and
can be used e.g. to display configurable settings.
By default, there is no configuration panel.
"""
_ = parent
return None
def register_action(self, action_info):
    """Registers a menu entry and optionally a shortcut and a toolbar icon.

    ``action_info`` is an instance of `ActionInfo` class containing needed
    information to create menu entry, keyboard shortcut and/or toolbar
    button for the action.

    All registered actions can be un-registered using the
    `unregister_actions` method.

    If register action is used in menu event handler, and it modifies the
    menu that triggered the event, it is safest to wrap register action
    call inside wx.CallAfter function.

    Returns created `Action` object.
    """
    action = self.__frame.actions.register_action(action_info)
    self.__actions.append(action)
    return action

def register_shortcut(self, shortcut, callback):
    """Registers a keyboard ``shortcut`` bound to ``callback``.

    Unlike `register_action` this creates no menu entry or toolbar button.
    Returns the created `Action` object.
    """
    action_info = ActionInfo(None, None, action=callback, shortcut=shortcut)
    action = self.__frame.actions.register_shortcut(action_info)
    self.__actions.append(action)
    return action

def register_actions(self, action_infos):
    """Registers multiple menu entries and shortcuts/icons.

    ``action_infos`` is a list of same `ActionInfo` objects that
    `register_action` method accepts.

    Returns list of created `Action` objects.
    """
    return [self.register_action(info) for info in action_infos]

def register_search_action(self, description, handler, icon, default=False):
    # NOTE(review): delegates to the frame toolbar's search registry;
    # presumably ``handler`` receives the search text -- confirm against
    # the toolbar implementation.
    self.__frame.toolbar.register_search_handler(description, handler, icon, default=default)

def unregister_actions(self):
    """Unregisters all actions registered by this plugin."""
    for action in self.__actions:
        action.unregister()
    self.__actions = []
def add_tab(self, tab, title, allow_closing=True):
    """Adds the ``tab`` with the ``title`` to the tabbed notebook and shows it.

    The ``tab`` can be any wxPython container. ``allow_closing`` defines
    can users close the tab while the plugin is enabled.
    """
    self.notebook.add_tab(tab, title, allow_closing)

def show_tab(self, tab):
    """Makes the ``tab`` added using `add_tab` visible."""
    self.notebook.show_tab(tab)

def delete_tab(self, tab):
    """Deletes the ``tab`` added using `add_tab`."""
    self.notebook.delete_tab(tab)

def allow_tab_closing(self, tab):
    """Allows closing a tab that has been created using allow_closing=False."""
    self.notebook.allow_closing(tab)

def disallow_tab_closing(self, tab):
    """Disallows closing a tab by user."""
    self.notebook.disallow_closing(tab)

def tab_is_visible(self, tab):
    """Returns is the ``tab`` added using `add_tab` visible or not."""
    return self.notebook.tab_is_visible(tab)
def new_suite_can_be_opened(self):
    """Checks are there modified files and asks user what to do if there are.

    Returns False if there were modified files and user canceled the dialog,
    otherwise returns True.
    """
    return self.__app.ok_to_open_new()

def open_suite(self, path):
    """Opens a test suite specified by the ``path``.

    No suite is opened if parsing the suite fails.
    """
    self.__frame.open_suite(path)

def get_selected_datafile(self):
    """Returns the data file that is currently selected in the tree.

    If a test case or a keyword is selected, the data file containing the
    selected item is returned.

    :rtype:
        `InitFile`, `TestCaseFile` or `ResourceFile`
    """
    # No tree yet (e.g. during startup) -> implicitly returns None.
    if not self.tree:
        return
    return self.tree.get_selected_datafile()

def save_selected_datafile(self):
    """Saves the data file that is currently selected in the tree.

    If a test case or a keyword is selected, the data file containing the
    selected item is saved.
    """
    self.__frame.save(self.tree.get_selected_datafile_controller())

def is_unsaved_changes(self):
    """Returns True if there is any unsaved changes, otherwise False."""
    return self.__frame.has_unsaved_changes()

def save_all_unsaved_changes(self):
    """Saves all the data files that are modified."""
    self.__frame.save_all()
def get_selected_item(self):
    """Returns the item that is currently selected in the tree.

    The item can be a test suite, a resource file, a test case or a keyword.

    :rtype:
        `InitFile`, `TestCaseFile`, `ResourceFile`, `TestCase` or `UserKeyword`
    """
    # No tree yet (e.g. during startup) -> implicitly returns None.
    if not self.tree:
        return
    return self.tree.get_selected_item()

def content_assist_values(self, value=''):
    """Returns content assist values for currently selected item."""
    return self.__namespace.get_suggestions_for(self.get_selected_item(), value)

def get_user_keyword(self, name):
    """Returns user keyword instance whose name is ``name`` or None."""
    keyword_info = self.__namespace.find_user_keyword(self.datafile, name)
    return keyword_info.item if keyword_info else None

def select_user_keyword_node(self, uk):
    """Selects node containing the given ``uk`` in the tree."""
    if not self.tree:
        return
    self.tree.select_user_keyword_node(uk)

def get_keyword(self, name):
    """Returns the keyword object with the given name or None."""
    return self.__namespace.find_keyword(self.datafile, name)

def get_keyword_details(self, name):
    """Returns details (documentation, source) of keyword with name ``name``.

    Returns None if no matching keyword is found.
    """
    return self.__namespace.keyword_details(self.datafile, name)

def is_user_keyword(self, name):
    """Returns whether ``name`` is a user keyword of current datafile.

    Checks both the datafile's own and imported user keywords for match.
    """
    return self.__namespace.is_user_keyword(self.datafile, name)

def is_library_keyword(self, name):
    """Returns whether ``name`` is a keyword imported by current datafile."""
    return self.__namespace.is_library_keyword(self.datafile, name)

def all_testcases(self):
    """Returns all test cases from all suites in one, unsorted list."""
    return self.model.all_testcases()
def register_content_assist_hook(self, hook):
    """Allows plugin to insert values in content assist dialog.

    ``hook`` must be a callable, which should take two arguments and
    return a list of instances of `ItemInfo` class. When content
    assist is requested by user, ``hook`` will be called with the current
    dataitem and current value of cell as parameters.
    """
    self.__namespace.register_content_assist_hook(hook)

def get_plugins(self):
    """Returns list containing plugin wrapper for every loaded plugin.

    Wrapper is an instance of `PluginConnector` if the plugin has loaded
    successfully, otherwise it's an instance of `BrokenPlugin`."""
    return self.__app.get_plugins()

def publish(self, topic, data):
    """Publishes a message with given topic and client data.

    Purpose of this method is to support inter-plugin communication which
    is not possible to achieve using custom message classes.

    `data` will be passed as an argument to registered listener methods.
    """
    PUBLISHER.publish(topic, data)
def subscribe(self, listener, *topics):
    """Start to listen to messages with the given ``topics``.

    See the documentation of the `robotide.publish` module for more
    information about subscribing to messages and the messaging system

    `unsubscribe` and `unsubscribe_all` can be used to stop listening to
    certain or all messages.
    """
    for topic in topics:
        PUBLISHER.subscribe(listener, topic)

def unsubscribe(self, listener, *topics):
    """Stops listening to messages with the given ``topics``.

    ``listener`` and ``topics`` have the same meaning as in `subscribe`
    and a listener/topic combination is unsubscribed only when both of them
    match.
    """
    for topic in topics:
        PUBLISHER.unsubscribe(listener, topic)

def unsubscribe_all(self):
    """Stops to listen to all messages this plugin has subscribed to."""
    PUBLISHER.unsubscribe_all(self)
def register_editor(self, item_class, editor_class, activate=True):
    """Register ``editor_class`` as an editor class for model items of type ``item_class``.

    If ``activate`` is True, the given editor is automatically activated
    using `set_active_editor`.
    """
    self.__app.register_editor(item_class, editor_class, activate)

def unregister_editor(self, item_class, editor_class):
    """Unregisters ``editor_class`` as an editor class for model items of type ``item_class``."""
    self.__app.unregister_editor(item_class, editor_class)

def set_active_editor(self, item_class, editor_class):
    """Activates the specified editor to be used with the specified model item.

    The editor must have been registered first by using `register_editor`.
    """
    self.__app.activate_editor(item_class, editor_class)

def get_editors(self, item_class):
    """Return all registered editors for the given model item class.

    The last editor in the list is the currently active editor.
    """
    return self.__app.get_editors(item_class)

def get_editor(self, item_class):
    """Return the current editor class for the given model item class."""
    return self.__app.get_editor(item_class)
def highlight_cell(self, tcuk, obj=None, row=-1, column=-1):
    """Highlight a specific row/column of a test case or user keyword."""
    # Without a tree there is nothing to select, so do nothing.
    if not self.tree:
        return
    self.tree.select_node_by_data(tcuk)
    self.__app.editor.highlight_cell(obj, row, column)

def highlight(self, data, text):
    """Highlight a specific text of a given data's editor."""
    if not self.tree:
        return
self.tree.highlight(data, text) | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/pluginapi/plugin.py | 0.785966 | 0.362828 | plugin.py | pypi |
from robotide import utils
from robotide.spec.iteminfo import LocalVariableInfo
def local_namespace(controller, namespace, row=None):
    """Factory: a row-aware namespace when ``row`` is given (0 is valid)."""
    if row is None:
        return LocalMacroNamespace(controller, namespace)
    return LocalRowNamespace(controller, namespace, row)
class LocalMacroNamespace(object):
    """Namespace view scoped to a single macro (test case or keyword)."""

    def __init__(self, controller, namespace):
        self._controller = controller
        self.namespace = namespace

    def get_suggestions(self, start):
        """Return suggestions for text starting with ``start``."""
        return self.namespace.get_suggestions_for(self._controller, start)

    def has_name(self, value):
        """True when ``value`` is a suggestion name or one of its assigns."""
        for suggestion in self.namespace.get_suggestions_for(self._controller, value):
            if suggestion.name == value:
                return True
            # Not every suggestion type carries an `assign` attribute.
            try:
                if value in suggestion.assign:
                    return True
            except AttributeError:
                pass
        return False
class LocalRowNamespace(LocalMacroNamespace):
    """Macro namespace that also sees variables assigned on earlier rows."""

    def __init__(self, controller, namespace, row):
        LocalMacroNamespace.__init__(self, controller, namespace)
        # Current row index; assignments on rows >= this one are ignored.
        self._row = row

    def get_suggestions(self, start):
        suggestions = LocalMacroNamespace.get_suggestions(self, start)
        if self._could_be_variable(start):
            suggestions = self._harvest_local_variables(start, suggestions)
        else:
            # Plain text prefix: try every variable decoration so matching
            # assignments like '${name}' are still suggested.
            suggestions = self._harvest_local_variables('${'+start, suggestions)
            suggestions = self._harvest_local_variables('@{'+start, suggestions)
            suggestions = self._harvest_local_variables('&{'+start, suggestions)
        return suggestions

    def _harvest_local_variables(self, start, suggestions):
        # Collect assignments from rows strictly above the current row.
        matching_assignments = set()
        for row, step in enumerate(self._controller.steps):
            if self._row == row:
                break
            matching_assignments = matching_assignments.union(
                val.replace('=', '').strip() for val in step.assignments if
                val.startswith(start))
        if matching_assignments:
            local_variables = [LocalVariableInfo(name) for name
                               in matching_assignments]
            suggestions = sorted(self._remove_duplicates(suggestions,
                                                         local_variables))
        return suggestions

    @staticmethod
    def _could_be_variable(start):
        # Empty prefix or a variable sigil can begin a variable reference.
        return len(start) == 0 or start[0] in ['$', '@', '&']

    def has_name(self, value):
        if self._row is not None:
            for row, step in enumerate(self._controller.steps):
                if self._row == row:
                    break
                if step.is_assigning(value):
                    return True
        return LocalMacroNamespace.has_name(self, value)

    @staticmethod
    def _remove_duplicates(suggestions, local_variables):
        # Local variables shadow globals with the same normalized name.
        def is_unique(gvar):
            return utils.normalize(gvar.name) not in [utils.normalize(lvar.name) for lvar in local_variables]
        unique = [gvar for gvar in suggestions if is_unique(gvar)]
return unique + local_variables | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/namespace/local_namespace.py | 0.52342 | 0.202542 | local_namespace.py | pypi |
from functools import total_ordering
from robotide import robotapi
class SuggestionSource(object):
    """Picks the best available source of content-assist suggestions."""

    def __init__(self, plugin, controller):
        self._plugin = plugin
        self._controller = controller

    def get_suggestions(self, value, row=None):
        """Return suggestions for ``value``; prefer the controller's
        local namespace, fall back to plugin-wide content assist."""
        if not self._controller:
            # DEBUG: Remove old functionality when no more needed
            return self._plugin.content_assist_values(value)
        try:
            local_ns = self._controller.get_local_namespace_for_row(row)
            return local_ns.get_suggestions(value)
        except AttributeError:
            # Controller without per-row namespaces.
            return self._controller.get_local_namespace.get_suggestions(value)
@total_ordering
class _Suggester(object):
name = None
@staticmethod
def _suggestion(name):
s = lambda: 0
s.name = name
s.longname = name
s.details = None
return s
def __eq__(self, other):
return self.name.lower() == other.name.lower()
def __hash__(self):
return hash(repr(self))
def __gt__(self, other):
return self.name.lower() > other.name.lower()
class HistorySuggester(_Suggester):
    """Suggests names stored earlier during this session."""

    def __init__(self):
        self._suggestions = []

    def get_suggestions(self, name, *args):
        _ = args
        if name is None:
            return list(self._suggestions)
        needle = name.lower()
        return [s for s in self._suggestions if needle in s.name.lower()]

    def store(self, name):
        self._suggestions.append(self._suggestion(name))
        # DEBUG For now remove sorting self._suggestions.sort()
class _ImportSuggester(_Suggester):
    """Base for suggesters offering names not yet imported.

    Subclasses implement `get_all_available`; suggestions are the
    available names minus the ones the datafile already imports.
    """

    def __init__(self, controller):
        self._df_controller = controller.datafile_controller

    def get_suggestions(self, name, *args):
        _ = args
        already_imported = self.get_already_imported()
        all_resources = self.get_all_available()
        suggestion_names = all_resources - already_imported
        return [self._suggestion(n) for n in sorted(suggestion_names) if name in n]

    def get_already_imported(self):
        """Return the set of names the current datafile already imports."""
        return set(imp.name for imp in self._df_controller.imports)

    def get_all_available(self):
        # Template method for subclasses. The original returned the
        # `NotImplemented` constant, which later surfaced as a confusing
        # TypeError in `get_suggestions`'s set subtraction; raising makes
        # the missing override explicit.
        raise NotImplementedError
class ResourceSuggester(_ImportSuggester):
    def get_all_available(self):
        # All known resource files as paths relative to the current datafile.
        # NOTE(review): reaches into the private `_project` attribute.
        return set(self._df_controller.relative_path_to(r) for r in self._df_controller._project.resources)
class CachedLibrarySuggester(_ImportSuggester):
    def get_all_available(self):
        # Names of all libraries found in the library spec cache.
        return set(self._df_controller.get_all_cached_library_names())
class BuiltInLibrariesSuggester(_Suggester):
    """Suggests Robot Framework standard libraries by name substring."""

    def get_suggestions(self, name, *args):
        _ = args
        needle = name.lower()
        # BuiltIn is always available and Reserved/Easter are internal,
        # so none of them are offered for import.
        matching = [n for n in sorted(robotapi.STDLIB_NAMES)
                    if needle in n.lower() and n not in ['BuiltIn', 'Reserved', 'Easter']]
        return [self._suggestion(n) for n in matching]
class LibrariesSuggester(_Suggester):
    """Combines history, cached and standard libraries into one list."""

    def __init__(self, controller, history_suggester):
        self._history_suggester = history_suggester
        self._cached_suggester = CachedLibrarySuggester(controller)
        self._builtin_suggester = BuiltInLibrariesSuggester()

    def get_suggestions(self, name, *args):
        # Union of all three sources, minus libraries already imported,
        # sorted case-insensitively.
        history = set(h.name for h in self._history_suggester.get_suggestions(name, *args))
        cached = set(c.name for c in self._cached_suggester.get_suggestions(name, *args))
        builtin = set(b.name for b in self._builtin_suggester.get_suggestions(name, *args))
        already_imported = self._cached_suggester.get_already_imported()
        return [self._suggestion(s)
                for s in sorted((history | cached | builtin)-already_imported,
key=lambda s: s.lower())] | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/namespace/suggesters.py | 0.679179 | 0.165829 | suggesters.py | pypi |
import re
import wx
from ..widgets import ImageProvider
from .shortcut import Shortcut
def action_info_collection(data, event_handler, container=None):
    """Parses the ``data`` into a list of `ActionInfo` and `SeparatorInfo` objects.

    The data is parsed based on the simple DSL documented below.

    :Parameters:
      data
        The data to be parsed into `ActionInfo` and `SeparatorInfo` objects.
      event_handler
        The event handler that implements the actions. See `finding handlers`_
        for more information.
      container
        the wxPython element containing the UI components associated with
        the `ActionInfo`.

    DSL syntax
    ----------
    ::

      [menu]
      name | documentation | shortcut | icon

    Fields
    ------
    menu
      The name of the menu under which the entries below it are inserted.
    name
      The name of the menu entry to be added. If name is ``---``, a
      `SeparatorInfo` object is created instead of an `ActionInfo` object.
      If name is post fixed with shortcuts between parenthesis and separated
      with ' or ', these shortcuts are parsed to machine local presentation
      and shown after the name. This can be used instead of shotrcut-element
      if you want to add shortcuts that you want to bind yourself and/or add
      several shortcuts.
    documentation
      Documentation for the action.
    shortcut
      Keyboard shortcut to invoke the action.
    icon
      Icon for the toolbar button.
    position
      Value for menu item ordering.

    See the `ActionInfo` attributes with same/similar names for more
    information about the fields and their possible values. Three
    last fields are optional.

    Finding handlers
    ----------------
    (Note: before v2.0.7)
    The given ``event_handler`` must have handler methods that map to the
    specified action names. The mapping is done by prefixing the name with
    ``On``, removing spaces, and capitalizing all words. For example ``Save``
    and ``My Action`` must have handler methods ``OnSave`` and ``OnMyAction``,
    respectively. If name has content between parenthesis at the end, this
    content is ignored when creating handler mapping.
    (Note: since v2.0.7)
    The given ``event_handler`` must have handler methods that map to the
    specified action names. The mapping is done by prefixing the name with
    ``on``, replacing spaces by ``_``, and lowercasing all words. For example ``Save``
    and ``My Action`` must have handler methods ``on_save`` and ``on_my_action``,
    respectively. If name has content between parenthesis at the end, this
    content is ignored when creating handler mapping.

    Specifying container
    --------------------
    By default the given ``container`` is passed to the `ActionInfo.__init__`
    method directly. This can be altered by prefixing the ``name`` with an
    exclamation mark (e.g. ``!Save`` or ``!My Action``) to make that action
    global. With these actions the container given to the `ActionInfo.__init__`
    is always ``None``.

    Example
    -------
    ::

      [File]
      !&Open | Open file containing tests | Ctrl-O | ART_FILE_OPEN
      !Open &Resource | Open a resource file | Ctrl-R
      ---
      &Save | Save selected datafile | Ctrl-S | ART_FILE_SAVE

      [Tools]
      !Manage Plugins | | | | POSITION-80

      [Content]
      Content Assist (Ctrl-Space or Ctrl-Alt-Space) | Has two shortcuts.
    """
    menu = None
    actions = []
    for row in data.splitlines():
        row = row.strip()
        if not row:
            continue
        elif row.startswith('[') and row.endswith(']'):
            # A '[Menu]' header: subsequent rows belong to this menu.
            menu = row[1:-1].strip()
        else:
            actions.append(_create_action_info(event_handler, menu, container, row))
    return actions
def _create_action_info(eventhandler, menu, container, row):
    """Build a `SeparatorInfo` or `ActionInfo` from one DSL row."""
    if row.startswith('---'):
        return SeparatorInfo(menu)
    tokens = [token.strip() for token in row.split('|')]
    # Pad so the missing optional fields unpack as empty strings.
    while len(tokens) < 5:
        tokens.append('')
    name, doc, shortcut, icon, position = tokens
    if name.startswith('!'):
        # A leading '!' makes the action global (no container).
        name = name[1:]
        container = None
    handler_name, name = get_eventhandler_name_and_parsed_name(name)
    action = getattr(eventhandler, handler_name)
    return ActionInfo(menu, name, action, container, shortcut, icon, doc, position)
def get_eventhandler_name_and_parsed_name(name):
    """Map a menu entry name to its handler method name.

    ``Save`` -> ``on_save``, ``My Action`` -> ``on_my_action``; a
    parenthesized shortcut suffix is ignored for the mapping.
    """
    handler_base, name = _parse_shortcuts_from_name(name)
    # DEBUG: before v2.0.7 the mapping was 'On%s' with spaces removed.
    handler_base = handler_base.strip().replace(' ', '_').replace('&', '')
    return 'on_%s' % handler_base.lower(), name
def _parse_shortcuts_from_name(name):
if '(' in name:
eventhandler_name, shortcuts = name.split('(', 1)
shortcuts = shortcuts.split(')')[0]
elements = shortcuts.split(' or ')
name = '%s (%s)' % (eventhandler_name, ' or '.join(Shortcut(e).printable for e in
elements))
return eventhandler_name, name
return name, name
class MenuInfo(object):
    """Common base for `ActionInfo` and `SeparatorInfo`."""

    def __init__(self):
        # Default insertion point appends to the end of the menu.
        self.insertion_point = _InsertionPoint()

    def is_separator(self):
        return False

    def set_menu_position(self, before=None, after=None):
        """Place this entry before or after the named entry.

        Give either ``before`` or ``after`` (the target entry name
        without its possible shortcut part).
        """
        self.insertion_point = _InsertionPoint(before, after)
class ActionInfo(MenuInfo):
    """Used to create menu entries, keyboard shortcuts and/or toolbar buttons."""

    def __init__(self, menu_name, name, action=None, container=None,
                 shortcut=None, icon=None, doc='', position=-1):
        """Initializes information needed to create actions..

        :Parameters:
          menu_name
            The name of the menu where the new entry will be added. The menu is
            created if it does not exist.
          name
            The name of the new menu entry. The name may contain an accelerator
            key prefixed by an ampersand like ``New &Action``. If an accelerator
            is not specified, or the one requested is already taken, the next
            free key is selected.
          action
            The callable which will be called when a user does any of the
            associated UI actions.
          container
            The wxPython element containing the UI components associated with
            the ``action``. When any of the registered UI actions is executed,
            the ``action`` is called only if the ``container`` or any of its
            child components has focus. It is possible to make the ``action``
            always active by using ``None`` as the ``container``.
          shortcut
            The keyboard shortcut associated to the ``action``. The ``shortcut``
            must be a string constructed from optional modifiers (``Ctrl, Shift,
            Alt``) and the actual shortcut key separating the parts with a hyphen.
            The shortcut key can be either a single character or any of the
            `wx keycodes`__ without the ``WXK_`` prefix. Examples: ``Ctrl-C``,
            ``Shift-Ctrl-6``, ``Alt-Left``, ``F6``.
          icon
            The icon added to the toolbar as a toolbar button. It can be either
            a 16x16 bitmap or a string presenting one of the icons provided by
            `wxPython's ArtProvider`__ like ``ART_FILE_OPEN``.
          doc
            The documentation shown on the statusbar when selection is on
            the associated menu entry or toolbar button.
          position
            The positional value of an item in the menu. Provided for ordering
            Tools menu. Defaults to -1.

        __ https://docs.wxwidgets.org/stable/wx_keycodes.html#keycodes
        __ https://www.wxpython.org/docs/api/wx.ArtProvider-class.html
        """
        MenuInfo.__init__(self)
        self.menu_name = menu_name
        self.name = name
        self.action = action
        self.container = container
        self.shortcut = Shortcut(shortcut)
        # The bitmap is resolved lazily in the `icon` property.
        self._icon = None
        self._icon_source = icon
        self.doc = doc
        self._position = position

    @property
    def icon(self):
        # Lazily resolved and cached bitmap for the toolbar button.
        if not self._icon:
            self._icon = self._get_icon()
        return self._icon

    def _get_icon(self):
        if not self._icon_source:
            return None
        if isinstance(self._icon_source, str):
            # 'CUSTOM_*' names come from RIDE's own image provider,
            # anything else is a wx.ArtProvider art id like 'ART_FILE_OPEN'.
            if self._icon_source.startswith("CUSTOM_"):
                return ImageProvider().get_image_by_name(self._icon_source[len("CUSTOM_"):])
            return wx.ArtProvider.GetBitmap(getattr(wx, self._icon_source),
                                            wx.ART_TOOLBAR, (16, 16))
        return self._icon_source

    @property
    def position(self):
        # Accepts either an int or a DSL string like 'POSITION-80'.
        # NOTE(review): a non-empty string without a trailing integer would
        # raise ValueError here -- presumably only 'POSITION-<n>' is used.
        if isinstance(self._position, int):
            return self._position
        elif isinstance(self._position, str) and len(self._position) > 0:
            return int(self._position.split("POSITION-")[-1])
        return -1
class SeparatorInfo(MenuInfo):
    """Represents a separator line in a menu."""

    def __init__(self, menu_name):
        """``menu_name`` is the target menu; it is created if missing."""
        MenuInfo.__init__(self)
        self.menu_name = menu_name

    def is_separator(self):
        return True
class _InsertionPoint(object):
    # Strips a trailing '  (Ctrl-X)' style shortcut part from a menu label.
    _shortcut_remover = re.compile(r" {2,}\([^()]+\)$")

    def __init__(self, before=None, after=None):
        # `before` wins when both anchors are given; no anchor means append.
        self._item = before or after
        self._insert_before = before is not None
def get_index(self, menu):
    """Return the menu position where this entry should be inserted.

    Falls back to appending at the end when no anchor item is set or
    the anchor cannot be found in ``menu``.
    """
    if not self._item:
        return menu.GetMenuItemCount()
    index = self._find_position_in_menu(menu)
    # Compare against None explicitly: the anchor may legitimately be at
    # position 0, which the previous truthiness test (`if not index`)
    # treated as "not found" and wrongly appended to the end.
    if index is None:
        return menu.GetMenuItemCount()
    if not self._insert_before:
        index += 1
    return index
def _find_position_in_menu(self, menu):
    # Case-insensitive search for the anchor item; None when not found.
    for index in range(0, menu.GetMenuItemCount()):
        item = menu.FindItemByPosition(index)
        if self._get_menu_item_name(item).lower() == self._item.lower():
            return index
    return None
def _get_menu_item_name(self, item):
    # GetLabel was replaced by GetItemLabel in wxPython 4.1.
    if wx.VERSION < (4, 1, 0):
        return self._shortcut_remover.split(item.GetLabel())[0]
return self._shortcut_remover.split(item.GetItemLabel())[0] | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/action/actioninfo.py | 0.844826 | 0.441191 | actioninfo.py | pypi |
import wx
from wx import grid, Colour
from .clipboard import ClipboardHandler
from ..context import IS_WINDOWS
from ..utils import unescape_newlines_and_whitespaces
from ..widgets import PopupCreator, PopupMenuItems
class GridEditor(grid.Grid):
_col_add_threshold = 6
_popup_items = [
'Insert Cells\tCtrl-Shift-I', 'Delete Cells\tCtrl-Shift-D',
'Insert Rows\tCtrl-I', 'Delete Rows\tCtrl-D', '---',
'Select All\tCtrl-A', '---', 'Cut\tCtrl-X', 'Copy\tCtrl-C',
'Paste\tCtrl-V', 'Insert\tCtrl-Shift-V', '---', 'Delete\tDel']
def __init__(self, parent, num_rows, num_cols, popup_creator=None):
grid.Grid.__init__(self, parent)
try:
self.settings = parent.plugin.global_settings['Grid']
self.general_settings = parent.plugin.global_settings['General']
except AttributeError:
from ..preferences import RideSettings
_settings = RideSettings()
self.general_settings = _settings['General']
self.settings = _settings['Grid']
self.filter_newlines = self.settings.get("filter newlines", True)
self.color_background = self.settings['background unknown']
self.color_foreground = self.settings['text empty']
self.color_background_help = self.general_settings['background help']
self.color_foreground_text = self.general_settings['foreground text']
self.color_secondary_background = self.general_settings['secondary background']
self.color_secondary_foreground = self.general_settings['secondary foreground']
self._bind_to_events()
self.selection = _GridSelection(self)
self._clipboard_handler = ClipboardHandler(self)
self._history = _GridState()
self.CreateGrid(int(num_rows), int(num_cols))
self.SetDefaultCellBackgroundColour(Colour(self.color_background))
self.SetDefaultCellTextColour(Colour(self.color_foreground))
self.GetGridColLabelWindow().SetBackgroundColour(Colour(self.color_secondary_background))
self.GetGridColLabelWindow().SetForegroundColour(Colour(self.color_secondary_foreground))
self.GetGridRowLabelWindow().SetBackgroundColour(Colour(self.color_secondary_background))
self.GetGridRowLabelWindow().SetForegroundColour(Colour(self.color_secondary_foreground))
self._popup_creator = popup_creator or PopupCreator()
def _bind_to_events(self):
self.Bind(grid.EVT_GRID_SELECT_CELL, self.on_select_cell)
self.Bind(grid.EVT_GRID_RANGE_SELECT, self.on_range_select)
self.Bind(grid.EVT_GRID_CELL_RIGHT_CLICK, self.on_cell_right_click)
def register_context_menu_hook(self, cb):
self._popup_creator.add_hook(cb)
def unregister_context_menu_hook(self, cb):
self._popup_creator.remove_hook(cb)
def write_cell(self, row, col, value, update_history=True):
if update_history:
self._update_history()
self._expand_if_necessary(row, col)
if self.filter_newlines:
# unescape \n to support multi lines display in grid cells
value = unescape_newlines_and_whitespaces(value)
self.SetCellValue(row, col, value)
def _expand_if_necessary(self, row, col):
# Changed col and row fill because of blank spacing not changing color
# print(f"DEBUG: GridEditor ENTER_expand_if_necessary row={row}, col={col}")
while self.NumberRows <= max(1, row+1, 10-row): # DEBUG 25 makes slower rendering
self.AppendRows(1)
while self.NumberCols <= max(1, col+1, 10-col): # DEBUG 40 makes slower rendering
self.AppendCols(max(1, self._col_add_threshold)) # DEBUG: was infinite when value was 0
def has_focus(self):
return self.FindFocus() == self
def _update_history(self):
self._history.change(self._get_all_content())
def _get_all_content(self):
return self._get_block_content(range(self.NumberRows),
range(self.NumberCols))
@property
def cell_under_cursor(self):
x, y = self.ScreenToClient(wx.GetMousePosition())
x -= self.RowLabelSize
return self.XYToCell(*self.CalcUnscrolledPosition(x, y))
def select(self, row, column):
self.SelectBlock(row, column, row, column)
self.SetGridCursor(row, column)
self.MakeCellVisible(row, column)
def copy(self):
# print("DEBUG: GridBase copy() called\n")
self._clipboard_handler.copy()
def cut(self):
self._update_history()
self._clipboard_handler.cut()
self._clear_selected_cells()
def _clear_selected_cells(self):
for row, col in self.selection.cells():
self.write_cell(row, col, '', update_history=False)
def paste(self):
self._update_history()
self._clipboard_handler.paste()
def delete(self):
self._update_history()
_iscelleditcontrolshown = self.IsCellEditControlShown()
if _iscelleditcontrolshown:
if IS_WINDOWS:
self._delete_from_cell_editor()
else:
self._clear_selected_cells()
def _delete_from_cell_editor(self):
editor = self.get_cell_edit_control()
start, end = editor.Selection
if start == end:
end += 1
editor.Remove(start, end)
def _is_whole_row_selection(self):
return self.SelectedRows
def get_cell_edit_control(self):
return self.GetCellEditor(*self.selection.cell).GetControl()
def get_selected_content(self):
return self._get_block_content(self.selection.rows(),
self.selection.cols())
def get_single_selection_content(self):
cells = self.get_selected_content()
if len(cells) != 1 or len(cells[0]) != 1:
return None
return cells[0][0]
def _current_cell_value(self):
return self.GetCellValue(*self.selection.cell)
def _get_block_content(self, row_range, col_range):
return [[self.GetCellValue(row, col) for col in col_range]
for row in row_range]
@staticmethod
def _strip_trailing_empty_cells(rowdata):
while rowdata and not rowdata[-1]:
rowdata.pop()
return rowdata
def undo(self):
prev_data = self._history.back()
if prev_data:
self.ClearGrid()
self._write_data(prev_data, update_history=False)
def _write_data(self, data, update_history=True):
self.BeginBatch()
for row_index, row_data in enumerate(data):
for col_index, cell_value in enumerate(row_data):
self.write_cell(row_index, col_index, cell_value, update_history)
self.EndBatch()
def on_select_cell(self, event):
if self._is_whole_row_selection():
self.SelectBlock(self.selection.topleft.row, self.selection.topleft.col,
self.selection.bottomright.row, self.selection.bottomright.col,
addToSelected=True)
else:
self.selection.set_from_single_selection(event)
event.Skip()
def on_range_select(self, event):
if not event.Selecting():
self.selection.clear()
return
if event.ControlDown():
self.SetGridCursor(event.TopRow, event.LeftCol)
self.SelectBlock(event.TopRow, event.LeftCol,
event.BottomRow, event.RightCol, addToSelected=False)
else:
self.selection.set_from_range_selection(self, event)
self._ensure_selected_row_is_visible(event.BottomRow)
def _ensure_selected_row_is_visible(self, bottom_row):
if not self.IsVisible(bottom_row, 0) and bottom_row < self.NumberRows and \
self._is_whole_row_selection():
self.MakeCellVisible(bottom_row, 0)
def on_cell_right_click(self, event):
if hasattr(event, 'Row') and hasattr(event, 'Col'):
if (event.Row, event.Col) not in self.selection.cells():
self.select(event.Row, event.Col)
self.selection.set_from_single_selection(event)
self._popup_creator.show(self, PopupMenuItems(self, self._popup_items),
self.get_selected_content())
# DEBUG: This code is overriden at fieldeditors
def on_insert_cells(self, event):
self._insert_or_delete_cells(self._insert_cells, event)
# DEBUG:This code is overriden at fieldeditors
def on_delete_cells(self, event):
# print("DEBUG delete cells %s" % event)
self._insert_or_delete_cells(self._delete_cells, event)
def _insert_or_delete_cells(self, action, event):
self._update_history()
# print("DEBUG insert or delete selected %s" % self.selection.rows())
for index in self.selection.rows():
data = action(self._row_data(index))
self._write_row(index, data)
self._refresh_layout()
event.Skip()
def _insert_cells(self, data):
cols = self.selection.cols()
left = right = cols[0]
data[left:right] = [''] * len(cols)
return self._strip_trailing_empty_cells(data)
def _delete_cells(self, data):
cols = self.selection.cols()
# print("DEBUG delete cols selected %s" % cols)
left, right = cols[0], cols[-1] # + 1 # DEBUG removed extra cell
# print("DEBUG delete left, right (%d,%d) values %s" % (left, right, data[left:right]))
data[left:right] = []
return data + [''] * len(cols)
    def _row_data(self, row):
        """Return the values of all cells on ``row`` as a list."""
        return [self.GetCellValue(row, col) for col in range(self.NumberCols)]
    def _write_row(self, row, data):
        # update_history=False: the caller snapshots the grid once for undo.
        for col, value in enumerate(data):
            self.write_cell(row, col, value, update_history=False)
    def _refresh_layout(self):
        """Restore focus and cursor, then re-layout the parent after a bulk edit."""
        self.SetFocus()
        self.SetGridCursor(*self.selection.cell)
        self.GetParent().Sizer.Layout()
# TODO: refactor this internal selection state away if possible
class _GridSelection(object):
    """Tracks the rectangular cell selection of a grid.

    ``topleft`` and ``bottomright`` are `_Cell` instances delimiting the
    current selection (both inclusive).
    """

    cell = property(lambda self: (self.topleft.row, self.topleft.col))

    def __init__(self, gridd):
        # Fix: assign the grid before calling _set(); _count_bottomright()
        # reads self._grid whenever a bottomright coordinate is given, so the
        # old ordering only worked thanks to its early return for None.
        self._grid = gridd
        self._set((0, 0))

    def _set(self, topleft, bottomright=None):
        self.topleft = _Cell(topleft[0], topleft[1])
        self.bottomright = self._count_bottomright(topleft, bottomright)

    def _count_bottomright(self, topleft, bottomright):
        if not bottomright:
            return _Cell(topleft[0], topleft[1])
        # Clamp to the grid dimensions so the selection never points past
        # the last row/column.
        return _Cell(min(self._grid.NumberRows - 1, bottomright[0]),
                     min(self._grid.NumberCols - 1, bottomright[1]))

    def set_from_single_selection(self, event):
        self._set((event.Row, event.Col))

    def set_from_range_selection(self, gridd, event):
        self._set(*self._get_bounding_coordinates(gridd, event))

    def clear(self):
        """Collapse the selection to the current grid cursor cell."""
        selection = (self._grid.GetGridCursorRow(), self._grid.GetGridCursorCol())
        self._set(selection)

    @staticmethod
    def _get_bounding_coordinates(gridd, event):
        # Whole-row selections span all columns regardless of event coords.
        whole_row_selection = sorted(gridd.SelectedRows)
        if whole_row_selection:
            return (whole_row_selection[0], 0), \
                   (whole_row_selection[-1], gridd.NumberCols - 1)
        return (event.TopLeftCoords.Row, event.TopLeftCoords.Col), \
               (event.BottomRightCoords.Row, event.BottomRightCoords.Col)

    def rows(self):
        """Returns a list containing indices of rows currently selected."""
        return range(self.topleft.row, self.bottomright.row + 1)

    def cols(self):
        """Returns a list containing indices of columns currently selected."""
        return range(self.topleft.col, self.bottomright.col + 1)

    def cells(self):
        """Return selected cells as a list of tuples (row, column)."""
        return [(row, col) for col in self.cols()
                for row in self.rows()]
class _Cell(object):
def __init__(self, row, col):
self.row = row
self.col = col
def __iter__(self):
for item in self.row, self.col:
yield item
class _GridState(object):
def __init__(self):
self._back = []
self._forward = []
def change(self, state):
if not self._back or state != self._back[-1]:
self._back.append(state)
self._forward = []
def back(self):
if not self._back:
return None
self._forward.append(self._back.pop())
return self._forward[-1]
def forward(self):
if not self._forward:
return None
state = self._forward.pop()
self._back.append(state)
return state | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/editor/gridbase.py | 0.469034 | 0.186391 | gridbase.py | pypi |
import wx.grid
class CellRenderer(wx.grid.GridCellRenderer):
    """Grid cell renderer that word-wraps string data to the column width.

    Based on wx's GridCellAutoWrapStringRenderer.
    """
    def __init__(self, default_width, max_width, auto_fit, word_wrap=True):
        wx.grid.GridCellRenderer.__init__(self)
        self.default_width = default_width  # column width used when auto fit is off
        self.max_width = max_width          # column width cap when auto fit is on
        self.auto_fit = auto_fit
        self.word_wrap = word_wrap

    @staticmethod
    def _wordwrap(text, width, dc, break_long_words=True, margin=0):
        """ modification of original wordwrap function without extra space """
        wrapped_lines = []
        text = text.split('\n')
        for line in text:
            # Cumulative pixel extent up to each character of the line.
            pte = dc.GetPartialTextExtents(line)
            wid = (width - (2 * margin + 1) * dc.GetTextExtent(' ')[0])
            idx = 0
            start = 0
            start_idx = 0
            spc_idx = -1
            while idx < len(line):
                # remember the last seen space
                if line[idx] == ' ':
                    spc_idx = idx
                # have we reached the max width?
                if pte[idx] - start > wid and (spc_idx != -1 or break_long_words):
                    if spc_idx != -1:
                        # Break after the last space; never past the line end.
                        idx = min(spc_idx + 1, len(pte) - 1)
                    wrapped_lines.append(' ' * margin + line[start_idx: idx] + ' ' * margin)
                    start = pte[idx]
                    start_idx = idx
                    spc_idx = -1
                idx += 1
            wrapped_lines.append(' ' * margin + line[start_idx: idx] + ' ' * margin)
        return '\n'.join(wrapped_lines)

    def Draw(self, grid, attr, dc, rect, row, col, is_selected):
        """Draw the cell's text wrapped to the current column width."""
        text = grid.GetCellValue(row, col)
        dc.SetFont(attr.GetFont())
        suggest_width = grid.GetColSize(col)
        text = self._wordwrap(text, suggest_width, dc, break_long_words=False)
        h_align, v_align = attr.GetAlignment()
        if is_selected:
            bg = grid.GetSelectionBackground()
            fg = grid.GetSelectionForeground()
        else:
            bg = attr.GetBackgroundColour()
            fg = attr.GetTextColour()
        dc.SetTextBackground(bg)
        dc.SetTextForeground(fg)
        dc.SetBrush(wx.Brush(bg, wx.SOLID))
        dc.SetPen(wx.TRANSPARENT_PEN)
        dc.DrawRectangle(rect)
        grid.DrawTextRectangle(dc, text, rect, h_align, v_align)

    def GetBestSize(self, grid, attr, dc, row, col):
        """The width will be between values `col size` and `max col size`
        These can be changed in user preferences.
        """
        text = grid.GetCellValue(row, col)
        dc.SetFont(attr.GetFont())
        w, h = dc.GetTextExtent('00')  # use 2 digits for size reference
        if self.auto_fit:
            grid.SetRowMinimalAcceptableHeight(int(h+h/2))
            grid.SetColMinimalAcceptableWidth(int(w+w/2))
        w, h = dc.GetTextExtent(text)
        if self.auto_fit:
            col_width = min(w, self.max_width)
        else:
            col_width = min(w, self.default_width)
        if self.word_wrap:
            suggest_width = max(grid.GetColSize(col), col_width)
            text = self._wordwrap(text, suggest_width, dc, break_long_words=False)
            w, h = dc.GetMultiLineTextExtent(text)
            if self.auto_fit:
                col_width = min(w, col_width)
            else:
                col_width = min(w, self.default_width)
        row_height = h
        return wx.Size(col_width, row_height)

    def Clone(self):
        """Return a new renderer configured like this one.

        Fix: the previous implementation returned the class object itself
        instead of an instance, breaking wx's renderer cloning contract.
        """
        return CellRenderer(self.default_width, self.max_width,
                            self.auto_fit, self.word_wrap)
import wx
from .editorcreator import EditorCreator
from ..pluginapi import (Plugin, action_info_collection, TreeAwarePluginMixin)
from ..publish import (RideTreeSelection, RideNotebookTabChanging, RideNotebookTabChanged, RideSaving)
from ..publish.messages import RideDataFileRemoved
from ..widgets import PopupCreator
# Menu specification parsed by action_info_collection() (see enable()):
# "[Section]" headers group entries, each entry is
# "label | description | shortcut", and "---" inserts a separator.
_EDIT = """
[Edit]
&Undo | Undo last modification | Ctrlcmd-Z
&Redo | Redo modification | Ctrlcmd-Y
---
Cu&t | Cut | Ctrlcmd-X
&Copy | Copy | Ctrlcmd-C
&Paste | Paste | Ctrlcmd-V
&Insert | Insert | Shift-Ctrl-V
&Delete | Delete | Del
---
Comment Rows | Comment selected rows | Ctrlcmd-3
Comment Cells | Comment cells with # | Ctrlcmd-Shift-3
Uncomment Rows | Uncomment selected rows | Ctrlcmd-4
Uncomment Cells | Uncomment cells with # | Ctrlcmd-Shift-4
---
Insert Cells | Insert Cells | Ctrlcmd-Shift-I
Delete Cells | Delete Cells | Ctrlcmd-Shift-D
Insert Rows | Insert Rows | Ctrlcmd-I
Delete Rows | Delete Rows | Ctrlcmd-D
[Tools]
Content Assistance (Ctrl-Space or Ctrl-Alt-Space) | Show possible keyword and variable completions | | | POSITION-70
"""
class EditorPlugin(Plugin, TreeAwarePluginMixin):
    """The default editor plugin.

    This plugin implements editors for the various items of Robot Framework
    test data.
    """

    def __init__(self, application):
        Plugin.__init__(self, application)
        self._tab = None
        self.grid_popup_creator = PopupCreator()
        self._creator = EditorCreator(self.register_editor)
        self._editor = None

    def enable(self):
        """Register editors, menu actions and message subscriptions."""
        self._creator.register_editors()
        self._show_editor()
        self.register_actions(action_info_collection(_EDIT, self._tab, self._tab))
        self.subscribe(self.on_tree_item_selected, RideTreeSelection)
        self.subscribe(self.on_tab_changed, RideNotebookTabChanged)
        self.subscribe(self.on_tab_changing, RideNotebookTabChanging)
        self.subscribe(self.on_save_to_model, RideSaving)
        self.subscribe(self.on_file_deleted, RideDataFileRemoved)
        self.add_self_as_tree_aware_plugin()

    def disable(self):
        """Undo everything ``enable`` did and drop the editor tab."""
        self.remove_self_from_tree_aware_plugins()
        self.unsubscribe_all()
        self.delete_tab(self._tab)
        # Fix: pass the bound method itself. The old code called
        # self.unregister_actions() immediately and handed its return value
        # (None) to CallLater, defeating the intended delayed call.
        wx.CallLater(500, self.unregister_actions)
        self._tab = None
        self._editor = None

    def is_focused(self):
        return self.tab_is_visible(self._tab)

    def highlight_cell(self, obj, row, column):
        """Bring the editor forward and highlight a single grid cell."""
        self.show()
        self._editor.highlight_cell(obj, row, column)

    def highlight(self, text):
        """Bring the editor forward and highlight ``text``."""
        self.show()
        self._editor.highlight(text)

    def show(self):
        self.show_tab(self._tab)

    def register_context_menu_hook_to_grid(self, hook):
        """ Used to register own items to grid's right click context menu

        hook is called with current selection (list of list containing
        values) and it is expected to return list of PopupMenuItem.
        If user selects one of the returned PopupMenuItem, related function
        is called with one argument, the wx event.
        """
        self.grid_popup_creator.add_hook(hook)

    def unregister_context_menu_hook_to_grid(self, hook):
        self.grid_popup_creator.remove_hook(hook)

    def _show_editor(self):
        # The tab is created lazily on first use.
        if not self._tab:
            self._tab = _EditorTab(self)
            self.add_tab(self._tab, 'Edit', allow_closing=False)
        if self.is_focused():
            self._editor = self._create_editor()
            self._tab.show_editor(self._editor)

    def _create_editor(self):
        return self._creator.editor_for(self, self._tab, self.tree)

    def on_tree_item_selected(self, message):
        """Refresh the editor to match the newly selected tree item."""
        self._show_editor()
        if not self.is_focused() and \
                not self.is_focus_on_tree_aware_plugin() and \
                (not message or not message.silent):
            self._editor = self._create_editor()
            self._tab.show_editor(self._editor)
            self.show()
        if self._editor:
            self._editor.tree_item_selected(message.item)

    def get_selected_datafile(self):
        if self._editor and self._editor.controller:
            return self._editor.controller.datafile
        return Plugin.get_selected_datafile(self)

    def on_open_editor(self, event):
        _ = event
        self._show_editor()

    def on_tab_changed(self, message):
        _ = message
        self._show_editor()

    def on_tab_changing(self, message):
        # Persist pending edits before the user leaves the Edit tab.
        if 'Edit' in message.oldtab:
            self._tab.save()

    def on_save_to_model(self, message):
        _ = message
        if self._tab:
            self._tab.save()

    def on_file_deleted(self, message):
        _ = message
        self._create_editor()
class _EditorTab(wx.Panel):
    """Notebook tab hosting the currently active editor.

    The ``on_*`` methods are action handlers that delegate to the editor.
    """
    def __init__(self, plugin):
        wx.Panel.__init__(self, plugin.notebook, style=wx.SUNKEN_BORDER)
        self.plugin = plugin
        self.sizer = wx.BoxSizer(wx.VERTICAL)
        self.SetSizer(self.sizer)
        # Disabled experiment with custom tab colours:
        """
        self.SetBackgroundColour(Colour(200, 222, 40))
        self.SetOwnBackgroundColour(Colour(200, 222, 40))
        self.SetForegroundColour(Colour(7, 0, 70))
        self.SetOwnForegroundColour(Colour(7, 0, 70))
        """
        self.Refresh(True)
        self.editor = None

    def show_editor(self, editor):
        """Place ``editor`` into the tab, replacing the previous one."""
        if editor is None:
            return
        if editor is self.editor:
            # Same editor instance: just make sure the tab is visible.
            self.Show(True)
            return
        self.sizer.Clear()
        self.editor = editor
        self.sizer.Add(self.editor, 1, wx.ALL | wx.EXPAND)
        self.Layout()
        self.Show(True)

    def hide_editor(self):
        self.Show(False)

    def on_save(self, event):
        _ = event
        self.plugin.save_selected_datafile()

    def on_undo(self, event):
        _ = event
        self.editor.undo()

    def on_redo(self, event):
        _ = event
        self.editor.redo()

    def on_cut(self, event):
        _ = event
        self.editor.cut()

    def on_copy(self, event):
        _ = event
        self.editor.copy()

    def on_paste(self, event):
        _ = event
        self.editor.paste()

    def on_insert(self, event):
        _ = event
        self.editor.insert()

    def on_insert_cells(self, event):
        _ = event
        self.editor.insert_cells()

    def on_delete_cells(self, event):
        _ = event
        self.editor.delete_cells()

    def on_insert_rows(self, event):
        _ = event
        self.editor.insert_rows()

    def on_delete_rows(self, event):
        _ = event
        # Deferred so the grid finishes handling the current event first.
        wx.CallAfter(self.editor.delete_rows)

    def on_delete(self, event):
        _ = event
        self.editor.delete()

    def on_comment_rows(self, event):
        _ = event
        self.editor.comment_rows()

    def on_uncomment_rows(self, event):
        _ = event
        self.editor.uncomment_rows()

    def on_sharp_comment_rows(self, event):
        _ = event
        self.editor.sharp_comment_rows()

    def on_sharp_uncomment_rows(self, event):
        _ = event
        self.editor.sharp_uncomment_rows()

    def on_comment_cells(self, event):
        _ = event
        self.editor.comment_cells()

    def on_uncomment_cells(self, event):
        _ = event
        self.editor.uncomment_cells()

    def on_content_assistance(self, event):
        _ = event
        self.editor.show_content_assist()

    def save(self, message=None):
        _ = message
        if self.editor:
            self.editor.save()

    def on_key(self, *args):
        """ Intentional override """
        pass
from ..robotapi import ALIAS_MARKER
def get_help(title):
    """Return the help text registered for ``title`` as a single string."""
    lines = _HELPS[title]
    return '\n'.join(lines)
# Mapping from help title to a list of help-text lines; populated below.
_HELPS = {}
# Reusable snippets interpolated into the help texts via %(NAME)s markers.
_EXAMPLES = {
    'ESCAPE': "Possible pipes in the value must be escaped with a backslash like '\\|'.",
    'TAG': "Separate tags with a pipe character like 'tag | second tag | 3rd'.",
    'FIXTURE': "Separate possible arguments with a pipe character like 'My Keyword | arg 1 | arg 2'.",
    'TIMEOUT': ("Use time syntax like '1min 10s' or '2 hours' or give the value as seconds.\n"
                "Before Robot v3.0.1 an optional message could have been specified like '3 minutes | My message here'."
                ""),
    'ARGUMENTS': ("Specify the arguments separated with a pipe character like '${arg1} | ${arg2}'.\n"
                  "Default values are given using equal sign and the last argument can be a list variable.\n"
                  "Example: '${arg1} | ${arg2}=default value | @{rest}'.\n"
                  "Note. You can use variable shortcuts in this field."),
    'ALIAS': ("Alias can be used to import same library multiple times with different names.\n"
              "Alias is prepended with: "+ALIAS_MARKER+" . Note that since Robot v6.0, imports with old WITH NAME are"
              " replaced by AS.")
}
# Parse the help specification below into _HELPS: a blank line ends an entry,
# the first line after a blank (or the start) is the entry title, and every
# following line is help text with %(NAME)s placeholders expanded from
# _EXAMPLES.
current = None
for row in """
Scalar Variable
Give name and value of the variable.

List Variable
Give name and value of the variable. Input list variable items into separate cells.

Dictionary Variable
Give name and value of the variable. Input dictionary items into separate cells.
Individual items must be in format `key=value`

Library
Give name, optional arguments and optional alias of the library to import.
Separate multiple arguments with a pipe character like 'arg 1 | arg 2'.
%(ALIAS)s

Variables
Give path and optional arguments of the variable file to import.
Separate multiple arguments with a pipe character like 'arg 1 | arg 2'.
%(ESCAPE)s

Resource
Give path to the resource file to import.
Existing resources will be automatically loaded to the resource tree.
New resources must be created separately.

Documentation
Give the documentation.
Simple formatting like *bold* and _italic_ can be used.
Additionally, URLs are converted to clickable links.

Force Tags
These tags are set to all test cases in this test suite.
Inherited tags are not shown in this view.
%(TAG)s
%(ESCAPE)s

Default Tags
These tags are set to all test cases in this test suite unless test cases have their own tags.
%(TAG)s
%(ESCAPE)s

Tags
These tags are set to this test case in addition to Force Tags and they override possible Default Tags.
Inherited tags are not shown in this view.
%(TAG)s
%(ESCAPE)s

Suite Setup
This keyword is executed before executing any of the test cases or lower level suites.
%(FIXTURE)s
%(ESCAPE)s

Suite Teardown
This keyword is executed after all test cases and lower level suites have been executed.
%(FIXTURE)s
%(ESCAPE)s

Test Setup
This keyword is executed before every test case in this suite unless test cases override it.
%(FIXTURE)s
%(ESCAPE)s

Test Teardown
This keyword is executed after every test case in this suite unless test cases override it.
%(FIXTURE)s
%(ESCAPE)s

Setup
This keyword is executed before other keywords in this test case.
Overrides possible Test Setup set on the suite level.
%(FIXTURE)s
%(ESCAPE)s

Teardown
This keyword is executed after other keywords in this test case even if the test fails.
Overrides possible Test Teardown set on the suite level.
%(FIXTURE)s
%(ESCAPE)s

Test Template
Specifies the default template keyword used by tests in this suite.
The test cases will contain only data to use as arguments to that keyword.

Template
Specifies the template keyword to use.
The test itself will contain only data to use as arguments to that keyword.

Arguments
%(ARGUMENTS)s
%(ESCAPE)s

Return Value
Specify the return value. Use a pipe character to separate multiple values.
%(ESCAPE)s

Test Timeout
Maximum time test cases in this suite are allowed to execute before aborting them forcefully.
Can be overridden by individual test cases using Timeout setting.
%(TIMEOUT)s

Timeout
Maximum time this test/keyword is allowed to execute before aborting it forcefully.
With test cases this setting overrides Test Timeout set on the suite level.
%(TIMEOUT)s

Metadata
Give a name and a value for the suite metadata.

New Test Case
Give a name for the new test case.

New User Keyword
Give a name and arguments for the new user keyword.
%(ARGUMENTS)s

Copy User Keyword
Give a name for the new user keyword.
""".splitlines():
    row = row.strip()
    if not row:
        # Blank line: the next non-blank line starts a new entry.
        current = None
    elif current is None:
        # Title line: create (or reuse) the entry's line list.
        current = _HELPS.setdefault(row, [])
    else:
        current.append(row % _EXAMPLES)
import os
import wx
from .. import robotapi, utils
class _AbstractValidator(wx.Validator):
    """Implements methods to keep wxPython happy and some helper methods.

    Subclasses implement ``_validate`` returning an error message string,
    or None when the value is valid.
    """

    def Clone(self):
        return self.__class__()

    def TransferFromWindow(self):
        return True

    def TransferToWindow(self):
        return True

    def Validate(self, win):
        """Validate the associated control's value; pop up a dialog on error."""
        value = self.Window.Value
        error = self._validate(value)
        if error:
            self._show_error(error)
            return False
        return True

    def _validate(self, value):
        """Return an error message, or None when ``value`` is valid."""
        # Fix: raise instead of returning the NotImplemented singleton, which
        # Validate() would have treated as a truthy "error message".
        raise NotImplementedError

    def _show_error(self, message, title="Validation Error"):
        ret = wx.MessageBox(message, title, style=wx.ICON_ERROR)
        self._set_focus_to_text_control(self.Window)
        return ret

    @staticmethod
    def _set_focus_to_text_control(ctrl):
        ctrl.SetFocus()
        ctrl.SelectAll()
class TimeoutValidator(_AbstractValidator):
    """Validates Robot Framework timeout values and normalizes the field."""

    def _validate(self, value):
        time_tokens = utils.split_value(value)
        if not time_tokens:
            return None
        timestr = time_tokens[0]
        try:
            secs = utils.timestr_to_secs(timestr)
            if secs <= 0:
                raise ValueError("Timestring must be over zero")
            # Write the canonical time string back (round-trip via seconds).
            time_tokens[0] = utils.secs_to_timestr(secs)
        except ValueError as err:
            # Values containing variables cannot be validated here; accept
            # them unchanged.
            if '${' not in timestr:
                return str(err)
        self._set_window_value(utils.join_value(time_tokens))
        return None

    def _set_window_value(self, value):
        self.Window.SetValue(value)
class ArgumentTypes(object):
    """Ordinals for argument kinds; the numeric order encodes the legal
    ordering of arguments in a keyword signature."""
    SCALAR = 1
    DEFAULT = 2
    LIST = 3
    DICT = 4
class ArgumentsValidator(_AbstractValidator):
    """Validates a keyword's argument specification string."""

    def _validate(self, args_str):
        try:
            types = [self._get_type(arg)
                     for arg in utils.split_value(args_str)]
        except ValueError as e:
            # _get_type raises ValueError(arg), so str(e) is the bad argument.
            return "Invalid argument syntax '%s'" % str(e)
        return self._validate_argument_order(types)

    @staticmethod
    def _get_type(arg):
        if robotapi.is_scalar_var(arg):
            return ArgumentTypes.SCALAR
        elif robotapi.is_scalar_var(arg.split("=")[0]):
            # Scalar with a default value, e.g. name=value form.
            return ArgumentTypes.DEFAULT
        elif robotapi.is_list_var(arg):
            return ArgumentTypes.LIST
        elif robotapi.is_dict_var(arg):
            return ArgumentTypes.DICT
        else:
            raise ValueError(arg)

    @staticmethod
    def _validate_argument_order(types):
        # Argument kinds must appear in non-decreasing ordinal order
        # (scalar < default < list < dict).
        prev = ArgumentTypes.SCALAR
        for t in types:
            if t < prev:
                return ("List and scalar arguments must be before named and "
                        "dictionary arguments")
            prev = t
        return None
class NonEmptyValidator(_AbstractValidator):
    """Rejects empty values for the named field."""

    def __init__(self, field_name):
        _AbstractValidator.__init__(self)
        self._field_name = field_name

    def Clone(self):
        return self.__class__(self._field_name)

    def _validate(self, value):
        if value:
            return None
        return "%s cannot be empty" % self._field_name
class SuiteFileNameValidator(NonEmptyValidator):
    """Non-empty name check that additionally rejects '__init__' as the name
    of a suite *file*."""

    def __init__(self, field_name, is_dir_type):
        NonEmptyValidator.__init__(self, field_name)
        # is_dir_type is a callable, evaluated at validation time.
        self._is_dir_type = is_dir_type

    def Clone(self):
        return self.__class__(self._field_name, self._is_dir_type)

    def _validate(self, value):
        validity = NonEmptyValidator._validate(self, value)
        # Only check the '__init__' rule for file suites whose name passed
        # the non-empty check.
        if not self._is_dir_type() and not validity:
            if value.lower() == '__init__':
                return "Invalid suite file name \"%s\"" % value
        return validity
class DirectoryExistsValidator(_AbstractValidator):
    """Accepts only paths that point to an existing directory."""

    def _validate(self, value):
        return None if os.path.isdir(value) else "Chosen directory must exist"
class NewSuitePathValidator(_AbstractValidator):
    """Validates the target path for a new suite: the target must not exist
    yet, but its parent directory must."""

    def _validate(self, value):
        path = os.path.normpath(value)
        if os.path.exists(path):
            return "Target file or directory must not exist"
        parentdir, filename = os.path.split(path)
        if "__init__" in filename:
            # Directory-suite init file: the suite directory itself is being
            # created too, so require its parent instead.
            parentdir = os.path.dirname(parentdir)
        if not os.path.exists(parentdir):
            return "Parent directory must exist"
        return None
class _NameValidator(_AbstractValidator):
    """Base for validators that delegate to a controller validation method.

    ``orig_name`` lets an item be renamed back to (an underscore-insensitive
    variant of) its original name without being reported as invalid.
    """

    def __init__(self, controller, orig_name=None):
        _AbstractValidator.__init__(self)
        self._controller = controller
        self._orig_name = orig_name

    def Clone(self):
        return self.__class__(self._controller, self._orig_name)

    def _validate(self, name):
        if self._orig_name is not None and utils.eq(
                name, self._orig_name, ignore=['_']):
            return ''
        return self._validation_method(name).error_message

    @property
    def _validation_method(self):
        # Overridden by subclasses to return the controller's validator
        # callable; the base value is a placeholder.
        return NotImplemented
class TestCaseNameValidator(_NameValidator):
    """Validates new test case names via the controller."""
    __test__ = False  # keep test-collection tools from picking this class up

    @property
    def _validation_method(self):
        return self._controller.validate_test_name


class UserKeywordNameValidator(_NameValidator):
    """Validates new user keyword names via the controller."""

    @property
    def _validation_method(self):
        return self._controller.validate_keyword_name


class ScalarVariableNameValidator(_NameValidator):
    """Validates scalar variable names via the controller."""

    @property
    def _validation_method(self):
        return self._controller.validate_scalar_variable_name


class ListVariableNameValidator(_NameValidator):
    """Validates list variable names via the controller."""

    @property
    def _validation_method(self):
        return self._controller.validate_list_variable_name


class DictionaryVariableNameValidator(_NameValidator):
    """Validates dictionary variable names via the controller."""

    @property
    def _validation_method(self):
        return self._controller.validate_dict_variable_name
import sys
import inspect
import types
from pubsub import pub
from typing import Type, Callable
from ..publish.messages import RideMessage
class _Publisher:
    """Wraps the pypubsub default publisher, keyed by RideMessage classes."""

    def __init__(self):
        self.publisher = pub.getDefaultPublisher()
        self.publisher.setListenerExcHandler(ListenerExceptionHandler())

    @staticmethod
    def _get_topic(topic_cls: Type[RideMessage]) -> str:
        """Map a RideMessage subclass to its pubsub topic string."""
        if inspect.isclass(topic_cls) and issubclass(topic_cls, RideMessage):
            return topic_cls.topic()
        raise TypeError('Expected topic type {}, actual {}.'.format(RideMessage, topic_cls))

    @staticmethod
    def _validate_listener(listener: Callable):
        # Listeners must take exactly one parameter named 'message' or 'data'.
        sig = inspect.signature(listener)
        params = sig.parameters
        error_msg = 'only 1 required param (message) is expected.'
        assert len(params) == 1, 'Too many listener params, ' + error_msg
        assert str(list(params.values())[0]) in ['message', 'data'], 'Invalid listener param, ' + error_msg

    def subscribe(self, listener: Callable, topic: Type[RideMessage]):
        """ The listener's param signature must be (message) """
        self._validate_listener(listener)
        self.publisher.subscribe(listener, self._get_topic(topic))

    def publish(self, topic: Type[RideMessage], message):
        """ All subscribed listeners' param signatures have been guaranteed """
        self.publisher.sendMessage(self._get_topic(topic), message=message)

    def unsubscribe(self, listener: Callable, topic: Type[RideMessage]):
        self.publisher.unsubscribe(listener, self._get_topic(topic))

    def unsubscribe_all(self, obj=None):
        """ If the given object's:
            1. object method
            2. class static function
            3. class function
        is subscribed into PUBLISHER, call this method to unsubscribe all its topics.
        Unsubscribe all topics when input is None.
        """
        if obj is None:
            self.publisher.unsubAll(listenerFilter=None)
            return
        # Perf fix: collect the object's functions and bound methods once,
        # instead of re-inspecting the object for every subscribed listener
        # (the filter is invoked per listener by unsubAll).
        members = [func for _, func in _get_members_safely(obj, inspect.isfunction)]
        members += [method for _, method in _get_members_safely(obj, inspect.ismethod)]

        def _listener_filter(listener):
            return listener.getCallable() in members

        self.publisher.unsubAll(listenerFilter=_listener_filter)
class ListenerExceptionHandler(pub.IListenerExcHandler):
    """Reports exceptions raised inside topic listeners as RIDE log messages."""

    def __call__(self, listener_id: str, topic_obj: pub.Topic):
        # Imported here to avoid a circular import at module load time.
        from .messages import RideLogException
        topic_name = topic_obj.getName()
        if topic_name == RideLogException.topic():
            # Never report failures of the error-reporting topic itself.
            return
        error_msg = 'Error in listener: {}, topic: {}'.format(listener_id, topic_name)
        log_message = RideLogException(message=error_msg,
                                       exception=None, level='ERROR')
        sys.stderr.write(log_message.message)
        log_message.publish()
def _get_members_safely(obj, predicate=None):
    """Return all members of an object as (name, value) pairs sorted by name.

    Optionally, only return members that satisfy a given predicate.
    Copied from inspect.getmembers(), with added protection logic to bypass
    unexpected exceptions raised while iterating object attributes.
    """
    if inspect.isclass(obj):
        mro = (obj,) + inspect.getmro(obj)
    else:
        mro = ()
    results = []
    processed = set()
    names = dir(obj)
    # Add any DynamicClassAttributes to the list of names if object is a class;
    # this may result in duplicate entries if, for example, a virtual
    # attribute with the same name as a DynamicClassAttribute exists
    try:
        for base in obj.__bases__:
            for k, v in base.__dict__.items():
                if isinstance(v, types.DynamicClassAttribute):
                    names.append(k)
    except AttributeError:
        pass
    for key in names:
        # First try to get the value via getattr. Some descriptors don't
        # like calling their __get__ (see bug #1785), so fall back to
        # looking in the __dict__.
        try:
            value = getattr(obj, key)
            # Force duplicate keys down the AttributeError fallback path.
            if key in processed:
                raise AttributeError
        except Exception as e:
            """ UPDATED HERE: Catch all types of exceptions. """
            if isinstance(e, AttributeError):
                """ UPDATED HERE: Use old logic if exception is AttributeError. """
                for base in mro:
                    if key in base.__dict__:
                        value = base.__dict__[key]
                        break
                else:
                    # could be a (currently) missing slot member, or a buggy
                    # __dir__; discard and move on
                    continue
            else:
                """ UPDATED HERE: Ignore this attribute when other types of exception raised. """
                continue
        if not predicate or predicate(value):
            results.append((key, value))
        processed.add(key)
    results.sort(key=lambda pair: pair[0])
    return results
# Global publisher instance for subscribing to and unsubscribing from
# RideMessages. (Was a bare string literal, i.e. a no-op statement, not
# attached documentation.)
PUBLISHER = _Publisher()
import inspect
import sys
import traceback
from .. import utils
class RideMessage:
    """Base class for all messages sent by RIDE.

    :CVariables:
      topic
        Topic of this message. If not overridden, value is got from the class
        name by lowercasing it, separating words with a dot and dropping possible
        ``Message`` from the end. For example classes ``MyExample`` and
        ``AnotherExampleMessage`` get titles ``my.example`` and
        ``another.example``, respectively.
      data
        Names of attributes this message provides. These must be given as
        keyword arguments to `__init__` when an instance is created.
    """
    _topic = None  # explicit topic override; derived from the class name if None
    data = []      # attribute names this message carries (class-level, read-only)

    def __init__(self, **kwargs):
        """Initializes message based on given keyword arguments.

        Names of the given keyword arguments must match to names in `data`
        class attribute, otherwise the initialization fails.

        Must be called explicitly by subclass if overridden.
        """
        if sorted(kwargs.keys()) != sorted(self.data):
            raise TypeError('Argument mismatch, expected: %s' % self.data)
        self.__dict__.update(kwargs)

    @classmethod
    def topic(cls):
        """Return the (lowercased) pubsub topic string for this class."""
        if not cls._topic:
            cls_name = cls.__name__
            if cls_name.endswith('Message'):
                cls_name = cls_name[:-len('Message')]
            topic_name = utils.printable_name(cls_name, code_style=True).replace(' ', '.')
        else:
            topic_name = cls._topic
        return topic_name.lower()

    def publish(self):
        """Publishes the message.

        All listeners that have subscribed to the topic of this message will be
        called with this instance as an argument.

        Notifications are sent sequentially. Due to the limitations of current
        implementation, if any of the listeners raises an exception, subsequent
        listeners will not get the notification.
        """
        from robotide.publish.publisher import PUBLISHER
        PUBLISHER.publish(self.__class__, self)
class RideLog(RideMessage):
    """This class represents a general purpose log message.

    Subclasses of this may be used to inform error conditions or to provide
    some kind of debugging information.
    """
    data = ['message', 'level', 'timestamp', 'notify_user']
class RideLogMessage(RideLog):
    """This class represents a general purpose log message.

    This message may be used to inform error conditions or to provide
    some kind of debugging information.
    """
    data = ['message', 'level', 'timestamp', 'notify_user']

    def __init__(self, message, level='INFO', notify_user=False):
        """Initializes a RIDE log message.

        The log ``level`` has default value ``INFO`` and the ``timestamp``
        is generated automatically.
        """
        RideMessage.__init__(
            self, message=message, level=level,
            timestamp=utils.get_timestamp(), notify_user=notify_user)
class RideLogException(RideLog):
    """This class represents a general purpose log message with a traceback
    appended to message text. Also, the original exception is included with
    the message.

    This message may be used to inform error conditions or to provide
    some kind of debugging information.
    """
    data = ['message', 'level', 'timestamp', 'exception', 'notify_user']

    def __init__(self, message, exception, level='INFO', notify_user=False):
        """Initializes a RIDE log exception.

        The log ``level`` has default value ``INFO`` and the ``timestamp``
        is generated automatically. Message is automatically appended with
        a traceback.
        """
        _, _, exc_traceback = sys.exc_info()
        if exc_traceback:
            tb = traceback.extract_tb(exc_traceback)
            # NOTE(review): this places str(exception) directly under the
            # "Traceback" header, before the formatted frames -- the reverse
            # of Python's usual layout. Kept as-is; confirm before changing.
            message += '\n\nTraceback (most recent call last):\n%s\n%s' % \
                       (str(exception), ''.join(traceback.format_list(tb)))
        RideMessage.__init__(
            self, message=message, level=level, notify_user=notify_user,
            timestamp=utils.get_timestamp(), exception=exception)
class RideParserLogMessage(RideMessage):
    """This class represents a general purpose log message.

    This message may be used to inform parser errors and to provide
    some kind of debugging information.
    """
    data = ['message', 'level', 'timestamp', 'notify_user']

    def __init__(self, message, level='', notify_user=False):
        """Initializes a RIDE log message.

        The log ``level`` has default value ``''`` (the docstring previously
        claimed ``WARN``, which did not match the signature) and the
        ``timestamp`` is generated automatically.
        """
        RideMessage.__init__(
            self, message=message, level=level,
            timestamp=utils.get_timestamp(), notify_user=notify_user)
class RideInputValidationError(RideMessage):
    """Sent whenever user input is invalid."""
    data = ['message']  # validation error text


class RideModificationPrevented(RideMessage):
    """Sent whenever modifying command is prevented for some reason."""
    data = ['controller']


class RideSettingsChanged(RideMessage):
    """Sent when settings are changed.

    keys is a tuple of key names. For example, if the "Grid Colors" section
    was modified the keys would be ("Grid Colors"), or a specific plugin
    setting might be ("Plugin", "Preview", "format").
    `old` and `new` contain the old and the new value of the setting.
    """
    data = ['keys', 'old', 'new']


class RideExecuteSpecXmlImport(RideMessage):
    """Sent whenever spec xml import is requested."""


class RideTreeSelection(RideMessage):
    """Sent whenever user selects a node from the tree."""
    data = ['node', 'item', 'silent']


class RideOpenVariableDialog(RideMessage):
    """Sent when variable dialog is requested to be open."""
    data = ['controller']
# --- Test execution life-cycle messages -------------------------------------

class RideTestExecutionStarted(RideMessage):
    """Sent whenever new test execution is started."""
    data = ['results']


class RideTestSelectedForRunningChanged(RideMessage):
    """Sent whenever a test is selected or unselected from the tree."""
    data = ['tests']


class RideTestRunning(RideMessage):
    """Sent whenever RIDE is starting to run a test case."""
    data = ['item']


class RideTestPaused(RideMessage):
    """Sent whenever RIDE is running a test case and paused."""
    data = ['item']


class RideTestPassed(RideMessage):
    """Sent whenever RIDE has executed a test case, and it passed."""
    data = ['item']


class RideTestFailed(RideMessage):
    """Sent whenever RIDE has executed a test case, and it failed."""
    data = ['item']


class RideTestSkipped(RideMessage):
    """Sent whenever RIDE has executed a test case, and it was skipped."""
    data = ['item']


class RideTestStopped(RideMessage):
    """Sent whenever RIDE was executing a test case, and it was stopped or aborted."""
    data = ['item']
class RideNotebookTabChanging(RideMessage):
    """Sent when the notebook tab change has started.

    Subscribing to this event allows the listener to do something before the
    tab has actually changed in the UI.
    """
    data = ['oldtab', 'newtab']


class RideNotebookTabChanged(RideMessage):
    """Sent after the notebook tab change has completed."""
    pass


class RideSaving(RideMessage):
    """Sent when user selects Save from File menu or via shortcut.

    This is used for example to store current changes from editor to data
    model, to guarantee that all changes are really saved."""
    data = ['path', 'datafile']


class RideBeforeSaving(RideMessage):
    """Sent before files are going to be saved."""
    pass


class RideSaved(RideMessage):
    """Sent after the file has been actually saved to disk."""
    data = ['path']


class RideSaveAll(RideMessage):
    """Sent when user selects ``Save All`` from ``File`` menu or via shortcut."""
    pass


class RideDataDirtyCleared(RideMessage):
    """Sent when datafiles dirty marking is cleared.

    datafile has been saved and datafile in memory equals the serialized one.
    """
    data = ['datafile']


class RideNewProject(RideMessage):
    """Sent when a new project has been created."""
    data = ['path', 'datafile']


class RideClosing(RideMessage):
    """Sent when user selects ``Quit`` from ``File`` menu or via shortcut."""
    pass


class RideOpenSuite(RideMessage):
    """Sent when a new suite has finished loading."""
    data = ['path', 'datafile']


class RideOpenResource(RideMessage):
    """Sent when a new resource has finished loading."""
    data = ['path', 'datafile']


class RideSelectResource(RideMessage):
    """Sent when a resource should be selected."""
    data = ['item']


class RideDataChanged(RideMessage):
    """Base class for all messages notifying that data in model has changed."""
    pass
class RideFileNameChanged(RideDataChanged):
    """Sent after test suite or resource file is renamed."""
    data = ['datafile', 'old_filename']


class RideDataFileRemoved(RideDataChanged):
    """Sent when a data file is removed."""
    data = ['path', 'datafile']


class RideSuiteAdded(RideDataChanged):
    """Sent when a new suite is added under a parent."""
    data = ['parent', 'suite']


class RideInitFileRemoved(RideDataChanged):
    """Sent when a suite initialization file is removed."""
    data = ['path', 'datafile']


class RideImportSetting(RideDataChanged):
    """Base class for all messages about changes to import settings."""
    data = ['datafile', 'type', 'import_controller']

    def is_resource(self):
        """Tell whether the changed import setting is a resource import."""
        return self.type == 'resource'

    @property
    def name(self):
        """Name of the changed import (from the import controller)."""
        return self.import_controller.name


class _RideExcludes(RideMessage):
    """Base class for include/exclude change messages."""
    data = ['old_controller', 'new_controller']


class RideIncludesChanged(_RideExcludes):
    pass


class RideExcludesChanged(_RideExcludes):
    pass
class RideImportSettingAdded(RideImportSetting):
    """Sent whenever an import setting is added.

    ``datafile`` is the suite or resource file whose imports have changed,
    ``type`` is either ``resource``, ``library``, or ``variables``.
    """
    pass


class RideImportSettingChanged(RideImportSetting):
    """Sent whenever a value of import setting is changed.

    ``datafile`` is the suite or resource file whose imports have changed,
    ``type`` is either ``resource``, ``library``, or ``variables``.
    """
    pass


class RideImportSettingRemoved(RideImportSetting):
    """Sent whenever a value of import setting is removed.

    ``datafile`` is the suite or resource file whose imports have been
    removed, ``type`` is either ``resource``, ``library``, or ``variables``.
    """
    pass


class RideDataChangedToDirty(RideDataChanged):
    """Sent when datafile changes from serialized version."""
    data = ['datafile']


class RideDataFileSet(RideDataChanged):
    """Sent when a whole datafile is replaced with new one in a controller."""
    data = ['item']


class RideUserKeyword(RideDataChanged):
    """Base class for all messages about changes to any user keyword."""
    pass


class RideUserKeywordAdded(RideUserKeyword):
    """Sent when a new user keyword is added to a suite or resource."""
    data = ['datafile', 'name', 'item']


class RideUserKeywordRemoved(RideUserKeyword):
    """Sent when a user keyword is removed from a suite or resource."""
    data = ['datafile', 'name', 'item']
class RideItem(RideDataChanged):
    """Base class for all messages about changes to any data item."""
    data = ['item']


class RideItemStepsChanged(RideItem):
    """Sent when the steps of an item have changed."""
    pass


class RideItemNameChanged(RideItem):
    """Sent when an item has been renamed; ``old_name`` is the previous name."""
    data = ['item', 'old_name']


class RideItemSettingsChanged(RideItem):
    """Sent when the settings of an item have changed."""
    pass


class RideTestCaseAdded(RideDataChanged):
    """Sent when a new test case is added to a suite."""
    data = ['datafile', 'name', 'item']


class RideTestCaseRemoved(RideDataChanged):
    """Sent when a test case is removed from a suite."""
    data = ['datafile', 'name', 'item']


class RideItemMovedUp(RideDataChanged):
    """Sent when an item (test, keyword, variable) is moved up."""
    data = ['item']


class RideItemMovedDown(RideDataChanged):
    """Sent when an item (test, keyword, variable) is moved down."""
    data = ['item']


class RideVariableAdded(RideDataChanged):
    """Sent when a new variable is added to a suite."""
    data = ['datafile', 'name', 'item', 'index']


class RideVariableRemoved(RideDataChanged):
    """Sent when a variable is removed from a suite."""
    data = ['datafile', 'name', 'item']


class RideVariableMovedUp(RideItemMovedUp):
    """Sent when a variable is moved up.

    ``item`` is the item that has been moved up,
    ``other`` is the item that was swapped down.
    """
    data = ['item', 'other']


class RideVariableMovedDown(RideItemMovedDown):
    """Sent when a variable is moved down.

    ``item`` is the item that has been moved down,
    ``other`` is the item that was swapped up.
    """
    data = ['item', 'other']


class RideVariableUpdated(RideDataChanged):
    """Sent when the state of a variable is changed."""
    data = ['item']


class RideOpenTagSearch(RideMessage):
    """Sent when we want to open Search Tags."""
    data = ['includes', 'excludes']


class RideTreeAwarePluginAdded(RideMessage):
    """Sent when a tree aware plugin has been added."""
    data = ['plugin']
# Automatically export every RideMessage subclass defined in this module.
__all__ = [name for name, cls in globals().items()
           if inspect.isclass(cls) and issubclass(cls, RideMessage)]
# Python 2/3 compatibility: Python 3 has no ``unicode`` built-in, so alias
# it to ``str`` there.
try:
    unicode
except NameError:
    unicode = str

# Return codes from Robot and Rebot.
# RC below 250 is the number of failed critical tests and exactly 250
# means that number or more such failures.
INFO_PRINTED = 251       # --help or --version
DATA_ERROR = 252         # Invalid data or cli args
STOPPED_BY_USER = 253    # KeyboardInterrupt or SystemExit
FRAMEWORK_ERROR = 255    # Unexpected error
class RobotError(Exception):
    """Base class for Robot Framework errors.

    Do not raise this exception directly but use more specific errors instead.
    """

    def __init__(self, message='', details=''):
        Exception.__init__(self, message)
        self.details = details

    @property
    def message(self):
        # ``unicode`` is aliased to ``str`` on Python 3 (see module top).
        return unicode(self)
class FrameworkError(RobotError):
    """Can be used when the core framework goes to unexpected state.

    It is good to explicitly raise a FrameworkError if some framework
    component is used incorrectly. This is pretty much same as
    'Internal Error' and should of course never happen.
    """


class DataError(RobotError):
    """Used when the provided test data is invalid.

    DataErrors are not caught by keywords that run other keywords
    (e.g. `Run Keyword And Expect Error`).
    """


class VariableError(DataError):
    """Used when variable does not exist.

    VariableErrors are caught by keywords that run other keywords
    (e.g. `Run Keyword And Expect Error`).
    """


class KeywordError(DataError):
    """Used when no keyword is found or there is more than one match.

    KeywordErrors are caught by keywords that run other keywords
    (e.g. `Run Keyword And Expect Error`).
    """


class TimeoutError(RobotError):
    """Used when a test or keyword timeout occurs.

    This exception is handled specially so that execution of the
    current test is always stopped immediately and it is not caught by
    keywords executing other keywords (e.g. `Run Keyword And Expect
    Error`).

    NOTE: This class shadows the built-in ``TimeoutError`` within this
    module.
    """

    def __init__(self, message='', test_timeout=True):
        RobotError.__init__(self, message)
        # True for test timeouts, False for keyword timeouts.
        self.test_timeout = test_timeout

    @property
    def keyword_timeout(self):
        return not self.test_timeout


class Information(RobotError):
    """Used by argument parser with --help or --version."""
class ExecutionStatus(RobotError):
    """Base class for exceptions communicating status in test execution."""

    def __init__(self, message, test_timeout=False, keyword_timeout=False,
                 syntax=False, exit=False, continue_on_failure=False,
                 return_value=None):
        if '\r\n' in message:
            message = message.replace('\r\n', '\n')
        # Imported here, presumably to avoid a circular import -- confirm.
        from robotide.lib.robot.utils import cut_long_message
        RobotError.__init__(self, cut_long_message(message))
        self.test_timeout = test_timeout
        self.keyword_timeout = keyword_timeout
        self.syntax = syntax
        self.exit = exit
        self._continue_on_failure = continue_on_failure
        self.return_value = return_value

    @property
    def timeout(self):
        return self.test_timeout or self.keyword_timeout

    @property
    def dont_continue(self):
        return self.timeout or self.syntax or self.exit

    @property
    def continue_on_failure(self):
        return self._continue_on_failure

    @continue_on_failure.setter
    def continue_on_failure(self, continue_on_failure):
        # Propagate the flag also to possible child errors (``_errors`` is
        # set by ExecutionFailures); skip self to avoid infinite recursion.
        self._continue_on_failure = continue_on_failure
        for child in getattr(self, '_errors', []):
            if child is not self:
                child.continue_on_failure = continue_on_failure

    def can_continue(self, teardown=False, templated=False, dry_run=False):
        """Tell whether execution can continue after this status.

        Syntax errors, exit-on-failure and test timeouts always stop
        execution (except in dry run); templated tests and teardowns are
        more permissive.
        """
        if dry_run:
            return True
        if self.syntax or self.exit or self.test_timeout:
            return False
        if templated:
            return True
        if self.keyword_timeout:
            return False
        if teardown:
            return True
        return self.continue_on_failure

    def get_errors(self):
        return [self]

    @property
    def status(self):
        return 'FAIL'
class ExecutionFailed(ExecutionStatus):
    """Used for communicating failures in test execution."""


class HandlerExecutionFailed(ExecutionFailed):
    """Failure raised when executing a keyword handler fails."""

    def __init__(self, details):
        error = details.error
        timeout = isinstance(error, TimeoutError)
        test_timeout = timeout and error.test_timeout
        keyword_timeout = timeout and error.keyword_timeout
        # Generic DataErrors count as syntax errors, but Keyword/Variable
        # errors do not (they are catchable by wrapper keywords).
        syntax = (isinstance(error, DataError)
                  and not isinstance(error, (KeywordError, VariableError)))
        exit_on_failure = self._get(error, 'EXIT_ON_FAILURE')
        continue_on_failure = self._get(error, 'CONTINUE_ON_FAILURE')
        ExecutionFailed.__init__(self, details.message, test_timeout,
                                 keyword_timeout, syntax, exit_on_failure,
                                 continue_on_failure)
        self.full_message = details.message
        self.traceback = details.traceback

    def _get(self, error, attr):
        # Test libraries control behavior via ``ROBOT_*`` attributes on the
        # raised exception.
        return bool(getattr(error, 'ROBOT_' + attr, False))
class ExecutionFailures(ExecutionFailed):
    """Combines multiple execution failures into a single exception."""

    def __init__(self, errors, message=None):
        message = message or self._format_message([e.message for e in errors])
        ExecutionFailed.__init__(self, message, **self._get_attrs(errors))
        self._errors = errors

    def _format_message(self, messages):
        if len(messages) == 1:
            return messages[0]
        prefix = 'Several failures occurred:'
        if any(msg.startswith('*HTML*') for msg in messages):
            # If any message is HTML, the combined message must be HTML too
            # and the plain-text parts need to be escaped.
            prefix = '*HTML* ' + prefix
            messages = self._format_html_messages(messages)
        return '\n\n'.join(
            [prefix] +
            ['%d) %s' % (i, m) for i, m in enumerate(messages, start=1)]
        )

    def _format_html_messages(self, messages):
        from robotide.lib.robot.utils import html_escape
        for msg in messages:
            if msg.startswith('*HTML*'):
                yield msg[6:].lstrip()
            else:
                yield html_escape(msg)

    def _get_attrs(self, errors):
        # Timeout/syntax/exit flags are sticky if any child error has them;
        # continuing is allowed only if all children allow it.
        return {
            'test_timeout': any(e.test_timeout for e in errors),
            'keyword_timeout': any(e.keyword_timeout for e in errors),
            'syntax': any(e.syntax for e in errors),
            'exit': any(e.exit for e in errors),
            'continue_on_failure': all(e.continue_on_failure for e in errors)
        }

    def get_errors(self):
        return self._errors
class UserKeywordExecutionFailed(ExecutionFailures):
    """Failure raised when a user keyword or its teardown fails."""

    def __init__(self, run_errors=None, teardown_errors=None):
        errors = self._get_active_errors(run_errors, teardown_errors)
        message = self._get_message(run_errors, teardown_errors)
        ExecutionFailures.__init__(self, errors, message)
        if run_errors and not teardown_errors:
            self._errors = run_errors.get_errors()
        else:
            self._errors = [self]

    def _get_active_errors(self, *errors):
        # Filter out missing (None) errors.
        return [err for err in errors if err]

    def _get_message(self, run_errors, teardown_errors):
        run_msg = run_errors.message if run_errors else ''
        td_msg = teardown_errors.message if teardown_errors else ''
        if not td_msg:
            return run_msg
        if not run_msg:
            return 'Keyword teardown failed:\n%s' % td_msg
        return '%s\n\nAlso keyword teardown failed:\n%s' % (run_msg, td_msg)
class ExecutionPassed(ExecutionStatus):
    """Base class for all exceptions communicating that execution passed.

    Should not be raised directly, but more detailed exceptions used instead.
    """

    def __init__(self, message=None, **kwargs):
        ExecutionStatus.__init__(self, message or self._get_message(), **kwargs)
        self._earlier_failures = []

    def _get_message(self):
        from robotide.lib.robot.utils import printable_name
        # E.g. ExitForLoop -> "Invalid 'Exit For Loop' usage."
        return ("Invalid '%s' usage."
                % printable_name(type(self).__name__, code_style=True))

    def set_earlier_failures(self, failures):
        if failures:
            self._earlier_failures = list(failures) + self._earlier_failures

    @property
    def earlier_failures(self):
        if not self._earlier_failures:
            return None
        return ExecutionFailures(self._earlier_failures)

    @property
    def status(self):
        # Earlier continuable failures turn the overall status into FAIL.
        return 'PASS' if not self._earlier_failures else 'FAIL'
class PassExecution(ExecutionPassed):
    """Used by 'Pass Execution' keyword."""

    def __init__(self, message):
        ExecutionPassed.__init__(self, message)


class ContinueForLoop(ExecutionPassed):
    """Used by 'Continue For Loop' keyword."""


class ExitForLoop(ExecutionPassed):
    """Used by 'Exit For Loop' keyword."""


class ReturnFromKeyword(ExecutionPassed):
    """Used by 'Return From Keyword' keyword."""

    def __init__(self, return_value=None, failures=None):
        ExecutionPassed.__init__(self, return_value=return_value)
        if failures:
            self.set_earlier_failures(failures)
class RemoteError(RobotError):
    """Used by Remote library to report remote errors."""

    def __init__(self, message='', details='', fatal=False, continuable=False):
        RobotError.__init__(self, message, details)
        # ROBOT_* attributes are inspected by HandlerExecutionFailed.
        self.ROBOT_EXIT_ON_FAILURE = fatal
        self.ROBOT_CONTINUE_ON_FAILURE = continuable
from robotide.lib.robot.utils import is_list_like, is_dict_like, is_string, unic
class ListenerArguments(object):
    """Converts and caches raw listener call arguments for API v2 and v3."""

    def __init__(self, arguments):
        self._arguments = arguments
        # Converted arguments are computed lazily, once per API version.
        self._cache = {2: None, 3: None}

    def get_arguments(self, version):
        """Return the argument tuple for the given listener API version."""
        key = 2 if version == 2 else 3
        if self._cache[key] is None:
            converter = (self._get_version2_arguments if key == 2
                         else self._get_version3_arguments)
            self._cache[key] = converter(*self._arguments)
        return self._cache[key]

    def _get_version2_arguments(self, *arguments):
        # By default arguments are passed through unchanged.
        return arguments

    def _get_version3_arguments(self, *arguments):
        return arguments

    @classmethod
    def by_method_name(cls, name, arguments):
        """Select the argument wrapper class matching the listener method."""
        wrapper_class = {'start_suite': StartSuiteArguments,
                         'end_suite': EndSuiteArguments,
                         'start_test': StartTestArguments,
                         'end_test': EndTestArguments,
                         'start_keyword': StartKeywordArguments,
                         'end_keyword': EndKeywordArguments,
                         'log_message': MessageArguments,
                         'message': MessageArguments}.get(name, ListenerArguments)
        return wrapper_class(arguments)
class MessageArguments(ListenerArguments):
    """Arguments for the ``log_message`` and ``message`` listener methods."""

    def _get_version2_arguments(self, msg):
        # Listener API v2 receives a plain attribute dictionary.
        attributes = {'timestamp': msg.timestamp,
                      'message': msg.message,
                      'level': msg.level,
                      'html': 'yes' if msg.html else 'no'}
        return attributes,

    def _get_version3_arguments(self, msg):
        # Listener API v3 receives the message object itself.
        return msg,
class _ListenerArgumentsFromItem(ListenerArguments):
    """Base class for arguments derived from a suite/test/keyword item."""
    # Names of item attributes exposed to listener API v2; set by subclasses.
    _attribute_names = None

    def _get_version2_arguments(self, item):
        # V2 listeners get the item name and an attribute dictionary.
        attributes = dict((name, self._get_attribute_value(item, name))
                          for name in self._attribute_names)
        attributes.update(self._get_extra_attributes(item))
        return item.name, attributes

    def _get_attribute_value(self, item, name):
        value = getattr(item, name)
        return self._take_copy_of_mutable_value(value)

    def _take_copy_of_mutable_value(self, value):
        # Copy lists and dicts so that listeners cannot mutate the model.
        if is_dict_like(value):
            return dict(value)
        if is_list_like(value):
            return list(value)
        return value

    def _get_extra_attributes(self, item):
        # Hook for subclasses to add attributes not read directly from item.
        return {}

    def _get_version3_arguments(self, item):
        return item.data, item.result
class StartSuiteArguments(_ListenerArgumentsFromItem):
    """Arguments for the ``start_suite`` listener method."""
    _attribute_names = ('id', 'longname', 'doc', 'metadata', 'starttime')

    def _get_extra_attributes(self, suite):
        return {'tests': [t.name for t in suite.tests],
                'suites': [s.name for s in suite.suites],
                'totaltests': suite.test_count,
                'source': suite.source or ''}
class EndSuiteArguments(StartSuiteArguments):
    """Arguments for the ``end_suite`` listener method."""
    _attribute_names = ('id', 'longname', 'doc', 'metadata', 'starttime',
                        'endtime', 'elapsedtime', 'status', 'message')

    def _get_extra_attributes(self, suite):
        attrs = StartSuiteArguments._get_extra_attributes(self, suite)
        attrs['statistics'] = suite.stat_message
        return attrs
class StartTestArguments(_ListenerArgumentsFromItem):
    """Arguments for the ``start_test`` listener method."""
    _attribute_names = ('id', 'longname', 'doc', 'tags', 'starttime')

    def _get_extra_attributes(self, test):
        return {'critical': 'yes' if test.critical else 'no',
                'template': test.template or ''}


class EndTestArguments(StartTestArguments):
    """Arguments for the ``end_test`` listener method."""
    _attribute_names = ('id', 'longname', 'doc', 'tags', 'starttime',
                        'endtime', 'elapsedtime', 'status', 'message')
class StartKeywordArguments(_ListenerArgumentsFromItem):
    """Arguments for the ``start_keyword`` listener method."""
    _attribute_names = ('kwname', 'libname', 'doc', 'assign', 'tags',
                        'starttime')
    # Maps internal keyword type identifiers to names exposed to listeners.
    _types = {'kw': 'Keyword', 'setup': 'Setup', 'teardown': 'Teardown',
              'for': 'For', 'foritem': 'For Item'}

    def _get_extra_attributes(self, kw):
        # Non-string arguments are converted to their string representation.
        args = [a if is_string(a) else unic(a) for a in kw.args]
        return {'args': args, 'type': self._types[kw.type]}


class EndKeywordArguments(StartKeywordArguments):
    """Arguments for the ``end_keyword`` listener method."""
    _attribute_names = ('kwname', 'libname', 'doc', 'args', 'assign', 'tags',
                        'starttime', 'endtime', 'elapsedtime', 'status')
from robotide.lib.robot.errors import TimeoutError
from robotide.lib.robot.utils import get_error_details, py2to3
from .listenerarguments import ListenerArguments
from .logger import LOGGER
@py2to3
class ListenerMethods(object):
    """Collects one listener API method from all listeners and calls them."""

    def __init__(self, method_name, listeners):
        self._methods = []
        self._method_name = method_name
        if listeners:
            self._register_methods(method_name, listeners)

    def _register_methods(self, method_name, listeners):
        for listener in listeners:
            method = getattr(listener, method_name)
            if method:
                self._methods.append(ListenerMethod(method, listener))

    def __call__(self, *args):
        if self._methods:
            # Convert raw arguments once; each method then picks the form
            # matching its listener API version.
            args = ListenerArguments.by_method_name(self._method_name, args)
            for method in self._methods:
                method(args.get_arguments(method.version))

    def __nonzero__(self):
        # Python 2 truth hook; the ``py2to3`` decorator presumably maps it
        # to ``__bool__`` on Python 3 -- confirm against robot.utils.
        return bool(self._methods)
class LibraryListenerMethods(object):
    """Like ListenerMethods, but for library listeners with suite scoping."""

    def __init__(self, method_name):
        # Stack of per-suite registration lists; the top is the active scope.
        self._method_stack = []
        self._method_name = method_name

    def new_suite_scope(self):
        self._method_stack.append([])

    def discard_suite_scope(self):
        self._method_stack.pop()

    def register(self, listeners, library):
        methods = self._method_stack[-1]
        for listener in listeners:
            method = getattr(listener, self._method_name)
            if method:
                info = ListenerMethod(method, listener, library)
                methods.append(info)

    def unregister(self, library):
        methods = [m for m in self._method_stack[-1] if m.library is not library]
        self._method_stack[-1] = methods

    def __call__(self, *args, **conf):
        methods = self._get_methods(**conf)
        if methods:
            args = ListenerArguments.by_method_name(self._method_name, args)
            for method in methods:
                method(args.get_arguments(method.version))

    def _get_methods(self, library=None):
        if not (self._method_stack and self._method_stack[-1]):
            return []
        methods = self._method_stack[-1]
        # With an explicit library only its own listener methods are called.
        if library:
            return [m for m in methods if m.library is library]
        return methods
class ListenerMethod(object):
    """Wraps a single listener method and guards calls into it."""

    # Class-level flag to avoid recursive listener calls: while any listener
    # method is running, further listener invocations are ignored.
    called = False

    def __init__(self, method, listener, library=None):
        self.method = method
        self.listener_name = listener.name
        self.version = listener.version
        self.library = library

    def __call__(self, args):
        if self.called:
            return
        try:
            ListenerMethod.called = True
            self.method(*args)
        except TimeoutError:
            # Propagate possible timeouts:
            # https://github.com/robotframework/robotframework/issues/2763
            raise
        except Exception:
            # Was a bare ``except:``; catching only ``Exception`` lets
            # KeyboardInterrupt and SystemExit terminate execution normally
            # instead of being swallowed as listener errors.
            message, details = get_error_details()
            LOGGER.error("Calling method '%s' of listener '%s' failed: %s"
                         % (self.method.__name__, self.listener_name, message))
            LOGGER.info("Details:\n%s" % details)
        finally:
            ListenerMethod.called = False
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.model import Message as BaseMessage
from robotide.lib.robot.utils import get_timestamp, is_unicode, unic
# Mapping from log level name to numeric severity. A message is logged when
# its severity is at least the configured threshold (see IsLogged below).
LEVELS = {
    'NONE': 6,
    'FAIL': 5,
    'ERROR': 4,
    'WARN': 3,
    'INFO': 2,
    'DEBUG': 1,
    'TRACE': 0,
}
class AbstractLogger(object):
    """Base logger providing level filtering and per-level helper methods.

    Subclasses must implement ``message`` to do the actual logging.
    """

    def __init__(self, level='TRACE'):
        self._is_logged = IsLogged(level)

    def set_level(self, level):
        # Returns the previous level name.
        return self._is_logged.set_level(level)

    def trace(self, msg):
        self.write(msg, 'TRACE')

    def debug(self, msg):
        self.write(msg, 'DEBUG')

    def info(self, msg):
        self.write(msg, 'INFO')

    def warn(self, msg):
        self.write(msg, 'WARN')

    def fail(self, msg):
        # A leading '*HTML*' marker makes the failure message HTML formatted.
        html = False
        if msg.startswith("*HTML*"):
            html = True
            msg = msg[6:].lstrip()
        self.write(msg, 'FAIL', html)

    def error(self, msg):
        self.write(msg, 'ERROR')

    def write(self, message, level, html=False):
        self.message(Message(message, level, html))

    def message(self, msg):
        raise NotImplementedError(self.__class__)
class Message(BaseMessage):
    """Log message with normalization and support for delayed resolving.

    The message text may be given as a callable, in which case it is
    resolved only when the text is actually accessed.
    """
    __slots__ = ['_message']

    def __init__(self, message, level='INFO', html=False, timestamp=None):
        message = self._normalize_message(message)
        level, html = self._get_level_and_html(level, html)
        timestamp = timestamp or get_timestamp()
        BaseMessage.__init__(self, message, level, html, timestamp)

    def _normalize_message(self, msg):
        if callable(msg):
            # Keep callables as-is; they are resolved lazily later.
            return msg
        if not is_unicode(msg):
            msg = unic(msg)
        if '\r\n' in msg:
            msg = msg.replace('\r\n', '\n')
        return msg

    def _get_level_and_html(self, level, html):
        level = level.upper()
        if level == 'HTML':
            # The pseudo level 'HTML' means INFO with HTML formatting.
            return 'INFO', True
        if level not in LEVELS:
            raise DataError("Invalid log level '%s'." % level)
        return level, html

    @property
    def message(self):
        self.resolve_delayed_message()
        return self._message

    @message.setter
    def message(self, message):
        self._message = message

    def resolve_delayed_message(self):
        if callable(self._message):
            self._message = self._message()
class IsLogged(object):
    """Callable that tells whether a message at a given level is logged.

    Constructed with a threshold level name; calling the instance with a
    level name returns True when that level is at least as severe as the
    threshold.
    """

    def __init__(self, level):
        self._level_text = level
        self._threshold = self._level_to_int(level)

    def __call__(self, level):
        return self._level_to_int(level) >= self._threshold

    def set_level(self, level):
        """Change the threshold and return the previous level name."""
        previous = self._level_text.upper()
        self.__init__(level)
        return previous

    def _level_to_int(self, level):
        normalized = level.upper()
        if normalized not in LEVELS:
            raise DataError("Invalid log level '%s'." % level)
        return LEVELS[normalized]
class AbstractLoggerProxy(object):
    """Binds methods of a wrapped logger onto the proxy instance.

    For every method name (given explicitly or via the ``_methods`` class
    attribute) the proxy looks up a matching attribute on the wrapped
    logger -- trying snake_case, camelCase and optionally prefixed
    variants -- and falls back to a no-op when none exists.
    """
    _methods = None
    # Fallback used when the wrapped logger has no matching method. Kept as
    # a class-level lambda so that attribute access yields a bound no-op.
    _no_method = lambda *args: None

    def __init__(self, logger, method_names=None, prefix=None):
        self.logger = logger
        names = method_names or self._methods
        for name in names:
            setattr(self, name, self._get_method(logger, name, prefix))

    def _get_method(self, logger, name, prefix):
        candidates = self._get_method_names(name, prefix)
        matches = (getattr(logger, candidate) for candidate in candidates
                   if hasattr(logger, candidate))
        return next(matches, self._no_method)

    def _get_method_names(self, name, prefix):
        if '_' in name:
            names = [name, self._toCamelCase(name)]
        else:
            names = [name]
        if prefix:
            names += [prefix + n for n in names]
        return names

    def _toCamelCase(self, name):
        parts = name.split('_')
        return parts[0] + ''.join(part.capitalize() for part in parts[1:])
from robotide.lib.robot.errors import DataError, VariableError
from robotide.lib.robot.utils import (DotDict, is_dict_like, is_list_like, NormalizedDict,
type_name)
from .isvar import validate_var
from .notfound import variable_not_found
from .tablesetter import VariableTableValueBase
class VariableStore(object):
    """Stores variables and resolves variable table values lazily."""

    def __init__(self, variables):
        # Variable names are stored normalized, ignoring underscores.
        self.data = NormalizedDict(ignore='_')
        self._variables = variables

    def resolve_delayed(self):
        # Iterate over a copy since resolving may mutate ``self.data``.
        for name, value in list(self.data.items()):
            try:
                self._resolve_delayed(name, value)
            except DataError:
                # Errors are reported later when the variable is used.
                pass

    def _resolve_delayed(self, name, value):
        if not self._is_resolvable(value):
            return value
        try:
            self.data[name] = value.resolve(self._variables)
        except DataError as err:
            # Recursive resolving may have already removed variable.
            if name in self:
                self.remove(name)
                value.report_error(err)
            variable_not_found('${%s}' % name, self.data,
                               "Variable '${%s}' not found." % name)
        return self.data[name]

    def _is_resolvable(self, value):
        try:  # isinstance can throw an exception in ironpython and jython
            return isinstance(value, VariableTableValueBase)
        except Exception:
            return False

    def __getitem__(self, name):
        # Resolves a possibly still delayed variable table value on access.
        return self._resolve_delayed(name, self.data[name])

    def update(self, store):
        self.data.update(store.data)

    def clear(self):
        self.data.clear()

    def add(self, name, value, overwrite=True, decorated=True):
        if decorated:
            name, value = self._undecorate(name, value)
        if overwrite or name not in self.data:
            self.data[name] = value

    def _undecorate(self, name, value):
        # Strips e.g. ``${name}`` -> ``name`` and validates/converts the
        # value based on the variable type character.
        validate_var(name)
        if name[0] == '@':
            if not is_list_like(value):
                self._raise_cannot_set_type(name, value, 'list')
            value = list(value)
        if name[0] == '&':
            if not is_dict_like(value):
                self._raise_cannot_set_type(name, value, 'dictionary')
            value = DotDict(value)
        return name[2:-1], value

    def _raise_cannot_set_type(self, name, value, expected):
        raise VariableError("Cannot set variable '%s': Expected %s-like value, "
                            "got %s." % (name, expected, type_name(value)))

    def remove(self, name):
        if name in self.data:
            self.data.pop(name)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __contains__(self, name):
        return name in self.data

    def as_dict(self, decoration=True):
        if decoration:
            # Yields names re-decorated based on the value type.
            variables = (self._decorate(name, self[name]) for name in self)
        else:
            variables = self.data
        return NormalizedDict(variables, ignore='_')

    def _decorate(self, name, value):
        if is_dict_like(value):
            name = '&{%s}' % name
        elif is_list_like(value):
            name = '@{%s}' % name
        else:
            name = '${%s}' % name
        return name, value
import os
import tempfile
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.output import LOGGER
from robotide.lib.robot.utils import abspath, find_file, get_error_details, NormalizedDict
from .variables import Variables
class VariableScopes(object):
    """Manages the stack of variable scopes (global, suite, test, keyword)."""

    def __init__(self, settings):
        self._global = GlobalVariables(settings)
        self._suite = None
        self._test = None
        # Scope stack; the last element is the currently active scope.
        self._scopes = [self._global]
        self._variables_set = SetVariables()

    @property
    def current(self):
        return self._scopes[-1]

    @property
    def _all_scopes(self):
        return reversed(self._scopes)

    @property
    def _scopes_until_suite(self):
        # Yields scopes from the innermost down to the current suite scope.
        for scope in self._all_scopes:
            yield scope
            if scope is self._suite:
                break

    @property
    def _scopes_until_test(self):
        for scope in self._scopes_until_suite:
            yield scope
            if scope is self._test:
                break

    def start_suite(self):
        self._suite = self._global.copy()
        self._scopes.append(self._suite)
        self._variables_set.start_suite()
        self._variables_set.update(self._suite)

    def end_suite(self):
        self._scopes.pop()
        self._suite = self._scopes[-1] if len(self._scopes) > 1 else None
        self._variables_set.end_suite()

    def start_test(self):
        self._test = self._suite.copy()
        self._scopes.append(self._test)
        self._variables_set.start_test()

    def end_test(self):
        self._scopes.pop()
        self._test = None
        self._variables_set.end_test()

    def start_keyword(self):
        # Keyword scope starts from the suite scope plus variables set with
        # Set Suite/Test Variable tracked by SetVariables.
        kw = self._suite.copy()
        self._variables_set.start_keyword()
        self._variables_set.update(kw)
        self._scopes.append(kw)

    def end_keyword(self):
        self._scopes.pop()
        self._variables_set.end_keyword()

    def __getitem__(self, name):
        return self.current[name]

    def __setitem__(self, name, value):
        self.current[name] = value

    def __contains__(self, name):
        return name in self.current

    def replace_list(self, items, replace_until=None, ignore_errors=False):
        return self.current.replace_list(items, replace_until, ignore_errors)

    def replace_scalar(self, items, ignore_errors=False):
        return self.current.replace_scalar(items, ignore_errors)

    def replace_string(self, string, ignore_errors=False):
        return self.current.replace_string(string, ignore_errors=ignore_errors)

    def set_from_file(self, path, args, overwrite=False):
        # The file is read only once; the resulting variables are then
        # propagated to the outer scopes.
        variables = None
        for scope in self._scopes_until_suite:
            if variables is None:
                variables = scope.set_from_file(path, args, overwrite)
            else:
                scope.set_from_file(variables, overwrite=overwrite)

    def set_from_variable_table(self, variables, overwrite=False):
        for scope in self._scopes_until_suite:
            scope.set_from_variable_table(variables, overwrite)

    def resolve_delayed(self):
        for scope in self._scopes_until_suite:
            scope.resolve_delayed()

    def set_global(self, name, value):
        for scope in self._all_scopes:
            name, value = self._set_global_suite_or_test(scope, name, value)
        self._variables_set.set_global(name, value)

    def _set_global_suite_or_test(self, scope, name, value):
        scope[name] = value
        # Avoid creating new list/dict objects in different scopes.
        if name[0] != '$':
            name = '$' + name[1:]
            value = scope[name]
        return name, value

    def set_suite(self, name, value, top=False, children=False):
        # ``top`` targets the top-level suite scope directly.
        if top:
            self._scopes[1][name] = value
            return
        for scope in self._scopes_until_suite:
            name, value = self._set_global_suite_or_test(scope, name, value)
        if children:
            self._variables_set.set_suite(name, value)

    def set_test(self, name, value):
        if self._test is None:
            raise DataError('Cannot set test variable when no test is started.')
        for scope in self._scopes_until_test:
            name, value = self._set_global_suite_or_test(scope, name, value)
        self._variables_set.set_test(name, value)

    def set_keyword(self, name, value):
        self.current[name] = value
        self._variables_set.set_keyword(name, value)

    def as_dict(self, decoration=True):
        return self.current.as_dict(decoration=decoration)
class GlobalVariables(Variables):
    """Global variable scope initialized from command line settings.

    Populates itself with variable files and individual variables given on
    the command line, plus the automatic built-in variables.
    """

    def __init__(self, settings):
        Variables.__init__(self)
        self._set_cli_variables(settings)
        self._set_built_in_variables(settings)

    def _set_cli_variables(self, settings):
        for path, args in settings.variable_files:
            try:
                path = find_file(path, file_type='Variable file')
                self.set_from_file(path, args)
            except Exception:
                # Was a bare ``except:``; catching only ``Exception`` lets
                # KeyboardInterrupt and SystemExit terminate execution
                # normally. Import problems are logged but do not stop
                # processing the remaining variable files.
                msg, details = get_error_details()
                LOGGER.error(msg)
                LOGGER.info(details)
        for varstr in settings.variables:
            try:
                name, value = varstr.split(':', 1)
            except ValueError:
                # No colon: the whole string is the name; value is empty.
                name, value = varstr, ''
            self['${%s}' % name] = value

    def _set_built_in_variables(self, settings):
        # Automatic built-in variables available in every execution.
        for name, value in [('${TEMPDIR}', abspath(tempfile.gettempdir())),
                            ('${EXECDIR}', abspath('.')),
                            ('${/}', os.sep),
                            ('${:}', os.pathsep),
                            ('${\\n}', os.linesep),
                            ('${SPACE}', ' '),
                            ('${True}', True),
                            ('${False}', False),
                            ('${None}', None),
                            ('${null}', None),
                            ('${OUTPUT_DIR}', settings.output_directory),
                            ('${OUTPUT_FILE}', settings.output or 'NONE'),
                            ('${REPORT_FILE}', settings.report or 'NONE'),
                            ('${LOG_FILE}', settings.log or 'NONE'),
                            ('${DEBUG_FILE}', settings.debug_file or 'NONE'),
                            ('${LOG_LEVEL}', settings.log_level),
                            ('${PREV_TEST_NAME}', ''),
                            ('${PREV_TEST_STATUS}', ''),
                            ('${PREV_TEST_MESSAGE}', '')]:
            self[name] = value
class SetVariables(object):
    """Tracks variables set with Set Global/Suite/Test/Keyword Variable."""

    def __init__(self):
        self._suite = None
        self._test = None
        # Stack of NormalizedDicts, one per active scope.
        self._scopes = []

    def start_suite(self):
        if not self._scopes:
            self._suite = NormalizedDict(ignore='_')
        else:
            # Child suites inherit variables set by their parent.
            self._suite = self._scopes[-1].copy()
        self._scopes.append(self._suite)

    def end_suite(self):
        self._scopes.pop()
        self._suite = self._scopes[-1] if self._scopes else None

    def start_test(self):
        self._test = self._scopes[-1].copy()
        self._scopes.append(self._test)

    def end_test(self):
        self._test = None
        self._scopes.pop()

    def start_keyword(self):
        self._scopes.append(self._scopes[-1].copy())

    def end_keyword(self):
        self._scopes.pop()

    def set_global(self, name, value):
        # NOTE(review): ``value`` is intentionally unused here; global
        # values live in the variable scopes themselves, and this only
        # drops any suite/test level overrides of the name -- confirm.
        for scope in self._scopes:
            if name in scope:
                scope.pop(name)

    def set_suite(self, name, value):
        self._suite[name] = value

    def set_test(self, name, value):
        # The value is visible in every scope from the innermost down to
        # the current test scope.
        for scope in reversed(self._scopes):
            scope[name] = value
            if scope is self._test:
                break

    def set_keyword(self, name, value):
        self._scopes[-1][name] = value

    def update(self, variables):
        # Copies variables tracked in the innermost scope into ``variables``.
        for name, value in self._scopes[-1].items():
            variables[name] = value
from contextlib import contextmanager
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.utils import DotDict, is_string, split_from_equals, unic
from .isvar import validate_var
from .splitter import VariableSplitter
class VariableTableSetter(object):
    """Adds variables parsed from a variable table into a variable store."""

    def __init__(self, store):
        self._store = store

    def set(self, variables, overwrite=False):
        """Read the given table rows and add each variable to the store."""
        reader = VariableTableReader()
        for name, value in reader.read(variables):
            self._store.add(name, value, overwrite, decorated=False)
class VariableTableReader(object):
    """Turns variable table rows into ``(name, value)`` pairs."""

    def read(self, variables):
        """Yield ``(name, value)`` for each non-empty, valid table row.

        Rows with invalid syntax are reported through the row's own
        ``report_invalid_syntax`` callback and skipped.
        """
        for var in variables:
            if not var:
                continue
            try:
                yield self.get_name_and_value(var.name, var.value,
                                              var.report_invalid_syntax)
            except DataError as err:
                var.report_invalid_syntax(err)

    def get_name_and_value(self, name, value, error_reporter):
        # Strip the '${' prefix and '}' suffix to get the base name.
        base = name[2:-1]
        return base, VariableTableValue(value, name, error_reporter)
def VariableTableValue(value, name, error_reporter=None):
    """Factory creating the value wrapper matching the variable type.

    The first character of ``name`` ('$', '@' or '&') selects between
    scalar, list and dictionary table values. ``validate_var`` raises
    for names that are not valid variables.
    """
    validate_var(name)
    implementations = {'$': ScalarVariableTableValue,
                       '@': ListVariableTableValue,
                       '&': DictVariableTableValue}
    value_class = implementations[name[0]]
    return value_class(value, error_reporter)
class VariableTableValueBase(object):
    """Base class for values defined in the variable table.

    Values are stored raw and resolved on demand via :meth:`resolve`,
    with protection against recursive definitions.
    """

    def __init__(self, values, error_reporter=None):
        self._values = self._format_values(values)
        self._error_reporter = error_reporter
        self._resolving = False

    def _format_values(self, values):
        # Subclasses may normalize the raw values; default is pass-through.
        return values

    def resolve(self, variables):
        """Replace variables in the stored value using ``variables``."""
        with self._avoid_recursion:
            return self._replace_variables(self._values, variables)

    @property
    @contextmanager
    def _avoid_recursion(self):
        # Guards against '${X} = ${X}' style self-references.
        if self._resolving:
            raise DataError('Recursive variable definition.')
        self._resolving = True
        try:
            yield
        finally:
            self._resolving = False

    def _replace_variables(self, value, variables):
        raise NotImplementedError

    def report_error(self, error):
        reporter = self._error_reporter
        if reporter:
            reporter(unic(error))
class ScalarVariableTableValue(VariableTableValueBase):
    """Scalar (${name}) value defined in the variable table.

    Multiple cells are joined into one string, by default with a single
    space, or with a custom separator given as a 'SEPARATOR=' first cell.
    """

    def _format_values(self, values):
        # Normalizes to a (separator, values) pair; separator None means
        # the default single space.
        if is_string(values):
            return None, [values]
        if values and values[0].startswith('SEPARATOR='):
            return values[0][len('SEPARATOR='):], values[1:]
        return None, values

    def _replace_variables(self, values, variables):
        separator, items = values
        if self._is_single_value(separator, items):
            # Single plain value: resolve as a scalar so non-string
            # objects are preserved instead of being stringified.
            return variables.replace_scalar(items[0])
        sep = ' ' if separator is None else separator
        sep = variables.replace_string(sep)
        resolved = variables.replace_list(items)
        return sep.join(unic(item) for item in resolved)

    def _is_single_value(self, separator, values):
        if separator is not None or len(values) != 1:
            return False
        return not VariableSplitter(values[0]).is_list_variable()
class ListVariableTableValue(VariableTableValueBase):
    # List (@{name}) value: each table cell becomes one list item.
    def _replace_variables(self, values, variables):
        return variables.replace_list(values)
class DictVariableTableValue(VariableTableValueBase):
    # Dictionary (&{name}) value defined in the variable table.
    def _format_values(self, values):
        return list(self._yield_formatted(values))

    def _yield_formatted(self, values):
        # Items are either existing &{dict} variables (kept as-is and
        # expanded at resolve time) or 'name=value' pairs split here.
        for item in values:
            if VariableSplitter(item).is_dict_variable():
                yield item
            else:
                name, value = split_from_equals(item)
                if value is None:
                    raise DataError("Dictionary item '%s' does not contain "
                                    "'=' separator." % item)
                yield name, value

    def _replace_variables(self, values, variables):
        try:
            return DotDict(self._yield_replaced(values,
                                                variables.replace_scalar))
        except TypeError as err:
            # DotDict construction fails e.g. with malformed items.
            raise DataError('Creating dictionary failed: %s' % err)

    def _yield_replaced(self, values, replace_scalar):
        for item in values:
            if isinstance(item, tuple):
                key, values = item
                yield replace_scalar(key), replace_scalar(values)
            else:
                # A &{dict} variable: expand all of its items.
                for key, values in replace_scalar(item).items():
                    yield key, values | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/lib/robot/variables/tablesetter.py | 0.599368 | 0.276849 | tablesetter.py | pypi |
from robotide.lib.robot.errors import DataError, VariableError
from robotide.lib.robot.output import LOGGER
from robotide.lib.robot.utils import (escape, is_dict_like, is_list_like, is_string,
type_name, unescape, unic)
from .splitter import VariableSplitter
class VariableReplacer(object):
    """Replaces ${scalar}, @{list} and &{dict} variables in data.

    Works on lists, scalars and strings; actual variable values are
    looked up from the ``variables`` mapping given to the constructor.
    """

    def __init__(self, variables):
        self._variables = variables

    def replace_list(self, items, replace_until=None, ignore_errors=False):
        """Replaces variables from a list of items.

        If an item in a list is a @{list} variable its value is returned.
        Possible variables from other items are replaced using 'replace_scalar'.
        Result is always a list.

        'replace_until' can be used to limit replacing arguments to certain
        index from the beginning. Used with Run Keyword variants that only
        want to resolve some of the arguments in the beginning and pass others
        to called keywords unmodified.
        """
        items = list(items or [])
        if replace_until is not None:
            return self._replace_list_until(items, replace_until, ignore_errors)
        return list(self._replace_list(items, ignore_errors))

    def _replace_list_until(self, items, replace_until, ignore_errors):
        # @{list} variables can contain more or less arguments than needed.
        # Therefore we need to go through items one by one, and escape possible
        # extra items we got.
        replaced = []
        while len(replaced) < replace_until and items:
            replaced.extend(self._replace_list([items.pop(0)], ignore_errors))
        if len(replaced) > replace_until:
            replaced[replace_until:] = [escape(item)
                                        for item in replaced[replace_until:]]
        return replaced + items

    def _replace_list(self, items, ignore_errors):
        for item in items:
            if self._cannot_have_variables(item):
                yield unescape(item)
            else:
                for value in self._replace_list_item(item, ignore_errors):
                    yield value

    def _replace_list_item(self, item, ignore_errors):
        # A @{list} item expands to multiple values; anything else
        # produces exactly one value.
        splitter = VariableSplitter(item)
        try:
            value = self._replace_scalar(item, splitter)
        except DataError:
            if ignore_errors:
                return [item]
            raise
        if splitter.is_list_variable():
            return value
        return [value]

    def replace_scalar(self, item, ignore_errors=False):
        """Replaces variables from a scalar item.

        If the item is not a string it is returned as is. If it is a ${scalar}
        variable its value is returned. Otherwise variables are replaced with
        'replace_string'. Result may be any object.
        """
        if self._cannot_have_variables(item):
            return unescape(item)
        return self._replace_scalar(item, ignore_errors=ignore_errors)

    def _replace_scalar(self, item, splitter=None, ignore_errors=False):
        if not splitter:
            splitter = VariableSplitter(item)
        if not splitter.identifier:
            # No variables at all; only resolve escapes.
            return unescape(item)
        if not splitter.is_variable():
            # Variable is embedded in other text; result is a string.
            return self._replace_string(item, splitter, ignore_errors)
        try:
            return self._get_variable(splitter)
        except DataError:
            if ignore_errors:
                return item
            raise

    def _cannot_have_variables(self, item):
        return not (is_string(item) and '{' in item)

    def replace_string(self, string, ignore_errors=False):
        """Replaces variables from a string. Result is always a string."""
        if not is_string(string):
            return unic(string)
        if self._cannot_have_variables(string):
            return unescape(string)
        return self._replace_string(string, ignore_errors=ignore_errors)

    def _replace_string(self, string, splitter=None, ignore_errors=False):
        if not splitter:
            splitter = VariableSplitter(string)
        return ''.join(self._yield_replaced(string, splitter, ignore_errors))

    def _yield_replaced(self, string, splitter, ignore_errors=False):
        # Yields alternating literal text and resolved variable values
        # until no more variables are found in the remaining string.
        while splitter.identifier:
            yield unescape(string[:splitter.start])
            try:
                value = self._get_variable(splitter)
            except DataError:
                if not ignore_errors:
                    raise
                # Leave the unresolvable variable in the result as-is.
                value = string[splitter.start:splitter.end]
            yield unic(value)
            string = string[splitter.end:]
            splitter = VariableSplitter(string)
        yield unescape(string)

    def _get_variable(self, splitter):
        if splitter.identifier not in '$@&%':
            return self._get_reserved_variable(splitter)
        name = splitter.get_replaced_variable(self)
        variable = self._variables[name]
        # Resolve possible item accesses like ${var}[0][key] one by one.
        for item in splitter.items:
            variable = self._get_variable_item(name, variable, item)
            name = '%s[%s]' % (name, item)
        return variable

    def _get_variable_item(self, name, variable, item):
        if is_dict_like(variable):
            return self._get_dict_variable_item(name, variable, item)
        if is_list_like(variable):
            return self._get_list_variable_item(name, variable, item)
        raise VariableError("Variable '%s' is %s, not list or dictionary, "
                            "and thus accessing item '%s' from it is not "
                            "possible."
                            % (name, type_name(variable), item))

    def _get_reserved_variable(self, splitter):
        # Identifiers other than $, @, & and % are reserved for future use.
        value = splitter.get_replaced_variable(self)
        LOGGER.warn("Syntax '%s' is reserved for future use. Please "
                    "escape it like '\\%s'." % (value, value))
        return value

    def _get_list_variable_item(self, name, variable, index):
        # Index may itself contain variables; slices are supported only
        # with the ${scalar} access syntax.
        index = self.replace_string(index)
        try:
            index = self._parse_list_variable_index(index, name[0] == '$')
        except ValueError:
            raise VariableError("List '%s' used with invalid index '%s'."
                                % (name, index))
        try:
            return variable[index]
        except IndexError:
            raise VariableError("List '%s' has no item in index %d."
                                % (name, index))

    def _parse_list_variable_index(self, index, support_slice=True):
        if ':' not in index:
            return int(index)
        if index.count(':') > 2 or not support_slice:
            raise ValueError
        return slice(*[int(i) if i else None for i in index.split(':')])

    def _get_dict_variable_item(self, name, variable, key):
        key = self.replace_scalar(key)
        try:
            return variable[key]
        except KeyError:
            raise VariableError("Dictionary '%s' has no key '%s'."
                                % (name, key))
        except TypeError as err:
            raise VariableError("Dictionary '%s' used with invalid key: %s"
                                % (name, err)) | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/lib/robot/variables/replacer.py | 0.706798 | 0.241758 | replacer.py | pypi |
from robotide.lib.robot.utils import is_string, py2to3
class VariableSplitter(object):
    """Finds and splits the first variable in a string.

    After construction ``identifier`` is the variable's marker character
    (e.g. '$'), ``base`` the text between the curly braces, ``items``
    possible trailing '[item]' accesses, and ``start``/``end`` the
    variable's slice indices in the original string. ``identifier``
    stays ``None`` when the string contains no variable.
    """

    def __init__(self, string, identifiers='$@%&*'):
        self.identifier = None
        self.base = None
        self.items = []
        self.start = -1
        self.end = -1
        self._identifiers = identifiers
        self._may_have_internal_variables = False
        if not is_string(string):
            self._max_end = -1
            return
        self._max_end = len(string)
        if self._split(string):
            self._finalize()

    def get_replaced_variable(self, replacer):
        # Resolve possible variables inside the base name, e.g. ${x${y}}.
        if self._may_have_internal_variables:
            base = replacer.replace_string(self.base)
        else:
            base = self.base
        # This omits possible variable items.
        return '%s{%s}' % (self.identifier, base)

    def is_variable(self):
        # True when the whole string is exactly one variable.
        return bool(self.identifier and self.base and
                    self.start == 0 and self.end == self._max_end)

    def is_list_variable(self):
        return bool(self.identifier == '@' and self.base and
                    self.start == 0 and self.end == self._max_end and
                    not self.items)

    def is_dict_variable(self):
        return bool(self.identifier == '&' and self.base and
                    self.start == 0 and self.end == self._max_end and
                    not self.items)

    def _finalize(self):
        # Collected characters look like '${base}'; items are stored
        # separately without their surrounding brackets.
        self.identifier = self._variable_chars[0]
        self.base = ''.join(self._variable_chars[2:-1])
        self.end = self.start + len(self._variable_chars)
        if self.items:
            # Each item adds its text plus the two bracket characters.
            self.end += len(''.join(self.items)) + 2 * len(self.items)

    def _split(self, string):
        # Character-by-character state machine starting after '<id>{'.
        start_index, max_index = self._find_variable(string)
        if start_index == -1:
            return False
        self.start = start_index
        self._open_curly = 1
        self._state = self._variable_state
        self._variable_chars = [string[start_index], '{']
        self._item_chars = []
        self._string = string
        start_index += 2
        for index, char in enumerate(string[start_index:], start=start_index):
            try:
                self._state(char, index)
            except StopIteration:
                break
            if index == max_index and not self._scanning_item():
                break
        return True

    def _scanning_item(self):
        return self._state in (self._waiting_item_state, self._item_state)

    def _find_variable(self, string):
        # Search backwards from the last unescaped '}' for a matching
        # '<identifier>{' start.
        max_end_index = string.rfind('}')
        if max_end_index == -1:
            return -1, -1
        if self._is_escaped(string, max_end_index):
            return self._find_variable(string[:max_end_index])
        start_index = self._find_start_index(string, 1, max_end_index)
        if start_index == -1:
            return -1, -1
        return start_index, max_end_index

    def _find_start_index(self, string, start, end):
        while True:
            index = string.find('{', start, end) - 1
            if index < 0:
                return -1
            if self._start_index_is_ok(string, index):
                return index
            start = index + 2

    def _start_index_is_ok(self, string, index):
        return (string[index] in self._identifiers
                and not self._is_escaped(string, index))

    def _is_escaped(self, string, index):
        # An odd number of preceding backslashes means escaped.
        escaped = False
        while index > 0 and string[index-1] == '\\':
            index -= 1
            escaped = not escaped
        return escaped

    def _variable_state(self, char, index):
        # Inside '{...}': track nested curlies and possible internal
        # variables until the matching '}' closes the variable.
        self._variable_chars.append(char)
        if char == '}' and not self._is_escaped(self._string, index):
            self._open_curly -= 1
            if self._open_curly == 0:
                if not self._can_have_item():
                    raise StopIteration
                self._state = self._waiting_item_state
        elif char in self._identifiers:
            self._state = self._internal_variable_start_state

    def _can_have_item(self):
        return self._variable_chars[0] in '$@&'

    def _internal_variable_start_state(self, char, index):
        self._state = self._variable_state
        if char == '{':
            self._variable_chars.append(char)
            self._open_curly += 1
            self._may_have_internal_variables = True
        else:
            self._variable_state(char, index)

    def _waiting_item_state(self, char, index):
        # After the closing '}' only '[' continues the variable with an
        # item access; anything else ends the scan.
        if char != '[':
            raise StopIteration
        self._state = self._item_state

    def _item_state(self, char, index):
        if char != ']':
            self._item_chars.append(char)
            return
        self.items.append(''.join(self._item_chars))
        self._item_chars = []
        # Don't support nested item access with old @ and & syntax.
        # In RF 3.2 old syntax is to be deprecated and in RF 3.3 it
        # will be reassigned to mean using variable in list/dict context.
        if self._variable_chars[0] in '@&':
            raise StopIteration
        self._state = self._waiting_item_state
@py2to3
class VariableIterator(object):
    """Iterates ``(before, variable, rest)`` triples over all variables
    found in the given string."""

    def __init__(self, string, identifiers='$@%&*'):
        self._string = string
        self._identifiers = identifiers

    def __iter__(self):
        string = self._string
        while True:
            var = VariableSplitter(string, self._identifiers)
            if var.identifier is None:
                break
            before = string[:var.start]
            variable = '%s{%s}' % (var.identifier, var.base)
            string = string[var.end:]
            yield before, variable, string

    def __len__(self):
        # Number of variables in the string.
        return sum(1 for _ in self)

    def __nonzero__(self):
        # Python 2 truth hook; presumably mapped to __bool__ by @py2to3
        # (TODO confirm against the py2to3 utility).
        try:
            next(iter(self))
        except StopIteration:
            return False
        else:
            return True | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/lib/robot/variables/splitter.py | 0.483405 | 0.224757 | splitter.py | pypi |
import re
from robotide.lib.robot.errors import (DataError, ExecutionStatus, HandlerExecutionFailed, VariableError)
from robotide.lib.robot.utils import (ErrorDetails, format_assign_message, get_error_message, is_number, is_string,
prepr, type_name)
class VariableAssignment(object):
    """Variables on the left-hand side of a keyword call.

    Validation errors are stored instead of raised immediately so they
    only surface when the assignment is actually used.
    """

    def __init__(self, assignment):
        validator = AssignmentValidator()
        try:
            validated = [validator.validate(var) for var in assignment]
        except DataError as err:
            self.assignment = assignment
            self.error = err
        else:
            self.assignment = validated
            self.error = None

    def __iter__(self):
        return iter(self.assignment)

    def __len__(self):
        return len(self.assignment)

    def validate_assignment(self):
        """Raise the stored validation error, if any."""
        if self.error:
            raise self.error

    def assigner(self, context):
        """Return a :class:`VariableAssigner` for this assignment."""
        self.validate_assignment()
        return VariableAssigner(self.assignment, context)
class AssignmentValidator(object):
    """Validates assignment targets one variable at a time.

    Call :meth:`validate` for each assigned variable in order; raises
    ``DataError`` on invalid combinations such as multiple list
    variables or an '=' mark before the last variable.
    """

    def __init__(self):
        self._seen_list = False
        self._seen_dict = False
        self._seen_any_var = False
        self._seen_assign_mark = False

    def validate(self, variable):
        """Return the variable with a possible trailing '=' removed."""
        variable = self._validate_assign_mark(variable)
        is_list = variable[0] == '@'
        is_dict = variable[0] == '&'
        self._validate_state(is_list=is_list, is_dict=is_dict)
        return variable

    def _validate_assign_mark(self, variable):
        if self._seen_assign_mark:
            raise DataError("Assign mark '=' can be used only with the last "
                            "variable.")
        if not variable.endswith('='):
            return variable
        self._seen_assign_mark = True
        return variable[:-1].rstrip()

    def _validate_state(self, is_list, is_dict):
        # Only one list variable is allowed, and a dict variable must be
        # the only assigned variable.
        if is_list and self._seen_list:
            raise DataError('Assignment can contain only one list variable.')
        if self._seen_dict or is_dict and self._seen_any_var:
            raise DataError('Dictionary variable cannot be assigned with '
                            'other variables.')
        self._seen_list += is_list
        self._seen_dict += is_dict
        self._seen_any_var = True
class VariableAssigner(object):
    """Assigns keyword return values to variables.

    Used as a context manager around keyword execution: when the body
    raises, a possible return value carried by the failure is still
    assigned if execution is allowed to continue.
    """

    _valid_extended_attr = re.compile(r"^[_a-zA-Z]\w*$")

    def __init__(self, assignment, context):
        self._assignment = assignment
        self._context = context

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_val is None:
            return
        failure = self._get_failure(exc_type, exc_val, exc_tb)
        if failure.can_continue(self._context.in_teardown):
            self.assign(failure.return_value)

    @staticmethod
    def _get_failure(exc_type, exc_val, exc_tb):
        if isinstance(exc_val, ExecutionStatus):
            return exc_val
        # Wrap non-Robot exceptions so they expose can_continue() and
        # return_value like execution failures do.
        exc_info = (exc_type, exc_val, exc_tb)
        return HandlerExecutionFailed(ErrorDetails(exc_info))

    def assign(self, return_value):
        """Assign the given return value to the target variables."""
        context = self._context
        context.trace(lambda: 'Return: %s' % prepr(return_value))
        resolver = ReturnValueResolver(self._assignment)
        for name, value in resolver.resolve(return_value):
            if not self._extended_assign(name, value, context.variables):
                value = self._normal_assign(name, value, context.variables)
            context.info(format_assign_message(name, value))

    def _extended_assign(self, name, value, variables):
        # Extended assignment: '${obj.attr} =' sets an attribute on an
        # existing object instead of creating a new variable.
        if name[0] != '$' or '.' not in name or name in variables:
            return False
        base, attr = self._split_extended_assign(name)
        try:
            var = variables[base]
        except DataError:
            return False
        if not (self._variable_supports_extended_assign(var) and
                self._is_valid_extended_attribute(attr)):
            return False
        try:
            setattr(var, attr, value)
        except Exception:
            raise VariableError("Setting attribute '%s' to variable '%s' "
                                "failed: %s" % (attr, base, get_error_message()))
        return True

    @staticmethod
    def _split_extended_assign(name):
        # '${obj.attr}' -> ('${obj}', 'attr')
        base, attr = name.rsplit('.', 1)
        return base.strip() + '}', attr[:-1].strip()

    @staticmethod
    def _variable_supports_extended_assign(var):
        # Strings and numbers do not support extended assignment.
        return not (is_string(var) or is_number(var))

    def _is_valid_extended_attribute(self, attr):
        return self._valid_extended_attr.match(attr) is not None

    @staticmethod
    def _normal_assign(name, value, variables):
        variables[name] = value
        # Always return the actually assigned value.
        return value if name[0] == '$' else variables[name]
def ReturnValueResolver(assignment):
    """Factory selecting a resolver matching the assignment shape."""
    if not assignment:
        return NoReturnValueResolver()
    if len(assignment) == 1:
        return OneReturnValueResolver(assignment[0])
    has_list_var = any(a[0] == '@' for a in assignment)
    return (ScalarsAndListReturnValueResolver(assignment) if has_list_var
            else ScalarsOnlyReturnValueResolver(assignment))
class NoReturnValueResolver(object):
    """Resolver used when nothing is assigned: the value is discarded."""

    @staticmethod
    def resolve(return_value):
        # The keyword's return value is intentionally ignored.
        del return_value
        return []
class OneReturnValueResolver(object):
    """Resolver assigning the whole return value to a single variable."""

    def __init__(self, variable):
        self._variable = variable

    def resolve(self, return_value):
        if return_value is None:
            # No return value: substitute the variable type's empty
            # default ($ -> None, @ -> empty list, & -> empty dict).
            defaults = {'$': None, '@': [], '&': {}}
            return_value = defaults[self._variable[0]]
        return [(self._variable, return_value)]
class _MultiReturnValueResolver(object):
    """Common base for assignments with multiple target variables."""

    def __init__(self, variables):
        self._variables = variables
        self._min_count = len(variables)

    def resolve(self, return_value):
        items = self._convert_to_list(return_value)
        self._validate(len(items))
        return self._resolve(items)

    def _convert_to_list(self, return_value):
        if return_value is None:
            # No return value: one None per expected variable.
            return [None] * self._min_count
        if is_string(return_value):
            # A plain string cannot be split over multiple variables.
            self._raise_expected_list(return_value)
        try:
            return list(return_value)
        except TypeError:
            self._raise_expected_list(return_value)

    def _raise_expected_list(self, ret):
        self._raise('Expected list-like value, got %s.' % type_name(ret))

    def _raise(self, error):
        raise VariableError('Cannot set variables: %s' % error)

    def _validate(self, return_count):
        raise NotImplementedError

    def _resolve(self, return_value):
        raise NotImplementedError
class ScalarsOnlyReturnValueResolver(_MultiReturnValueResolver):
    """Resolver for assignments consisting of scalar variables only."""

    def _validate(self, return_count):
        # The number of returned values must match the targets exactly.
        expected = self._min_count
        if return_count != expected:
            self._raise('Expected %d return values, got %d.'
                        % (expected, return_count))

    def _resolve(self, return_value):
        return [(var, value)
                for var, value in zip(self._variables, return_value)]
class ScalarsAndListReturnValueResolver(_MultiReturnValueResolver):
    # Resolver for assignments containing scalars and one @{list}
    # variable; the list absorbs any extra return values.
    def __init__(self, variables):
        _MultiReturnValueResolver.__init__(self, variables)
        # The list variable may match zero items, so one value less
        # than variables is acceptable.
        self._min_count -= 1

    def _validate(self, return_count):
        if return_count < self._min_count:
            self._raise('Expected %d or more return values, got %d.'
                        % (self._min_count, return_count))

    def _resolve(self, return_value):
        before_vars, list_var, after_vars \
            = self._split_variables(self._variables)
        before_items, list_items, after_items \
            = self._split_return(return_value, before_vars, after_vars)
        before = list(zip(before_vars, before_items))
        after = list(zip(after_vars, after_items))
        return before + [(list_var, list_items)] + after

    @staticmethod
    def _split_variables(variables):
        # Split targets into (scalars before, the list var, scalars after).
        list_index = [v[0] for v in variables].index('@')
        return (variables[:list_index],
                variables[list_index],
                variables[list_index+1:])

    @staticmethod
    def _split_return(return_value, before_vars, after_vars):
        # The list variable gets everything not claimed by the scalars.
        list_start = len(before_vars)
        list_end = len(return_value) - len(after_vars)
        return (return_value[:list_start],
                return_value[list_start:list_end],
                return_value[list_end:]) | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/lib/robot/variables/assigner.py | 0.532425 | 0.163612 | assigner.py | pypi |
import logging
from robotide.lib.robot.output import librarylogger
from robotide.lib.robot.running.context import EXECUTION_CONTEXTS
def write(msg, level='INFO', html=False):
    """Writes the message to the log file using the given level.

    Valid log levels are ``TRACE``, ``DEBUG``, ``INFO`` (default since RF
    2.9.1), ``WARN``, and ``ERROR`` (new in RF 2.9). Additionally it is
    possible to use ``HTML`` pseudo log level that logs the message as HTML
    using the ``INFO`` level.

    Instead of using this method, it is generally better to use the level
    specific methods such as ``info`` and ``debug`` that have separate
    ``html`` argument to control the message format.
    """
    if EXECUTION_CONTEXTS.current is not None:
        librarylogger.write(msg, level, html)
        return
    # Outside test execution: fall back to standard Python logging with
    # Robot levels mapped onto logging's numeric levels.
    std_levels = {'TRACE': logging.DEBUG // 2,
                  'DEBUG': logging.DEBUG,
                  'INFO': logging.INFO,
                  'HTML': logging.INFO,
                  'WARN': logging.WARN,
                  'ERROR': logging.ERROR}
    logging.getLogger("RobotFramework").log(std_levels[level], msg)
def trace(msg, html: bool = False) -> None:
    """Writes the message to the log file using the ``TRACE`` level."""
    write(msg, 'TRACE', html)
def debug(msg, html: bool = False) -> None:
    """Writes the message to the log file using the ``DEBUG`` level."""
    write(msg, 'DEBUG', html)
def info(msg, html: bool = False, also_console: bool = False) -> None:
    """Writes the message to the log file using the ``INFO`` level.

    If ``also_console`` argument is set to ``True``, the message is
    written both to the log file and to the console.
    """
    write(msg, 'INFO', html)
    if also_console:
        console(msg)
def warn(msg, html: bool = False) -> None:
    """Writes the message to the log file using the ``WARN`` level."""
    write(msg, 'WARN', html)
def error(msg, html: bool = False) -> None:
    """Writes the message to the log file using the ``ERROR`` level.

    New in Robot Framework 2.9.
    """
    write(msg, 'ERROR', html)
def console(msg, newline: bool = True, stream: str = 'stdout') -> None:
    """Writes the message to the console.

    If the ``newline`` argument is ``True``, a newline character is
    automatically added to the message.

    By default the message is written to the standard output stream.
    Using the standard error stream is possible by giving the ``stream``
    argument value ``'stderr'``.
    """
    librarylogger.console(msg, newline, stream) | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/lib/robot/api/logger.py | 0.627837 | 0.342599 | logger.py | pypi |
from __future__ import division
from robotide.lib.robot.result import ResultVisitor
from robotide.lib.robot.utils import XmlWriter
class XUnitWriter(object):
    # Entry point for writing an execution result as an xUnit XML file.
    def __init__(self, execution_result, skip_noncritical):
        self._execution_result = execution_result
        # When true, non-critical tests are reported as skipped.
        self._skip_noncritical = skip_noncritical

    def write(self, output):
        """Serialize the execution result into the given output file."""
        writer = XUnitFileWriter(XmlWriter(output), self._skip_noncritical)
        self._execution_result.visit(writer)
class XUnitFileWriter(ResultVisitor):
    """Provides an xUnit-compatible result file.

    Attempts to adhere to the de facto schema guessed by Peter Reilly, see:
    http://marc.info/?l=ant-dev&m=123551933508682
    """

    def __init__(self, xml_writer, skip_noncritical=False):
        self._writer = xml_writer
        self._root_suite = None
        self._skip_noncritical = skip_noncritical

    def start_suite(self, suite):
        # Only the root suite produces a <testsuite> element; nested
        # suites are flattened into it.
        if self._root_suite:
            return
        self._root_suite = suite
        tests, failures, skipped = self._get_stats(suite.statistics)
        attrs = {'name': suite.name,
                 'tests': tests,
                 'errors': '0',
                 'failures': failures,
                 'skipped': skipped,
                 'time': self._time_as_seconds(suite.elapsedtime)}
        self._writer.start('testsuite', attrs)

    def _get_stats(self, statistics):
        # With skip_noncritical enabled, non-critical tests are counted
        # as skipped instead of contributing to the failure count.
        if self._skip_noncritical:
            failures = statistics.critical.failed
            skipped = statistics.all.total - statistics.critical.total
        else:
            failures = statistics.all.failed
            skipped = 0
        return str(statistics.all.total), str(failures), str(skipped)

    def end_suite(self, suite):
        if suite is self._root_suite:
            self._writer.end('testsuite')

    def visit_test(self, test):
        self._writer.start('testcase',
                           {'classname': test.parent.longname,
                            'name': test.name,
                            'time': self._time_as_seconds(test.elapsedtime)})
        if self._skip_noncritical and not test.critical:
            self._skip_test(test)
        elif not test.passed:
            self._fail_test(test)
        self._writer.end('testcase')

    def _skip_test(self, test):
        self._writer.element('skipped', '%s: %s' % (test.status, test.message)
                             if test.message else test.status)

    def _fail_test(self, test):
        self._writer.element('failure', attrs={'message': test.message,
                                               'type': 'AssertionError'})

    def _time_as_seconds(self, millis):
        # Elapsed times are stored as milliseconds; xUnit wants seconds.
        return '{:.3f}'.format(millis / 1000)

    def visit_keyword(self, kw):
        # Keywords, statistics and errors are not part of xUnit output.
        pass

    def visit_statistics(self, stats):
        pass

    def visit_errors(self, errors):
        pass

    def end_result(self, result):
        self._writer.close() | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/lib/robot/reporting/xunitwriter.py | 0.796055 | 0.207857 | xunitwriter.py | pypi |
from robotide.lib.robot.conf import RebotSettings
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.model import ModelModifier
from robotide.lib.robot.output import LOGGER
from robotide.lib.robot.result import ExecutionResult, Result
from robotide.lib.robot.utils import unic
from .jsmodelbuilders import JsModelBuilder
from .logreportwriters import LogWriter, ReportWriter
from .xunitwriter import XUnitWriter
class ResultWriter(object):
    """A class to create log, report, output XML and xUnit files.

    :param sources: Either one :class:`~robot.result.executionresult.Result`
        object, or one or more paths to existing output XML files.

    By default writes ``report.html`` and ``log.html``, but no output XML
    or xUnit files. Custom file names can be given and results disabled
    or enabled using ``settings`` or ``options`` passed to the
    :meth:`write_results` method. The latter is typically more convenient::

        writer = ResultWriter(result)
        writer.write_results(report='custom.html', log=None, xunit='xunit.xml')
    """

    def __init__(self, *sources):
        self._sources = sources

    def write_results(self, settings=None, **options):
        """Writes results based on the given ``settings``  or ``options``.

        :param settings: :class:`~robot.conf.settings.RebotSettings` object
            to configure result writing.
        :param options: Used to construct new
            :class:`~robot.conf.settings.RebotSettings` object if ``settings``
            are not given.
        """
        settings = settings or RebotSettings(options)
        results = Results(settings, *self._sources)
        if settings.output:
            self._write_output(results.result, settings.output)
        if settings.xunit:
            self._write_xunit(results.result, settings.xunit,
                              settings.xunit_skip_noncritical)
        if settings.log:
            # The log needs the minimum log level present in the data.
            config = dict(settings.log_config,
                          minLevel=results.js_result.min_level)
            self._write_log(results.js_result, settings.log, config)
        if settings.report:
            # Report is written last so data not needed in it can be
            # dropped first to save memory.
            results.js_result.remove_data_not_needed_in_report()
            self._write_report(results.js_result, settings.report,
                               settings.report_config)
        return results.return_code

    def _write_output(self, result, path):
        self._write('Output', result.save, path)

    def _write_xunit(self, result, path, skip_noncritical):
        self._write('XUnit', XUnitWriter(result, skip_noncritical).write, path)

    def _write_log(self, js_result, path, config):
        self._write('Log', LogWriter(js_result).write, path, config)

    def _write_report(self, js_result, path, config):
        self._write('Report', ReportWriter(js_result).write, path, config)

    def _write(self, name, writer, path, *args):
        # Common error handling: a failure writing one output file is
        # logged but does not prevent writing the others.
        try:
            writer(path, *args)
        except DataError as err:
            LOGGER.error(err.message)
        except EnvironmentError as err:
            # `err.filename` can be different than `path` at least if reading
            # log/report templates or writing split log fails.
            # `unic` is needed due to http://bugs.jython.org/issue1825.
            LOGGER.error("Writing %s file '%s' failed: %s: %s" %
                         (name.lower(), path, err.strerror, unic(err.filename)))
        else:
            LOGGER.output_file(name, path)
class Results(object):
    """Lazily built execution results used by :class:`ResultWriter`.

    ``result`` is the parsed result model and ``js_result`` the
    corresponding JavaScript model used by log/report generation; both
    are created on first access.
    """

    def __init__(self, settings, *sources):
        self._settings = settings
        self._sources = sources
        if len(sources) == 1 and isinstance(sources[0], Result):
            # An already parsed result was given directly.
            self._result = sources[0]
            self._prune = False
            self.return_code = self._result.return_code
        else:
            self._result = None
            self._prune = True
            self.return_code = -1
        self._js_result = None

    @property
    def result(self):
        if self._result is None:
            # Keywords can be omitted when neither the log nor output
            # XML needs them.
            include_keywords = bool(self._settings.log or self._settings.output)
            flattened = self._settings.flatten_keywords
            self._result = ExecutionResult(include_keywords=include_keywords,
                                           flattened_keywords=flattened,
                                           merge=self._settings.merge,
                                           rpa=self._settings.rpa,
                                           *self._sources)
            if self._settings.rpa is None:
                self._settings.rpa = self._result.rpa
            self._result.configure(self._settings.status_rc,
                                   self._settings.suite_config,
                                   self._settings.statistics_config)
            modifier = ModelModifier(self._settings.pre_rebot_modifiers,
                                     self._settings.process_empty_suite,
                                     LOGGER)
            self._result.suite.visit(modifier)
            self.return_code = self._result.return_code
        return self._result

    @property
    def js_result(self):
        if self._js_result is None:
            builder = JsModelBuilder(log_path=self._settings.log,
                                     split_log=self._settings.split_log,
                                     prune_input_to_save_memory=self._prune)
            self._js_result = builder.build_from(self.result)
            if self._prune:
                # Drop the parsed result to save memory; only the JS
                # model is kept from here on.
                self._result = None
        return self._js_result | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/lib/robot/reporting/resultwriter.py | 0.803868 | 0.178974 | resultwriter.py | pypi |
from robotide.lib.robot.utils import is_string, py2to3, unicode
from .comments import Comment
from ..version import ALIAS_MARKER
@py2to3
class Setting(object):
    """Base class for settings in parsed test data.

    A setting is populated at parsing time with :meth:`populate`; after
    that its attributes can be set directly.
    """

    def __init__(self, setting_name, parent=None, comment=None):
        self.setting_name = setting_name
        self.parent = parent
        self._set_initial_value()
        self._set_comment(comment)
        self._populated = False

    def _set_initial_value(self):
        # Subclasses override when the value is not a list.
        self.value = []

    def _set_comment(self, comment):
        self.comment = Comment(comment)

    def reset(self):
        self.__init__(self.setting_name, self.parent)

    @property
    def source(self):
        return self.parent.source if self.parent is not None else None

    @property
    def directory(self):
        return self.parent.directory if self.parent is not None else None

    def populate(self, value, comment=None):
        """Mainly used at parsing time, later attributes can be set directly."""
        if not self._populated:
            self._populate(value)
            self._set_comment(comment)
            self._populated = True
        else:
            # Using the same setting twice is an error: the setting is
            # reset to its initial state and the problem reported.
            self._set_initial_value()
            self._set_comment(None)
            self.report_invalid_syntax("Setting '%s' used multiple times."
                                       % self.setting_name, 'ERROR')

    def _populate(self, value):
        self.value = value

    def is_set(self):
        return bool(self.value)

    @staticmethod
    def is_for_loop():
        return False

    def report_invalid_syntax(self, message, level='ERROR'):
        self.parent.report_invalid_syntax(message, level)

    def _string_value(self, value):
        return value if is_string(value) else ' '.join(value)

    def _concat_string_with_value(self, string, value):
        if string:
            return string + ' ' + self._string_value(value)
        return self._string_value(value)

    def as_list(self):
        return self._data_as_list() + self.comment.as_list()

    def _data_as_list(self):
        ret = [self.setting_name]
        if self.value:
            ret.extend(self.value)
        return ret

    def __nonzero__(self):
        # Python 2 truth hook; presumably mapped to __bool__ by @py2to3
        # (TODO confirm against the py2to3 utility).
        return self.is_set()

    def __iter__(self):
        return iter(self.value or ())

    def __unicode__(self):
        return unicode(self.value or '')
class StringValueJoiner(object):
    """Joins string fragments together using a configurable separator."""

    def __init__(self, separator):
        self._separator = separator

    def join_string_with_value(self, string, value):
        """Append ``value`` to ``string``, inserting the separator if needed."""
        if not string:
            return self.string_value(value)
        return string + self._separator + self.string_value(value)

    def string_value(self, value):
        """Return ``value`` as-is when it is a string, else join its items."""
        return value if is_string(value) else self._separator.join(value)
class Documentation(Setting):
    """Documentation setting whose value is one string instead of a list."""

    def _set_initial_value(self):
        self.value = ''

    def _populate(self, value):
        self.value = self._concat_string_with_value(self.value, value)

    def _string_value(self, value):
        # Documentation cells are concatenated without any separator.
        if is_string(value):
            return value
        return ''.join(value)

    def _data_as_list(self):
        return [self.setting_name, self.value]
class Template(Setting):
    """Test/task template setting holding one keyword name or ``None``."""

    def _set_initial_value(self):
        self.value = None

    def _populate(self, value):
        self.value = self._concat_string_with_value(self.value, value)

    def is_set(self):
        return self.value is not None

    def is_active(self):
        # The special value 'NONE' (case-insensitively) disables a template.
        return self.value and self.value.upper() != 'NONE'

    def _data_as_list(self):
        data = [self.setting_name]
        if self.value:
            data.append(self.value)
        return data
class Fixture(Setting):
    """Setup/teardown setting: a keyword name plus its arguments.

    `keyword`, `is_comment` and `assign` exist to keep the API compatible
    with Step.
    """

    @property
    def keyword(self):
        return self.name or ''

    @staticmethod
    def is_comment():
        return False

    def _set_initial_value(self):
        self.name = None
        self.args = []
        self.assign = ()

    def _populate(self, value):
        if not self.name:
            # The first populated cell is the keyword name; the rest are args.
            if value:
                self.name, value = value[0], value[1:]
            else:
                self.name = ''
        self.args.extend(value)

    def is_set(self):
        return self.name is not None

    def is_active(self):
        # The special name 'NONE' (case-insensitively) disables the fixture.
        return self.name and self.name.upper() != 'NONE'

    def _data_as_list(self):
        data = [self.setting_name]
        if self.name or self.args:
            data.append(self.name or '')
        if self.args:
            data.extend(self.args)
        return data
class Timeout(Setting):
    """Timeout setting with a deprecated custom message part."""

    def _set_initial_value(self):
        self.value = None
        self.message = ''

    def _populate(self, value):
        if not self.value:
            # The first populated cell is the timeout value itself.
            self.value = value[0] if value else ''
            value = value[1:]
        self.message = self._concat_string_with_value(self.message, value)
        # DEBUG: Remove custom timeout message support in RF 3.2.
        if value and self.parent:
            self.parent.report_invalid_syntax(
                'Using custom timeout messages is deprecated since Robot '
                'Framework 3.0.1 and will be removed in future versions. '
                "Message that was used is '%s'." % self.message, level='WARN')

    def is_set(self):
        return self.value is not None

    def _data_as_list(self):
        ret = [self.setting_name]
        if self.value or self.message:
            ret.append(self.value or '')
        if self.message:
            ret.append(self.message)
        return ret
class Tags(Setting):
    """Tags setting; value is a list of tag names, or ``None`` when unset."""

    def _set_initial_value(self):
        self.value = None

    def _populate(self, value):
        existing = self.value or []
        self.value = existing + value

    def is_set(self):
        return self.value is not None

    def __add__(self, other):
        if not isinstance(other, Tags):
            raise TypeError('Tags can only be added with tags')
        combined = Tags('Tags')
        combined.value = (self.value or []) + (other.value or [])
        return combined
class Arguments(Setting):
    """[Arguments] setting of a user keyword; plain list value."""
    pass
class Return(Setting):
    """[Return] setting of a user keyword; plain list value."""
    pass
class Metadata(Setting):
    """Named free-form metadata of a suite."""

    setting_name = 'Metadata'

    def __init__(self, parent, name, value, comment=None, joined=False):
        self.parent = parent
        self.name = name
        # Value cells are joined with a space unless already joined.
        separator = '' if joined else ' '
        self.value = StringValueJoiner(separator).join_string_with_value('', value)
        self._set_comment(comment)

    def reset(self):
        """Intentionally a no-op: the value is fixed at construction time."""
        pass

    def is_set(self):
        return True

    def _data_as_list(self):
        return [self.setting_name, self.name, self.value]
class ImportSetting(Setting):
    """Base class for Library, Resource and Variables imports."""

    def __init__(self, parent, name, args=None, alias=None, comment=None):
        self.parent = parent
        self.name = name
        self.args = args or []
        self.alias = alias
        self._set_comment(comment)

    def reset(self):
        """ Just overriding """
        pass

    @property
    def type(self):
        # 'Library', 'Resource' or 'Variables', from the concrete class name.
        return type(self).__name__

    def is_set(self):
        return True

    def _data_as_list(self):
        return [self.type, self.name] + self.args

    def report_invalid_syntax(self, message, level='ERROR', parent=None):
        # Falls back to the logger when no parent is available for reporting.
        parent = parent or getattr(self, 'parent', None)
        if parent:
            parent.report_invalid_syntax(message, level)
        else:
            from robotide.lib.robot.api import logger
            logger.write(message, level)
class Library(ImportSetting):
    """Library import, optionally with an alias given via the alias marker."""

    def __init__(self, parent, name, args=None, alias=None, comment=None):
        if args and not alias:
            args, alias = self._split_possible_alias(args)
        ImportSetting.__init__(self, parent, name, args, alias, comment)

    @staticmethod
    def _split_possible_alias(args):
        # Both the current marker and the legacy 'WITH NAME' are accepted.
        if len(args) > 1 and args[-2] in (ALIAS_MARKER, 'WITH NAME'):
            return args[:-2], args[-1]
        return args, None

    def _data_as_list(self):
        data = ['Library', self.name] + self.args
        if self.alias:
            data.extend([ALIAS_MARKER, self.alias])
        return data
class Resource(ImportSetting):
    """Resource file import; extra cells are treated as part of the name."""

    def __init__(self, parent, name, invalid_args=None, comment=None):
        # Gluing extra cells to the name keeps the invalid data visible.
        if invalid_args:
            name = ' '.join([name] + list(invalid_args))
        ImportSetting.__init__(self, parent, name, comment=comment)
class Variables(ImportSetting):
    """Variable file import with optional arguments."""

    def __init__(self, parent, name, args=None, comment=None):
        ImportSetting.__init__(self, parent, name, args, comment=comment)
class _DataList(object):
def __init__(self, parent):
self._parent = parent
self.data = []
def add(self, meta):
self._add(meta)
def _add(self, meta):
self.data.append(meta)
@staticmethod
def _parse_name_and_value(value):
name = value[0] if value else ''
return name, value[1:]
def __getitem__(self, index):
return self.data[index]
def __setitem__(self, index, item):
self.data[index] = item
def __len__(self):
return len(self.data)
def __iter__(self):
return iter(self.data)
class ImportList(_DataList):
    """List of import settings in a setting table."""

    def populate_library(self, data, comment):
        self._populate(Library, data, comment)

    def populate_resource(self, data, comment):
        self._populate(Resource, data, comment)

    def populate_variables(self, data, comment):
        self._populate(Variables, data, comment)

    def _populate(self, item_class, data, comment):
        # The first cell is the import name, the rest are its arguments.
        name, value = self._parse_name_and_value(data)
        self._add(item_class(self._parent, name, value, comment=comment))
class MetadataList(_DataList):
def populate(self, name, value, comment):
self._add(Metadata(self._parent, name, value, comment, joined=True)) | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/lib/robot/parsing/settings.py | 0.799325 | 0.185394 | settings.py | pypi |
from contextlib import contextmanager
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.utils import unic
class ExecutionContexts(object):
    """Stack of active execution contexts, one per suite being executed."""

    def __init__(self):
        self._contexts = []

    @property
    def current(self):
        """Innermost (most recently started) context, or ``None`` when idle."""
        if not self._contexts:
            return None
        return self._contexts[-1]

    @property
    def top(self):
        """Outermost (root suite) context, or ``None`` when idle."""
        if not self._contexts:
            return None
        return self._contexts[0]

    def __iter__(self):
        return iter(self._contexts)

    @property
    def namespaces(self):
        return (ctx.namespace for ctx in self)

    def start_suite(self, suite, namespace, output, dry_run=False):
        context = _ExecutionContext(suite, namespace, output, dry_run)
        self._contexts.append(context)
        return context

    def end_suite(self):
        self._contexts.pop()
# Global singleton giving access to the active context stack.
# This is ugly but currently needed e.g. by BuiltIn
EXECUTION_CONTEXTS = ExecutionContexts()
class _ExecutionContext(object):
    """Runtime state of one executing test suite.

    Tracks the current test, active timeouts, teardown nesting and the
    started-keyword depth, and exposes helpers for setting automatic
    variables and writing to the output.
    """
    _started_keywords_threshold = 42  # Jython on Windows doesn't work with higher

    def __init__(self, suite, namespace, output, dry_run=False):
        self.suite = suite
        self.test = None
        self.timeouts = set()
        self.namespace = namespace
        self.output = output
        self.dry_run = dry_run
        self.in_suite_teardown = False
        self.in_test_teardown = False
        # Counter rather than a flag: keyword teardowns can nest.
        self.in_keyword_teardown = 0
        self._started_keywords = 0
        self.timeout_occurred = False

    @contextmanager
    def suite_teardown(self):
        # Flags that we are inside the suite teardown for the 'with' body.
        self.in_suite_teardown = True
        try:
            yield
        finally:
            self.in_suite_teardown = False

    @contextmanager
    def test_teardown(self, test):
        # Teardown can inspect the test's own status via these variables.
        self.variables.set_test('${TEST_STATUS}', test.status)
        self.variables.set_test('${TEST_MESSAGE}', test.message)
        self.in_test_teardown = True
        # The test's timeout no longer applies inside its teardown.
        self._remove_timeout(test.timeout)
        try:
            yield
        finally:
            self.in_test_teardown = False

    @contextmanager
    def keyword_teardown(self, error):
        self.variables.set_keyword('${KEYWORD_STATUS}', 'FAIL' if error else 'PASS')
        self.variables.set_keyword('${KEYWORD_MESSAGE}', unic(error or ''))
        self.in_keyword_teardown += 1
        try:
            yield
        finally:
            self.in_keyword_teardown -= 1

    @property
    @contextmanager
    def user_keyword(self):
        self.namespace.start_user_keyword()
        try:
            yield
        finally:
            self.namespace.end_user_keyword()

    @contextmanager
    def timeout(self, timeout):
        self._add_timeout(timeout)
        try:
            yield
        finally:
            self._remove_timeout(timeout)

    @property
    def in_teardown(self):
        """True when any suite, test or keyword teardown is being executed."""
        return bool(self.in_suite_teardown or
                    self.in_test_teardown or
                    self.in_keyword_teardown)

    @property
    def variables(self):
        return self.namespace.variables

    def end_suite(self, suite):
        # Promote previous-test information so it survives into the parent.
        for name in ['${PREV_TEST_NAME}',
                     '${PREV_TEST_STATUS}',
                     '${PREV_TEST_MESSAGE}']:
            self.variables.set_global(name, self.variables[name])
        self.output.end_suite(suite)
        self.namespace.end_suite(suite)
        EXECUTION_CONTEXTS.end_suite()

    def set_suite_variables(self, suite):
        self.variables['${SUITE_NAME}'] = suite.longname
        self.variables['${SUITE_SOURCE}'] = suite.source or ''
        self.variables['${SUITE_DOCUMENTATION}'] = suite.doc
        self.variables['${SUITE_METADATA}'] = suite.metadata.copy()

    def report_suite_status(self, status, message):
        self.variables['${SUITE_STATUS}'] = status
        self.variables['${SUITE_MESSAGE}'] = message

    def start_test(self, test):
        self.test = test
        self._add_timeout(test.timeout)
        self.namespace.start_test()
        self.variables.set_test('${TEST_NAME}', test.name)
        self.variables.set_test('${TEST_DOCUMENTATION}', test.doc)
        self.variables.set_test('@{TEST_TAGS}', list(test.tags))

    def _add_timeout(self, timeout):
        if timeout:
            timeout.start()
            self.timeouts.add(timeout)

    def _remove_timeout(self, timeout):
        if timeout in self.timeouts:
            self.timeouts.remove(timeout)

    def end_test(self, test):
        self.test = None
        self._remove_timeout(test.timeout)
        self.namespace.end_test()
        self.variables.set_suite('${PREV_TEST_NAME}', test.name)
        self.variables.set_suite('${PREV_TEST_STATUS}', test.status)
        self.variables.set_suite('${PREV_TEST_MESSAGE}', test.message)
        self.timeout_occurred = False

    def start_keyword(self, keyword):
        # Depth guard against infinite keyword recursion.
        self._started_keywords += 1
        if self._started_keywords > self._started_keywords_threshold:
            raise DataError('Maximum limit of started keywords exceeded.')
        self.output.start_keyword(keyword)

    def end_keyword(self, keyword):
        self.output.end_keyword(keyword)
        self._started_keywords -= 1

    def get_runner(self, name):
        return self.namespace.get_runner(name)

    def trace(self, message):
        self.output.trace(message)

    def debug(self, message):
        self.output.debug(message)

    def info(self, message):
        self.output.info(message)

    def warn(self, message):
        self.output.warn(message)
def fail(self, message):
self.output.fail(message) | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/lib/robot/running/context.py | 0.61057 | 0.202207 | context.py | pypi |
from robotide.lib.robot.errors import (ExecutionFailed, ExecutionFailures, ExecutionPassed,
ExitForLoop, ContinueForLoop, DataError)
from robotide.lib.robot.result import Keyword as KeywordResult
from robotide.lib.robot.utils import (format_assign_message, frange, get_error_message,
is_list_like, is_number, plural_or_not as s, type_name)
from robotide.lib.robot.variables import is_scalar_var
from .statusreporter import StatusReporter
class StepRunner(object):
    """Runs a sequence of steps (keyword calls and FOR loops) in a context."""

    def __init__(self, context, templated=False):
        self._context = context
        self._templated = bool(templated)

    def run_steps(self, steps):
        """Run all ``steps``, collecting continuable failures before raising."""
        errors = []
        for step in steps:
            try:
                self.run_step(step)
            except ExecutionPassed as exception:
                # Passing control flow still carries earlier failures upwards.
                exception.set_earlier_failures(errors)
                raise exception
            except ExecutionFailed as exception:
                errors.extend(exception.get_errors())
                # Continue-on-failure depends on teardown/template/dry-run mode.
                if not exception.can_continue(self._context.in_teardown,
                                              self._templated,
                                              self._context.dry_run):
                    break
        if errors:
            raise ExecutionFailures(errors)

    def run_step(self, step, name=None):
        context = self._context
        if step.type == step.FOR_LOOP_TYPE:
            runner = ForRunner(context, self._templated, step.flavor)
            return runner.run(step)
        runner = context.get_runner(name or step.name)
        if context.dry_run:
            return runner.dry_run(step, context)
        return runner.run(step, context)
def ForRunner(context, templated=False, flavor='IN'):
    """Factory returning the loop runner matching the FOR loop ``flavor``."""
    runners = {'IN': ForInRunner,
               'IN RANGE': ForInRangeRunner,
               'IN ZIP': ForInZipRunner,
               'IN ENUMERATE': ForInEnumerateRunner}
    if flavor not in runners:
        # Unknown flavor: return a runner whose run() reports the error.
        return InvalidForRunner(context, flavor)
    return runners[flavor](context, templated)
class ForInRunner(object):
    """Runner for plain ``FOR ... IN`` loops; base class for other flavors."""

    def __init__(self, context, templated=False):
        self._context = context
        self._templated = templated

    def run(self, data, name=None):
        result = KeywordResult(kwname=self._get_name(data),
                               type=data.FOR_LOOP_TYPE)
        with StatusReporter(self._context, result):
            self._validate(data)
            self._run(data)

    def _get_name(self, data):
        # E.g. "${x} | ${y} IN [ a | b ]" as shown in logs.
        return '%s %s [ %s ]' % (' | '.join(data.variables),
                                 self._flavor_name(),
                                 ' | '.join(data.values))

    def _flavor_name(self):
        return 'IN'

    def _validate(self, data):
        if not data.variables:
            raise DataError('FOR loop has no loop variables.')
        for var in data.variables:
            if not is_scalar_var(var):
                raise DataError("Invalid FOR loop variable '%s'." % var)
        if not data.values:
            raise DataError('FOR loop has no loop values.')
        if not data.keywords:
            raise DataError('FOR loop contains no keywords.')

    def _run(self, data):
        errors = []
        for values in self._get_values_for_one_round(data):
            try:
                self._run_one_round(data, values)
            except ExitForLoop as exception:
                if exception.earlier_failures:
                    errors.extend(exception.earlier_failures.get_errors())
                break
            except ContinueForLoop as exception:
                if exception.earlier_failures:
                    errors.extend(exception.earlier_failures.get_errors())
                continue
            except ExecutionPassed as exception:
                exception.set_earlier_failures(errors)
                raise exception
            except ExecutionFailed as exception:
                errors.extend(exception.get_errors())
                if not exception.can_continue(self._context.in_teardown,
                                              self._templated,
                                              self._context.dry_run):
                    break
        if errors:
            raise ExecutionFailures(errors)

    def _get_values_for_one_round(self, data):
        if not self._context.dry_run:
            values = self._replace_variables(data)
            var_count = self._values_per_iteration(data.variables)
            # Values are consumed in chunks of one chunk per loop round.
            for i in range(0, len(values), var_count):
                yield values[i:i+var_count]
        else:
            # In dry run the body is executed once with the unreplaced
            # variable names as values.
            yield data.variables

    def _replace_variables(self, data):
        values = self._context.variables.replace_list(data.values)
        values = self._transform_items(values)
        values_per_iteration = self._values_per_iteration(data.variables)
        if len(values) % values_per_iteration == 0:
            return values
        self._raise_wrong_variable_count(values_per_iteration, len(values))

    def _raise_wrong_variable_count(self, variables, values):
        raise DataError('Number of FOR loop values should be multiple of '
                        'its variables. Got %d variables but %d value%s.'
                        % (variables, values, s(values)))

    def _run_one_round(self, data, values):
        name = ', '.join(format_assign_message(var, item)
                         for var, item in zip(data.variables, values))
        result = KeywordResult(kwname=name,
                               type=data.FOR_ITEM_TYPE)
        for var, value in zip(data.variables, values):
            self._context.variables[var] = value
        runner = StepRunner(self._context, self._templated)
        with StatusReporter(self._context, result):
            runner.run_steps(data.keywords)

    def _transform_items(self, items):
        # Hook for subclasses (IN RANGE, IN ZIP) to convert the value pool.
        return items

    def _values_per_iteration(self, variables):
        """
        The number of values per iteration;
        used to check if we have (a multiple of this) values.
        This is its own method to support loops like ForInEnumerate
        which add/remove items to the pool.
        """
        return len(variables)
class ForInRangeRunner(ForInRunner):
    """Runner for ``FOR ... IN RANGE`` loops."""

    def __init__(self, context, templated=False):
        super(ForInRangeRunner, self).__init__(context, templated)

    def _flavor_name(self):
        return 'IN RANGE'

    def _transform_items(self, items):
        """Convert the 1-3 loop arguments to numbers and expand with frange."""
        try:
            items = [self._to_number_with_arithmetics(item) for item in items]
        except Exception:
            # Was a bare ``except:``; narrowed so that KeyboardInterrupt and
            # SystemExit are not swallowed and reported as a DataError.
            raise DataError('Converting argument of FOR IN RANGE failed: %s.'
                            % get_error_message())
        if not 1 <= len(items) <= 3:
            raise DataError('FOR IN RANGE expected 1-3 arguments, got %d.'
                            % len(items))
        return frange(*items)

    def _to_number_with_arithmetics(self, item):
        if is_number(item):
            return item
        # Evaluates simple arithmetic like '10 * 2' in an empty namespace.
        # NOTE: eval on test data is intentional for this feature.
        number = eval(str(item), {})
        if not is_number(number):
            raise TypeError("Expected number, got %s." % type_name(item))
        return number
class ForInZipRunner(ForInRunner):
    """Runner for ``FOR ... IN ZIP`` loops iterating lists in lockstep."""

    def __init__(self, context, templated=False):
        super(ForInZipRunner, self).__init__(context, templated)

    def _flavor_name(self):
        return 'IN ZIP'

    def _replace_variables(self, data):
        values = super(ForInZipRunner, self)._replace_variables(data)
        if len(data.variables) != len(data.values):
            raise DataError('FOR IN ZIP expects an equal number of variables and '
                            'iterables. Got %d variable%s and %d iterable%s.'
                            % (len(data.variables), s(data.variables),
                               len(data.values), s(data.values)))
        return values

    def _transform_items(self, items):
        # Validate first, then flatten one zipped round at a time.
        for item in items:
            if not is_list_like(item):
                raise DataError('FOR IN ZIP items must all be list-like, '
                                'got %s.' % type_name(item))
        columns = [list(item) for item in items]
        flattened = []
        for round_values in zip(*columns):
            flattened.extend(round_values)
        return flattened
class ForInEnumerateRunner(ForInRunner):
    """Runner for ``FOR ... IN ENUMERATE``; first variable gets the index."""

    def __init__(self, context, templated=False):
        super(ForInEnumerateRunner, self).__init__(context, templated)

    def _flavor_name(self):
        return 'IN ENUMERATE'

    def _values_per_iteration(self, variables):
        # The first variable receives the round index, so one variable fewer
        # is consumed from the value pool per round.
        if len(variables) < 2:
            raise DataError('FOR IN ENUMERATE expected 2 or more loop '
                            'variables, got %d.' % len(variables))
        return len(variables) - 1

    def _get_values_for_one_round(self, data):
        parent = super(ForInEnumerateRunner, self)
        for index, values in enumerate(parent._get_values_for_one_round(data)):
            yield [index] + values

    def _raise_wrong_variable_count(self, variables, values):
        raise DataError('Number of FOR IN ENUMERATE loop values should be '
                        'multiple of its variables (excluding the index). '
                        'Got %d variable%s but %d value%s.'
                        % (variables, s(variables), values, s(values)))
class InvalidForRunner(ForInRunner):
"""Used to send an error from ForRunner() if it sees an unexpected error.
We can't simply throw a DataError from ForRunner() because that happens
outside the "with StatusReporter(...)" blocks.
"""
def __init__(self, context, flavor):
super(InvalidForRunner, self).__init__(context, False)
self.flavor = flavor
def _run(self, data, *args, **kwargs):
raise DataError("Invalid FOR loop type '%s'. Expected 'IN', "
"'IN RANGE', 'IN ZIP', or 'IN ENUMERATE'."
% self.flavor) | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/lib/robot/running/steprunner.py | 0.520984 | 0.269045 | steprunner.py | pypi |
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.utils import (get_error_message, is_java_method, is_bytes, is_unicode, is_tuple, py2to3)
from .arguments import JavaArgumentParser, PythonArgumentParser
def no_dynamic_method(*args):
    """Fallback used when a library does not implement a dynamic API method."""
    return None
@py2to3
class _DynamicMethod(object):
    """Wrapper for one method of the dynamic library API.

    Looks the method up both with its underscore name and the equivalent
    camelCase name; falls back to ``no_dynamic_method`` when neither exists.
    """
    _underscore_name = NotImplemented

    def __init__(self, lib):
        self.method = self._get_method(lib)

    def _get_method(self, lib):
        for name in self._underscore_name, self._camelCaseName:
            method = getattr(lib, name, None)
            if callable(method):
                return method
        return no_dynamic_method

    @property
    def _camelCaseName(self):
        # E.g. 'get_keyword_names' -> 'getKeywordNames'.
        tokens = self._underscore_name.split('_')
        return ''.join([tokens[0]] + [t.capitalize() for t in tokens[1:]])

    @property
    def name(self):
        return self.method.__name__

    def __call__(self, *args):
        try:
            return self._handle_return_value(self.method(*args))
        # NOTE(review): bare except also converts KeyboardInterrupt and
        # SystemExit into DataError — consider narrowing to Exception.
        except:
            raise DataError("Calling dynamic method '%s' failed: %s"
                            % (self.method.__name__, get_error_message()))

    def _handle_return_value(self, value):
        # Subclasses convert the raw return value to the expected shape.
        raise NotImplementedError

    def _to_string(self, value):
        if is_unicode(value):
            return value
        if is_bytes(value):
            return value.decode('UTF-8')
        if is_tuple(value):
            # A two-item tuple is rendered as 'first=second'.
            return f"{value[0]}={value[1]}"
        raise DataError('Return value must be string.')

    def _to_list_of_strings(self, value):
        try:
            return [self._to_string(v) for v in value]
        except (TypeError, DataError):
            raise DataError('Return value must be list of strings.')

    def __nonzero__(self):
        # Truthy only when the library actually implements this method.
        return self.method is not no_dynamic_method
class GetKeywordNames(_DynamicMethod):
    """Wrapper for ``get_keyword_names``; normalizes and de-duplicates names."""

    _underscore_name = 'get_keyword_names'

    def _handle_return_value(self, value):
        names = self._to_list_of_strings(value or [])
        return list(self._remove_duplicates(names))

    def _remove_duplicates(self, names):
        # Preserves the first occurrence of each name.
        seen = set()
        for name in names:
            if name in seen:
                continue
            seen.add(name)
            yield name
class RunKeyword(_DynamicMethod):
    """Wrapper for ``run_keyword``; detects whether kwargs are supported."""
    _underscore_name = 'run_keyword'

    @property
    def supports_kwargs(self):
        if is_java_method(self.method):
            return self._supports_java_kwargs(self.method)
        return self._supports_python_kwargs(self.method)

    def _supports_python_kwargs(self, method):
        # Kwargs support means a (name, args, kwargs) signature, i.e. three
        # positional arguments.
        spec = PythonArgumentParser().parse(method)
        return len(spec.positional) == 3

    def _supports_java_kwargs(self, method):
        func = self.method.im_func if hasattr(method, 'im_func') else method
        signatures = func.argslist[:func.nargs]
        spec = JavaArgumentParser().parse(signatures)
        return (self._java_single_signature_kwargs(spec) or
                self._java_multi_signature_kwargs(spec))

    def _java_single_signature_kwargs(self, spec):
        return len(spec.positional) == 1 and spec.varargs and spec.kwargs

    def _java_multi_signature_kwargs(self, spec):
        return len(spec.positional) == 3 and not (spec.varargs or spec.kwargs)
class GetKeywordDocumentation(_DynamicMethod):
    """Wrapper for the optional ``get_keyword_documentation`` method."""
    _underscore_name = 'get_keyword_documentation'

    def _handle_return_value(self, value):
        # Missing documentation is normalized to an empty string.
        return self._to_string(value or '')
class GetKeywordArguments(_DynamicMethod):
    """Wrapper for the optional ``get_keyword_arguments`` method."""

    _underscore_name = 'get_keyword_arguments'

    def __init__(self, lib):
        _DynamicMethod.__init__(self, lib)
        self._supports_kwargs = RunKeyword(lib).supports_kwargs

    def _handle_return_value(self, value):
        if value is not None:
            return self._to_list_of_strings(value)
        # No signature information: assume varargs, plus kwargs if supported.
        default_spec = ['*varargs']
        if self._supports_kwargs:
            default_spec.append('**kwargs')
        return default_spec
class GetKeywordTypes(_DynamicMethod):
    """Wrapper for the optional ``get_keyword_types`` method."""
    _underscore_name = 'get_keyword_types'

    def _handle_return_value(self, value):
        # Types are passed through as-is; conversion happens elsewhere.
        return value
class GetKeywordTags(_DynamicMethod):
_underscore_name = 'get_keyword_tags'
def _handle_return_value(self, value):
return self._to_list_of_strings(value or []) | /robotframework-ride-2.0.7.tar.gz/robotframework-ride-2.0.7/src/robotide/lib/robot/running/dynamicmethods.py | 0.760473 | 0.225929 | dynamicmethods.py | pypi |
from robotide.lib.robot import model
from robotide.lib.robot.conf import RobotSettings
from robotide.lib.robot.output import LOGGER, Output, pyloggingconf
from robotide.lib.robot.utils import setter
from .steprunner import StepRunner
from .randomizer import Randomizer
class Keyword(model.Keyword):
    """Represents a single executable keyword.

    These keywords never have child keywords or messages. The actual keyword
    that is executed depends on the context where this model is executed.

    See the base class for documentation of attributes not documented here.
    """
    __slots__ = []
    message_class = None  #: Internal usage only.

    def run(self, context):
        """Execute the keyword.

        Typically called internally by :meth:`TestSuite.run`.
        """
        return StepRunner(context).run_step(self)
class ForLoop(Keyword):
    """Represents a for loop in test data.

    Contains keywords in the loop body as child :attr:`keywords`.
    """
    __slots__ = ['flavor']
    keyword_class = Keyword  #: Internal usage only.

    def __init__(self, variables, values, flavor):
        # Loop variables are stored as the keyword's assignment and loop
        # values as its arguments.
        Keyword.__init__(self, assign=variables, args=values,
                         type=Keyword.FOR_LOOP_TYPE)
        self.flavor = flavor

    @property
    def variables(self):
        return self.assign

    @property
    def values(self):
        return self.args
class TestCase(model.TestCase):
    """Represents a single executable test case.

    See the base class for documentation of attributes not documented here.
    """
    __slots__ = ['template']
    keyword_class = Keyword  #: Internal usage only.

    def __init__(self, name='', doc='', tags=None, timeout=None, template=None):
        model.TestCase.__init__(self, name, doc, tags, timeout)
        #: Name of the keyword that has been used as template
        #: when building the test. ``None`` if no template is used.
        self.template = template

    @setter
    def timeout(self, timeout):
        """Test timeout as a :class:`Timeout` instance or ``None``.

        This attribute is likely to change in the future.
        """
        return Timeout(*timeout) if timeout else None
class TestSuite(model.TestSuite):
    """Represents a single executable test suite.

    See the base class for documentation of attributes not documented here.
    """
    __slots__ = ['resource']
    test_class = TestCase  #: Internal usage only.
    keyword_class = Keyword  #: Internal usage only.

    def __init__(self, name='', doc='', metadata=None, source=None, rpa=False):
        model.TestSuite.__init__(self, name, doc, metadata, source, rpa)
        #: :class:`ResourceFile` instance containing imports, variables and
        #: keywords the suite owns. When data is parsed from the file system,
        #: this data comes from the same test case file that creates the suite.
        self.resource = ResourceFile(source=source)

    def configure(self, randomize_suites=False, randomize_tests=False,
                  randomize_seed=None, **options):
        """A shortcut to configure a suite using one method call.

        Can only be used with the root test suite.

        :param randomize_xxx: Passed to :meth:`randomize`.
        :param options: Passed to
            :class:`~robot.model.configurer.SuiteConfigurer` that will then
            set suite attributes, call :meth:`filter`, etc. as needed.

        Example::

            suite.configure(included_tags=['smoke'],
                            doc='Smoke test results.')
        """
        model.TestSuite.configure(self, **options)
        self.randomize(randomize_suites, randomize_tests, randomize_seed)

    def randomize(self, suites=True, tests=True, seed=None):
        """Randomizes the order of suites and/or tests, recursively.

        :param suites: Boolean controlling should suites be randomized.
        :param tests: Boolean controlling should tests be randomized.
        :param seed: Random seed. Can be given if previous random order needs
            to be re-created. Seed value is always shown in logs and reports.
        """
        self.visit(Randomizer(suites, tests, seed))

    def run(self, settings=None, **options):
        """Executes the suite based on the given ``settings`` or ``options``.

        :param settings: :class:`~robot.conf.settings.RobotSettings` object
            to configure test execution.
        :param options: Used to construct new
            :class:`~robot.conf.settings.RobotSettings` object if ``settings``
            are not given.
        :return: :class:`~robot.result.executionresult.Result` object with
            information about executed suites and tests.

        If ``options`` are used, their names are the same as long command line
        options except without hyphens. Some options are ignored (see below),
        but otherwise they have the same semantics as on the command line.
        Options that can be given on the command line multiple times can be
        passed as lists like ``variable=['VAR1:value1', 'VAR2:value2']``.
        If such an option is used only once, it can be given also as a single
        string like ``variable='VAR:value'``.

        Additionally listener option allows passing object directly instead of
        listener name, e.g. ``run('tests.robot', listener=Listener())``.

        To capture stdout and/or stderr streams, pass open file objects in as
        special keyword arguments ``stdout`` and ``stderr``, respectively.

        Only options related to the actual test execution have an effect.
        For example, options related to selecting or modifying test cases or
        suites (e.g. ``--include``, ``--name``, ``--prerunmodifier``) or
        creating logs and reports are silently ignored. The output XML
        generated as part of the execution can be configured, though. This
        includes disabling it with ``output=None``.

        Example::

            stdout = StringIO()
            result = suite.run(variable='EXAMPLE:value',
                               critical='regression',
                               output='example.xml',
                               exitonfailure=True,
                               stdout=stdout)
            print(result.return_code)

        To save memory, the returned
        :class:`~robot.result.executionresult.Result` object does not
        have any information about the executed keywords. If that information
        is needed, the created output XML file needs to be read using the
        :class:`~robot.result.resultbuilder.ExecutionResult` factory method.

        See the :mod:`package level <robot.running>` documentation for
        more examples, including how to construct executable test suites and
        how to create logs and reports based on the execution results.

        See the :func:`robot.run <robot.run.run>` function for a higher-level
        API for executing tests in files or directories.
        """
        from .namespace import IMPORTER
        from .signalhandler import STOP_SIGNAL_MONITOR
        from .runner import Runner
        with LOGGER:
            if not settings:
                settings = RobotSettings(options)
                LOGGER.register_console_logger(**settings.console_output_config)
            with pyloggingconf.robot_handler_enabled(settings.log_level):
                with STOP_SIGNAL_MONITOR:
                    IMPORTER.reset()
                    output = Output(settings)
                    runner = Runner(output, settings)
                    self.visit(runner)
                output.close(runner.result)
        return runner.result
class Variable(object):
    """A variable defined in test data, with its origin for error reporting."""

    def __init__(self, name, value, source=None):
        self.name = name
        self.value = value
        self.source = source

    def report_invalid_syntax(self, message, level='ERROR'):
        origin = self.source or '<unknown>'
        LOGGER.write("Error in file '%s': Setting variable '%s' failed: %s"
                     % (origin, self.name, message), level)
class Timeout(object):
    """Timeout value with an optional custom message."""

    def __init__(self, value, message=None):
        self.value, self.message = value, message

    def __str__(self):
        return self.value
class ResourceFile(object):
    """Imports, variables and keywords owned by a suite or a resource file."""

    def __init__(self, doc='', source=None):
        self.doc = doc
        self.source = source
        self.imports = []
        self.keywords = []
        self.variables = []

    @setter
    def imports(self, imports):
        # @setter wraps assigned values; plain lists become Imports objects.
        return model.Imports(self.source, imports)

    @setter
    def keywords(self, keywords):
        return model.ItemList(UserKeyword, items=keywords)

    @setter
    def variables(self, variables):
        # Each Variable gets this file as its source for error reporting.
        return model.ItemList(Variable, {'source': self.source}, items=variables)
class UserKeyword(object):
    """Executable user keyword created from suite or resource file data."""

    def __init__(self, name, args=(), doc='', tags=(), return_=None, timeout=None):
        self.name = name
        self.args = args
        self.doc = doc
        self.tags = tags
        self.return_ = return_ or ()
        self.timeout = timeout
        self.keywords = []

    @setter
    def keywords(self, keywords):
        # @setter wraps assigned lists into a Keywords container.
        return model.Keywords(Keyword, self, keywords)

    @setter
    def timeout(self, timeout):
        """Keyword timeout as a :class:`Timeout` instance or ``None``."""
        return Timeout(*timeout) if timeout else None

    @setter
    def tags(self, tags):
        return model.Tags(tags)
import os.path
import warnings
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.output import LOGGER
from robotide.lib.robot.parsing import TestData, ResourceFile as ResourceData, TEST_EXTENSIONS
from robotide.lib.robot.running.defaults import TestDefaults
from robotide.lib.robot.utils import abspath, is_string, normalize, unic
from robotide.lib.robot.variables import VariableIterator
from .model import ForLoop, Keyword, ResourceFile, TestSuite
class TestSuiteBuilder(object):
"""Creates executable :class:`~robot.running.model.TestSuite` objects.
    Suites are built based on existing test data on the file system.
See the overall documentation of the :mod:`robot.running` package for
more information and examples.
"""
    def __init__(self, include_suites=None, warn_on_skipped='DEPRECATED',
                 extension=None, rpa=None):
        """
        :param include_suites: List of suite names to include. If ``None`` or
            an empty list, all suites are included. When executing tests
            normally, these names are specified using the ``--suite`` option.
        :param warn_on_skipped: Deprecated.
        :param extension: Limit parsing test data to only these files. Files
            are specified as an extension that is handled case-insensitively.
            Same as ``--extension`` on the command line.
        :param rpa: Explicit test execution mode. ``True`` for RPA and
            ``False`` for test automation. By default mode is got from test
            data headers and possible conflicting headers cause an error.
        """
        self.include_suites = include_suites
        self.extensions = self._get_extensions(extension)
        builder = StepBuilder()
        self._build_steps = builder.build_steps
        self._build_step = builder.build_step
        self.rpa = rpa
        # When rpa is not given explicitly, the mode is detected later from
        # the first test case table header (see _set_execution_mode).
        self._rpa_not_given = rpa is None
        # TODO: Remove in RF 3.2.
        if warn_on_skipped != 'DEPRECATED':
            warnings.warn("Option 'warn_on_skipped' is deprecated and has no "
                          "effect.", DeprecationWarning)
def _get_extensions(self, extension):
if not extension:
return None
extensions = set(ext.lower().lstrip('.') for ext in extension.split(':'))
if not all(ext in TEST_EXTENSIONS for ext in extensions):
raise DataError("Invalid extension to limit parsing '%s'." % extension)
return extensions
def build(self, *paths):
"""
:param paths: Paths to test data files or directories.
:return: :class:`~robot.running.model.TestSuite` instance.
"""
if not paths:
raise DataError('One or more source paths required.')
if len(paths) == 1:
return self._parse_and_build(paths[0])
root = TestSuite()
for path in paths:
root.suites.append(self._parse_and_build(path))
root.rpa = self.rpa
return root
def _parse_and_build(self, path):
suite = self._build_suite(self._parse(path))
suite.remove_empty_suites()
return suite
def _parse(self, path):
try:
return TestData(source=abspath(path),
include_suites=self.include_suites,
extensions=self.extensions)
except DataError as err:
raise DataError("Parsing '%s' failed: %s" % (path, err.message))
def _build_suite(self, data, parent_defaults=None):
if self._rpa_not_given and data.testcase_table.is_started():
self._set_execution_mode(data)
self._check_deprecated_extensions(data.source)
defaults = TestDefaults(data.setting_table, parent_defaults)
suite = TestSuite(name=data.name,
source=data.source,
doc=unic(data.setting_table.doc),
metadata=self._get_metadata(data.setting_table))
self._build_setup(suite, data.setting_table.suite_setup)
self._build_teardown(suite, data.setting_table.suite_teardown)
for test_data in data.testcase_table.tests:
self._build_test(suite, test_data, defaults)
for child in data.children:
suite.suites.append(self._build_suite(child, defaults))
suite.rpa = self.rpa
ResourceFileBuilder().build(data, target=suite.resource)
return suite
def _set_execution_mode(self, data):
rpa = normalize(data.testcase_table.header[0]) in ('task', 'tasks')
if self.rpa is None:
self.rpa = rpa
elif self.rpa is not rpa:
this, that = ('tasks', 'tests') if rpa else ('tests', 'tasks')
raise DataError("Conflicting execution modes. File '%s' has %s "
"but files parsed earlier have %s. Fix headers "
"or use '--rpa' or '--norpa' options to set the "
"execution mode explicitly."
% (data.source, this, that))
def _check_deprecated_extensions(self, source):
if os.path.isdir(source):
return
ext = os.path.splitext(source)[1][1:].lower()
if self.extensions and ext in self.extensions:
return
# HTML files cause deprecation warning that cannot be avoided with
# --extension at parsing time. No need for double warning.
if ext not in ('robot', 'html', 'htm', 'xhtml'):
LOGGER.warn("Automatically parsing other than '*.robot' files is "
"deprecated. Convert '%s' to '*.robot' format or use "
"'--extension' to explicitly configure which files to "
"parse." % source)
def _get_metadata(self, settings):
# Must return as a list to preserve ordering
return [(meta.name, meta.value) for meta in settings.metadata]
def _build_test(self, suite, data, defaults):
values = defaults.get_test_values(data)
template = self._get_template(values.template)
test = suite.tests.create(name=data.name,
doc=unic(data.doc),
tags=values.tags.value,
template=template,
timeout=self._get_timeout(values.timeout))
self._build_setup(test, values.setup)
self._build_steps(test, data, template)
self._build_teardown(test, values.teardown)
def _get_timeout(self, timeout):
return (timeout.value, timeout.message) if timeout else None
def _get_template(self, template):
return unic(template) if template.is_active() else None
def _build_setup(self, parent, data):
if data.is_active():
self._build_step(parent, data, kw_type='setup')
def _build_teardown(self, parent, data):
if data.is_active():
self._build_step(parent, data, kw_type='teardown')
class ResourceFileBuilder(object):
    """Builds :class:`~robot.running.model.ResourceFile` objects from parsed
    resource file data or from a path to a resource file."""

    def __init__(self):
        builder = StepBuilder()
        self._build_steps = builder.build_steps
        self._build_step = builder.build_step

    def build(self, path_or_data, target=None):
        """Populate ``target`` (created if not given) from ``path_or_data``.

        ``path_or_data`` is either a path string, parsed here, or already
        parsed resource data.
        """
        data, source = self._import_resource_if_needed(path_or_data)
        if not target:
            target = ResourceFile(doc=data.setting_table.doc.value, source=source)
        self._build_imports(target, data.setting_table.imports)
        self._build_variables(target, data.variable_table.variables)
        for kw_data in data.keyword_table.keywords:
            self._build_keyword(target, kw_data)
        return target

    def _import_resource_if_needed(self, path_or_data):
        if not is_string(path_or_data):
            return path_or_data, path_or_data.source
        return ResourceData(path_or_data).populate(), path_or_data

    def _build_imports(self, target, imports):
        for data in imports:
            target.imports.create(type=data.type,
                                  name=data.name,
                                  args=tuple(data.args),
                                  alias=data.alias)

    def _build_variables(self, target, variables):
        for data in variables:
            # Skip invalid/empty variable rows.
            if data:
                target.variables.create(name=data.name, value=data.value)

    def _build_keyword(self, target, data):
        kw = target.keywords.create(name=data.name,
                                    args=tuple(data.args),
                                    doc=unic(data.doc),
                                    tags=tuple(data.tags),
                                    return_=tuple(data.return_),
                                    timeout=self._get_timeout(data.timeout))
        self._build_steps(kw, data)
        if data.teardown.is_active():
            self._build_step(kw, data.teardown, kw_type='teardown')

    def _get_timeout(self, timeout):
        return (timeout.value, timeout.message) if timeout else None
class StepBuilder(object):
    """Turns parsed step data into executable ``Keyword``/``ForLoop`` objects."""

    def build_steps(self, parent, data, template=None, kw_type='kw'):
        """Build all non-comment steps of ``data`` and add them to ``parent``."""
        built = [self._build(step, template, kw_type)
                 for step in data.steps if step and not step.is_comment()]
        parent.keywords.extend(built)

    def build_step(self, parent, data, template=None, kw_type='kw'):
        """Build a single step, skipping empty and comment rows."""
        if not data or data.is_comment():
            return
        parent.keywords.append(self._build(data, template, kw_type))

    def _build(self, data, template=None, kw_type='kw'):
        # For loops are built recursively; with templated tests each data
        # row becomes a call of the template keyword.
        if data.is_for_loop():
            return self._build_for_loop(data, template)
        if template:
            return self._build_templated_step(data, template)
        return self._build_normal_step(data, kw_type)

    def _build_for_loop(self, data, template):
        loop = ForLoop(variables=data.vars,
                       values=data.items,
                       flavor=data.flavor)
        self.build_steps(loop, data, template)
        return loop

    def _build_templated_step(self, data, template):
        cells = data.as_list(include_comment=False)
        name, args = self._format_template(template, cells)
        return Keyword(name=name, args=args)

    def _format_template(self, template, args):
        # If the template has exactly as many embedded '${...}' variables as
        # there are arguments, the arguments are embedded into the keyword
        # name. Otherwise they are passed as normal arguments.
        iterator = VariableIterator(template, identifiers='$')
        count = len(iterator)
        if not count or count != len(args):
            return template, tuple(args)
        parts = []
        for before, variable, after in iterator:
            parts.extend([before, args.pop(0)])
        parts.append(after)
        return ''.join(parts), ()

    def _build_normal_step(self, data, kw_type):
        return Keyword(name=data.name,
                       args=tuple(data.args),
                       assign=tuple(data.assign),
                       type=kw_type)
from java.lang import Byte, Short, Integer, Long, Boolean, Float, Double
from robotide.lib.robot.variables import contains_var
from robotide.lib.robot.utils import is_string, is_list_like
class JavaArgumentCoercer(object):
    """Coerces string arguments to Java types based on method signatures."""

    def __init__(self, signatures, argspec):
        self._argspec = argspec
        # One coercer per argument position, deduced from all signatures.
        self._coercers = CoercerFinder().find_coercers(signatures)
        self._varargs_handler = VarargsHandler(argspec)

    def coerce(self, arguments, named, dryrun=False):
        """Coerce ``arguments`` to the types the Java method expects.

        In dry-run mode values containing variables are left untouched.
        """
        arguments = self._varargs_handler.handle(arguments)
        arguments = [c.coerce(a, dryrun)
                     for c, a in zip(self._coercers, arguments)]
        # Free named arguments are passed as a dictionary at the end.
        if self._argspec.kwargs:
            arguments.append(dict(named))
        return arguments
class CoercerFinder(object):
    """Finds a type coercer for each argument position of a Java method."""

    def find_coercers(self, signatures):
        return [self._get_coercer(types, position)
                for position, types in self._parse_types(signatures)]

    def _parse_types(self, signatures):
        # Collect, per 1-based argument position, the declared argument
        # types from all overloaded signatures.
        types = {}
        for sig in signatures:
            for index, arg in enumerate(sig.args):
                types.setdefault(index + 1, []).append(arg)
        return sorted(types.items())

    def _get_coercer(self, types, position):
        possible = [BooleanCoercer(position), IntegerCoercer(position),
                    FloatCoercer(position), NullCoercer(position)]
        coercers = [self._get_coercer_for_type(t, possible) for t in types]
        # If overloads disagree about the type, no coercion is done.
        if self._coercers_conflict(*coercers):
            return NullCoercer()
        return coercers[0]

    def _get_coercer_for_type(self, type, coercers):
        # NullCoercer handles any type, so a match is always found.
        for coercer in coercers:
            if coercer.handles(type):
                return coercer

    def _coercers_conflict(self, first, *rest):
        return not all(coercer is first for coercer in rest)
class _Coercer(object):
    # Human-readable type name used in error messages.
    _name = ''
    # Java wrapper classes this coercer handles.
    _types = []
    # Names of Java primitive types this coercer handles.
    _primitives = []

    def __init__(self, position=None):
        # 1-based argument position, used in error messages.
        self._position = position

    def handles(self, type):
        return type in self._types or type.__name__ in self._primitives

    def coerce(self, argument, dryrun=False):
        """Coerce a string ``argument``; non-strings are returned as-is.

        In dry-run mode values containing variables are not coerced.
        """
        if not is_string(argument) \
                or (dryrun and contains_var(argument)):
            return argument
        try:
            return self._coerce(argument)
        except ValueError:
            raise ValueError('Argument at position %d cannot be coerced to %s.'
                             % (self._position, self._name))

    def _coerce(self, argument):
        # Implemented by subclasses; must raise ValueError on failure.
        raise NotImplementedError
class BooleanCoercer(_Coercer):
    """Coerces 'true'/'false' strings (case-insensitively) to booleans."""
    _name = 'boolean'
    _types = [Boolean]
    _primitives = ['boolean']

    def _coerce(self, argument):
        normalized = argument.lower()
        if normalized == 'true':
            return True
        if normalized == 'false':
            return False
        raise ValueError
class IntegerCoercer(_Coercer):
    """Coerces strings to Java integer types (byte/short/int/long)."""
    _name = 'integer'
    _types = [Byte, Short, Integer, Long]
    _primitives = ['byte', 'short', 'int', 'long']

    def _coerce(self, argument):
        return int(argument)
class FloatCoercer(_Coercer):
    """Coerces strings to Java floating point types (float/double)."""
    _name = 'floating point number'
    _types = [Float, Double]
    _primitives = ['float', 'double']

    def _coerce(self, argument):
        return float(argument)
class NullCoercer(_Coercer):
    """Fallback coercer that accepts any type and leaves values untouched."""

    def handles(self, argument):
        return True

    def _coerce(self, argument):
        return argument
class VarargsHandler(object):
    """Collapses trailing arguments into a list for Java varargs methods."""

    def __init__(self, argspec):
        # Index where varargs start, or -1 if the method has no varargs.
        self._index = argspec.minargs if argspec.varargs else -1

    def handle(self, arguments):
        """Pack arguments from the varargs index onwards into a single list.

        If the caller already passed a list-like value in the varargs
        position, it is used as-is. ``arguments`` is modified in place.
        """
        index = self._index
        if index > -1 and not self._passing_list(arguments):
            arguments[index:] = [arguments[index:]]
        return arguments

    def _passing_list(self, arguments):
        return self._correct_count(arguments) and is_list_like(arguments[-1])

    def _correct_count(self, arguments):
        return len(arguments) == self._index + 1
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.utils import plural_or_not, seq2str
from robotide.lib.robot.variables import is_list_var
class ArgumentValidator(object):
    """Validates resolved positional and named arguments against a spec."""

    def __init__(self, argspec):
        """:type argspec: :py:class:`robot.running.arguments.ArgumentSpec`"""
        self._argspec = argspec

    def validate(self, positional, named, dryrun=False):
        # In dry-run mode list variables may expand to any number of
        # arguments, so counting would give false errors.
        if dryrun and any(is_list_var(arg) for arg in positional):
            return
        names = set(name for name, value in named)
        spec = self._argspec
        self._validate_no_multiple_values(positional, names, spec)
        self._validate_positional_limits(positional, names, spec)
        self._validate_no_mandatory_missing(positional, names, spec)
        self._validate_no_named_only_missing(names, spec)
        self._validate_no_extra_named(names, spec)

    def _validate_positional_limits(self, positional, named, spec):
        # Named arguments matching positional parameters count towards the
        # positional argument count.
        count = len(positional) + self._named_positionals(named, spec)
        if not spec.minargs <= count <= spec.maxargs:
            self._raise_wrong_count(count, spec)

    def _named_positionals(self, named, spec):
        if not spec.supports_named:
            return 0
        return sum(1 for name in named if name in spec.positional)

    def _raise_wrong_count(self, count, spec):
        minend = plural_or_not(spec.minargs)
        if spec.minargs == spec.maxargs:
            expected = '%d argument%s' % (spec.minargs, minend)
        elif not spec.varargs:
            expected = '%d to %d arguments' % (spec.minargs, spec.maxargs)
        else:
            expected = 'at least %d argument%s' % (spec.minargs, minend)
        if spec.kwargs or spec.kwonlyargs:
            expected = expected.replace('argument', 'non-named argument')
        raise DataError("%s '%s' expected %s, got %d."
                        % (spec.type, spec.name, expected, count))

    def _validate_no_multiple_values(self, positional, named, spec):
        if not named or not spec.supports_named:
            return
        # Positional parameters already filled positionally must not also
        # be given by name.
        for name in spec.positional[:len(positional)]:
            if name in named:
                raise DataError("%s '%s' got multiple values for argument "
                                "'%s'." % (spec.type, spec.name, name))

    def _validate_no_mandatory_missing(self, positional, named, spec):
        for name in spec.positional[len(positional):spec.minargs]:
            if name not in named:
                raise DataError("%s '%s' missing value for argument '%s'."
                                % (spec.type, spec.name, name))

    def _validate_no_named_only_missing(self, named, spec):
        defined = set(named) | set(spec.defaults)
        missing = [arg for arg in spec.kwonlyargs if arg not in defined]
        if missing:
            raise DataError("%s '%s' missing named-only argument%s %s."
                            % (spec.type, spec.name, plural_or_not(missing),
                               seq2str(sorted(missing))))

    def _validate_no_extra_named(self, named, spec):
        if spec.kwargs:
            return
        extra = set(named) - set(spec.positional) - set(spec.kwonlyargs)
        if extra:
            raise DataError("%s '%s' got unexpected named argument%s %s."
                            % (spec.type, spec.name, plural_or_not(extra),
                               seq2str(sorted(extra))))
from robotide.lib.robot.errors import DataError
class ArgumentMapper(object):
    """Maps resolved arguments to the final positional/kwargs call format."""

    def __init__(self, argspec):
        """:type argspec: :py:class:`robot.running.arguments.ArgumentSpec`"""
        self._argspec = argspec

    def map(self, positional, named, replace_defaults=True):
        # Fill a call template and optionally strip unneeded default values.
        template = KeywordCallTemplate(self._argspec)
        template.fill_positional(positional)
        template.fill_named(named)
        if replace_defaults:
            template.replace_defaults()
        return template.args, template.kwargs
class KeywordCallTemplate(object):
    """Represents a keyword call being filled with arguments.

    ``args`` starts with a ``DefaultValue`` placeholder for each positional
    parameter that has a default and ``None`` for mandatory ones;
    ``fill_positional``/``fill_named`` overwrite them with given values.
    """

    def __init__(self, argspec):
        """:type argspec: :py:class:`robot.running.arguments.ArgumentSpec`"""
        self._argspec = argspec
        self.args = [None if arg not in argspec.defaults
                     else DefaultValue(argspec.defaults[arg])
                     for arg in argspec.positional]
        self.kwargs = []

    def fill_positional(self, positional):
        self.args[:len(positional)] = positional

    def fill_named(self, named):
        spec = self._argspec
        for name, value in named:
            if name in spec.positional and spec.supports_named:
                index = spec.positional.index(name)
                self.args[index] = value
            elif spec.kwargs or name in spec.kwonlyargs:
                self.kwargs.append((name, value))
            else:
                raise DataError("Non-existing named argument '%s'." % name)
        # Named-only arguments that were not given get their default value.
        named_names = {name for name, _ in named}
        for name in spec.kwonlyargs:
            if name not in named_names:
                value = DefaultValue(spec.defaults[name])
                self.kwargs.append((name, value))

    def replace_defaults(self):
        # Trailing defaults can be dropped altogether; defaults followed by
        # real values must be replaced by their actual value.
        is_default = lambda arg: isinstance(arg, DefaultValue)
        while self.args and is_default(self.args[-1]):
            self.args.pop()
        self.args = [a if not is_default(a) else a.value for a in self.args]
        self.kwargs = [(n, v) for n, v in self.kwargs if not is_default(v)]
class DefaultValue(object):
    """Wrapper distinguishing an argument default from a real given value."""

    def __init__(self, value):
        # The raw default value, possibly containing variables.
        self.value = value

    def resolve(self, variables):
        # Replace possible variables in the default value.
        try:
            return variables.replace_scalar(self.value)
        except DataError as err:
            raise DataError('Resolving argument default values failed: %s'
                            % err.message)
import re
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.utils import get_error_message, py2to3
from robotide.lib.robot.variables import VariableIterator
@py2to3
class EmbeddedArguments(object):
    """Parses a keyword name possibly containing embedded ``${arguments}``.

    Instances are falsy if the name contains no embedded arguments;
    otherwise ``name`` is a compiled regexp matching the keyword name and
    ``args`` lists the embedded argument names.
    """

    def __init__(self, name):
        if '${' in name:
            self.name, self.args = EmbeddedArgumentParser().parse(name)
        else:
            self.name, self.args = None, []

    def __nonzero__(self):
        # Truth value: were embedded arguments found? The `py2to3` decorator
        # provides the Python 3 `__bool__` counterpart.
        return self.name is not None
class EmbeddedArgumentParser(object):
    """Converts a keyword name with embedded arguments into a regexp."""
    _regexp_extension = re.compile(r'(?<!\\)\(\?.+\)')
    _regexp_group_start = re.compile(r'(?<!\\)\((.*?)\)')
    _regexp_group_escape = r'(?:\1)'
    _default_pattern = '.*?'
    _variable_pattern = r'\$\{[^\}]+\}'

    def parse(self, string):
        """Return ``(name_regexp, arg_names)`` parsed from ``string``.

        ``name_regexp`` is a compiled, case-insensitive regexp with one
        capturing group per embedded argument, or ``None`` if the name
        contains no embedded arguments.
        """
        args = []
        name_regexp = ['^']
        for before, variable, string in VariableIterator(string, identifiers='$'):
            # `variable[2:-1]` strips the surrounding '${' and '}'.
            name, pattern = self._get_name_and_pattern(variable[2:-1])
            args.append(name)
            name_regexp.extend([re.escape(before), '(%s)' % pattern])
        name_regexp.extend([re.escape(string), '$'])
        name = self._compile_regexp(name_regexp) if args else None
        return name, args

    def _get_name_and_pattern(self, name):
        # The '${name:pattern}' syntax allows a custom regexp; otherwise a
        # non-greedy match-anything pattern is used.
        if ':' not in name:
            return name, self._default_pattern
        name, pattern = name.split(':', 1)
        return name, self._format_custom_regexp(pattern)

    def _format_custom_regexp(self, pattern):
        for formatter in (self._regexp_extensions_are_not_allowed,
                          self._make_groups_non_capturing,
                          self._unescape_closing_curly,
                          self._add_automatic_variable_pattern):
            pattern = formatter(pattern)
        return pattern

    def _regexp_extensions_are_not_allowed(self, pattern):
        if not self._regexp_extension.search(pattern):
            return pattern
        raise DataError('Regexp extensions are not allowed in embedded '
                        'arguments.')

    def _make_groups_non_capturing(self, pattern):
        return self._regexp_group_start.sub(self._regexp_group_escape, pattern)

    def _unescape_closing_curly(self, pattern):
        return pattern.replace('\\}', '}')

    def _add_automatic_variable_pattern(self, pattern):
        # A variable is always accepted in addition to the custom pattern.
        return '%s|%s' % (pattern, self._variable_pattern)

    def _compile_regexp(self, pattern):
        try:
            return re.compile(''.join(pattern), re.IGNORECASE)
        # Was a bare 'except', which would also swallow KeyboardInterrupt
        # and SystemExit. Exception is wide enough for re.compile failures.
        except Exception:
            raise DataError("Compiling embedded arguments regexp failed: %s"
                            % get_error_message())
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.utils import is_string, is_dict_like, split_from_equals
from robotide.lib.robot.variables import VariableSplitter
from .argumentvalidator import ArgumentValidator
class ArgumentResolver(object):
    """Resolves named arguments and variables, then validates the result."""

    def __init__(self, argspec, resolve_named=True,
                 resolve_variables_until=None, dict_to_kwargs=False):
        self._named_resolver = NamedArgumentResolver(argspec) \
            if resolve_named else NullNamedArgumentResolver()
        self._variable_replacer = VariableReplacer(resolve_variables_until)
        self._dict_to_kwargs = DictToKwargs(argspec, dict_to_kwargs)
        self._argument_validator = ArgumentValidator(argspec)

    def resolve(self, arguments, variables=None):
        # `variables` is None in dry-run mode; validation is then relaxed.
        positional, named = self._named_resolver.resolve(arguments, variables)
        positional, named = self._variable_replacer.replace(positional, named,
                                                            variables)
        positional, named = self._dict_to_kwargs.handle(positional, named)
        self._argument_validator.validate(positional, named,
                                          dryrun=variables is None)
        return positional, named
class NamedArgumentResolver(object):
    """Splits call arguments into positional and ``name=value`` named ones."""

    def __init__(self, argspec):
        """:type argspec: :py:class:`robot.running.arguments.ArgumentSpec`"""
        self._argspec = argspec

    def resolve(self, arguments, variables=None):
        positional = []
        named = []
        for arg in arguments:
            if self._is_dict_var(arg):
                # '&{dict}' variables are kept as-is and expanded to named
                # arguments later when variables are replaced.
                named.append(arg)
            elif self._is_named(arg, named, variables):
                named.append(split_from_equals(arg))
            elif named:
                self._raise_positional_after_named()
            else:
                positional.append(arg)
        return positional, named

    def _is_dict_var(self, arg):
        return (is_string(arg) and arg[:2] == '&{' and arg[-1] == '}' and
                VariableSplitter(arg).is_dict_variable())

    def _is_named(self, arg, previous_named, variables=None):
        name, value = split_from_equals(arg)
        if value is None:
            # No unescaped '=' in the argument at all.
            return False
        if variables:
            # The name part may itself contain variables.
            name = variables.replace_scalar(name)
        argspec = self._argspec
        if previous_named or name in argspec.kwonlyargs or argspec.kwargs:
            return True
        return argspec.supports_named and name in argspec.positional

    def _raise_positional_after_named(self):
        raise DataError("%s '%s' got positional argument after named arguments."
                        % (self._argspec.type, self._argspec.name))
class NullNamedArgumentResolver(object):
    """No-op resolver used when named arguments are not supported.

    Treats every argument as positional.
    """

    def resolve(self, arguments, variables=None):
        named = {}
        return arguments, named
class DictToKwargs(object):
    """Optionally turns a trailing dictionary argument into free kwargs."""

    def __init__(self, argspec, enabled=False):
        self._maxargs = argspec.maxargs
        # Only meaningful when the spec actually accepts free kwargs.
        self._enabled = enabled and bool(argspec.kwargs)

    def handle(self, positional, named):
        if self._enabled and self._extra_arg_has_kwargs(positional, named):
            named = positional.pop().items()
        return positional, named

    def _extra_arg_has_kwargs(self, positional, named):
        # Exactly one argument more than the spec allows, no named
        # arguments, and the extra argument is a mapping.
        if named:
            return False
        if len(positional) != self._maxargs + 1:
            return False
        return is_dict_like(positional[-1])
class VariableReplacer(object):
    """Replaces variables in positional and named arguments."""

    def __init__(self, resolve_until=None):
        # How many arguments to resolve; the rest are left untouched.
        self._resolve_until = resolve_until

    def replace(self, positional, named, variables=None):
        # `variables` is None in dry-run mode and when using Libdoc. Then
        # only normalization is done: positional becomes a list and
        # unresolved '&{dict}' entries are dropped from named.
        if not variables:
            return (list(positional),
                    [item for item in named if isinstance(item, tuple)])
        positional = variables.replace_list(positional, self._resolve_until)
        named = list(self._replace_named(named, variables.replace_scalar))
        return positional, named

    def _replace_named(self, named, replace_scalar):
        for item in named:
            for name, value in self._get_replaced_named(item, replace_scalar):
                if not is_string(name):
                    raise DataError('Argument names must be strings.')
                yield name, value

    def _get_replaced_named(self, item, replace_scalar):
        # A non-tuple item is a '&{dict}' variable that expands to several
        # named arguments.
        if not isinstance(item, tuple):
            return replace_scalar(item).items()
        name, value = item
        return [(replace_scalar(name), replace_scalar(value))]
from ast import literal_eval
from collections import OrderedDict
try:
from collections import abc
except ImportError: # Python 2
import collections as abc
from datetime import datetime, date, timedelta
from decimal import InvalidOperation, Decimal
try:
from enum import Enum
except ImportError: # Standard in Py 3.4+ but can be separately installed
class Enum(object):
pass
from numbers import Integral, Real
from robotide.lib.robot.libraries.DateTime import convert_date, convert_time
from robotide.lib.robot.utils import (FALSE_STRINGS, IRONPYTHON, TRUE_STRINGS, PY_VERSION,
PY2, seq2str, type_name, unicode)
class TypeConverter(object):
    """Base class for converting string arguments to Python types.

    Concrete converters register themselves with the ``register`` class
    decorator and are looked up with ``converter_for`` based on either a
    type object or a type name/alias string.
    """
    type = None
    abc = None
    aliases = ()
    # Most converters turn the string 'NONE' into None.
    convert_none = True
    # Shared registries filled by ``register``.
    _converters = OrderedDict()
    _type_aliases = {}

    @property
    def type_name(self):
        return self.type.__name__.lower()

    @classmethod
    def register(cls, converter_class):
        converter = converter_class()
        cls._converters[converter.type] = converter
        for name in (converter.type_name,) + converter.aliases:
            if name is not None:
                cls._type_aliases[name.lower()] = converter.type
        return converter_class

    @classmethod
    def converter_for(cls, type_):
        # Types defined in the typing module in Python 3.7+. For details see
        # https://bugs.python.org/issue34568
        if PY_VERSION >= (3, 7) and hasattr(type_, '__origin__'):
            type_ = type_.__origin__
        if isinstance(type_, (str, unicode)):
            try:
                type_ = cls._type_aliases[type_.lower()]
            except KeyError:
                return None
        # String types and non-types get no conversion at all.
        if not isinstance(type_, type) or issubclass(type_, unicode):
            return None
        if type_ in cls._converters:
            return cls._converters[type_]
        # Fall back to a converter whose type or ABC the type subclasses.
        for converter in cls._converters.values():
            if converter.handles(type_):
                return converter.get_converter(type_)
        return None

    def handles(self, type_):
        return (issubclass(type_, self.type) or
                self.abc and issubclass(type_, self.abc))

    def get_converter(self, type_):
        # Overridden when a converter needs to be specialized per type.
        return self

    def convert(self, name, value, explicit_type=True):
        if self.convert_none and value.upper() == 'NONE':
            return None
        try:
            return self._convert(value, explicit_type)
        except ValueError as error:
            return self._handle_error(name, value, error, explicit_type)

    def _convert(self, value, explicit_type=True):
        # Implemented by subclasses; must raise ValueError on failure.
        raise NotImplementedError

    def _handle_error(self, name, value, error, explicit_type=True):
        # With implicit types (deduced from defaults) conversion failures
        # are not errors; the original value is used instead.
        if not explicit_type:
            return value
        ending = u': %s' % error if error.args else '.'
        raise ValueError("Argument '%s' got value '%s' that cannot be "
                         "converted to %s%s"
                         % (name, value, self.type_name, ending))

    def _literal_eval(self, value, expected):
        # ast.literal_eval has some issues with sets:
        if expected is set:
            # On Python 2 it doesn't handle sets at all.
            if PY2:
                raise ValueError('Sets are not supported on Python 2.')
            # There is no way to define an empty set.
            if value == 'set()':
                return set()
        try:
            value = literal_eval(value)
        except (ValueError, SyntaxError):
            # Original errors aren't too informative in these cases.
            raise ValueError('Invalid expression.')
        except TypeError as err:
            raise ValueError('Evaluating expression failed: %s' % err)
        if not isinstance(value, expected):
            raise ValueError('Value is %s, not %s.' % (type_name(value),
                                                       expected.__name__))
        return value
@TypeConverter.register
class BooleanConverter(TypeConverter):
    """Converts true/false strings to ``bool``.

    Values that are neither true nor false strings are returned as-is.
    """
    type = bool
    type_name = 'boolean'
    aliases = ('bool',)

    def _convert(self, value, explicit_type=True):
        folded = value.upper()
        if folded in TRUE_STRINGS:
            return True
        if folded in FALSE_STRINGS:
            return False
        return value
@TypeConverter.register
class IntegerConverter(TypeConverter):
    """Converts strings to ``int``.

    With implicit typing a value that is not an integer may still be
    accepted as a float.
    """
    type = int
    abc = Integral
    type_name = 'integer'
    aliases = ('int', 'long')

    def _convert(self, value, explicit_type=True):
        try:
            return int(value)
        except ValueError:
            pass
        if not explicit_type:
            try:
                return float(value)
            except ValueError:
                pass
        raise ValueError
@TypeConverter.register
class FloatConverter(TypeConverter):
    """Converts strings to ``float``."""
    type = float
    abc = Real
    aliases = ('double',)

    def _convert(self, value, explicit_type=True):
        try:
            return float(value)
        except ValueError:
            # Re-raise without a message; _handle_error builds the real one.
            raise ValueError
@TypeConverter.register
class DecimalConverter(TypeConverter):
    """Converts strings to ``decimal.Decimal``."""
    type = Decimal

    def _convert(self, value, explicit_type=True):
        try:
            return Decimal(value)
        except InvalidOperation:
            # With Python 3 error messages by decimal module are not very
            # useful and cannot be included in our error messages:
            # https://bugs.python.org/issue26208
            raise ValueError
@TypeConverter.register
class BytesConverter(TypeConverter):
    """Converts strings to ``bytes`` using Latin-1 encoding."""
    type = bytes
    abc = getattr(abc, 'ByteString', None)  # ByteString is new in Python 3
    type_name = 'bytes'                     # Needed on Python 2
    convert_none = False

    def _convert(self, value, explicit_type=True):
        # On Python 2 `bytes` is `str`, so implicit conversion is a no-op.
        if PY2 and not explicit_type:
            return value
        try:
            value = value.encode('latin-1')
        except UnicodeEncodeError as err:
            raise ValueError("Character '%s' cannot be mapped to a byte."
                             % value[err.start:err.start+1])
        return value if not IRONPYTHON else bytes(value)
@TypeConverter.register
class ByteArrayConverter(TypeConverter):
    """Converts strings to ``bytearray`` using Latin-1 encoding."""
    type = bytearray
    convert_none = False

    def _convert(self, value, explicit_type=True):
        try:
            return bytearray(value, 'latin-1')
        except UnicodeEncodeError as err:
            offending = value[err.start:err.start + 1]
            raise ValueError("Character '%s' cannot be mapped to a byte."
                             % offending)
@TypeConverter.register
class DateTimeConverter(TypeConverter):
    """Converts strings to ``datetime`` via the DateTime library."""
    type = datetime

    def _convert(self, value, explicit_type=True):
        return convert_date(value, result_format='datetime')
@TypeConverter.register
class DateConverter(TypeConverter):
    """Converts strings to ``date``; values with a time part are rejected."""
    type = date

    def _convert(self, value, explicit_type=True):
        converted = convert_date(value, result_format='datetime')
        has_time_part = (converted.hour or converted.minute or
                         converted.second or converted.microsecond)
        if has_time_part:
            raise ValueError("Value is datetime, not date.")
        return converted.date()
@TypeConverter.register
class TimeDeltaConverter(TypeConverter):
    """Converts time strings to ``timedelta`` via the DateTime library."""
    type = timedelta

    def _convert(self, value, explicit_type=True):
        return convert_time(value, result_format='timedelta')
@TypeConverter.register
class EnumConverter(TypeConverter):
    """Converts a member name string to a member of the expected ``Enum``."""
    type = Enum

    def __init__(self, enum=None):
        # The concrete Enum subclass; None for the registered prototype.
        self._enum = enum

    @property
    def type_name(self):
        return self._enum.__name__ if self._enum else None

    def get_converter(self, type_):
        # A specialized converter is created for each concrete enum type.
        return EnumConverter(type_)

    def _convert(self, value, explicit_type=True):
        try:
            # This is compatible with the enum module in Python 3.4, its
            # enum34 backport, and the older enum module. `self._enum[value]`
            # wouldn't work with the old enum module.
            return getattr(self._enum, value)
        except AttributeError:
            members = self._get_members(self._enum)
            raise ValueError("%s does not have member '%s'. Available: %s"
                             % (self.type_name, value, seq2str(members)))

    def _get_members(self, enum):
        try:
            return list(enum.__members__)
        except AttributeError:  # old enum module
            return [attr for attr in dir(enum) if not attr.startswith('_')]
@TypeConverter.register
class NoneConverter(TypeConverter):
    """Converter for ``NoneType``; values are passed through unchanged.

    The base class already maps the string 'NONE' to None.
    """
    type = type(None)

    def _convert(self, value, explicit_type=True):
        return value
@TypeConverter.register
class ListConverter(TypeConverter):
    """Converts a Python list literal string to ``list``."""
    type = list
    abc = abc.Sequence

    def _convert(self, value, explicit_type=True):
        return self._literal_eval(value, list)
@TypeConverter.register
class TupleConverter(TypeConverter):
    """Converts a Python tuple literal string to ``tuple``."""
    type = tuple

    def _convert(self, value, explicit_type=True):
        return self._literal_eval(value, tuple)
@TypeConverter.register
class DictionaryConverter(TypeConverter):
    """Converts a Python dict literal string to ``dict``."""
    type = dict
    abc = abc.Mapping
    type_name = 'dictionary'
    aliases = ('dict', 'map')

    def _convert(self, value, explicit_type=True):
        return self._literal_eval(value, dict)
@TypeConverter.register
class SetConverter(TypeConverter):
    """Converts a Python set literal string to ``set``."""
    type = set
    abc = abc.Set

    def _convert(self, value, explicit_type=True):
        return self._literal_eval(value, set)
@TypeConverter.register
class FrozenSetConverter(TypeConverter):
    """Converts a set literal string to ``frozenset``."""
    type = frozenset

    def _convert(self, value, explicit_type=True):
        # There are issues w/ literal_eval. See self._literal_eval for details.
        if value == 'frozenset()' and not PY2:
            return frozenset()
        return frozenset(self._literal_eval(value, set))
from robotide.lib.robot.errors import DataError
class XmlElementHandler(object):
    """Event-style dispatcher feeding parsed XML elements to handlers.

    Maintains a stack of ``(handler, result)`` pairs that mirrors the
    element nesting of the XML document being parsed.
    """

    def __init__(self, execution_result, root_handler=None):
        self._stack = [(root_handler or RootHandler(), execution_result)]

    def start(self, elem):
        current_handler, current_result = self._stack[-1]
        child_handler = current_handler.get_child_handler(elem)
        child_result = child_handler.start(elem, current_result)
        self._stack.append((child_handler, child_result))

    def end(self, elem):
        handler, result = self._stack.pop()
        handler.end(elem, result)
class _Handler(object):
def __init__(self):
self._child_handlers = dict((c.tag, c) for c in self._children())
def _children(self):
return []
def get_child_handler(self, elem):
try:
return self._child_handlers[elem.tag]
except KeyError:
raise DataError("Incompatible XML element '%s'." % elem.tag)
def start(self, elem, result):
return result
def end(self, elem, result):
pass
def _timestamp(self, elem, attr_name):
timestamp = elem.get(attr_name)
return timestamp if timestamp != 'N/A' else None
class RootHandler(_Handler):
    """Accepts only the top-level <robot> element."""

    def _children(self):
        return [RobotHandler()]
class RobotHandler(_Handler):
    """Handles the <robot> root element."""
    tag = 'robot'

    def start(self, elem, result):
        # Remember whether the output was generated by Robot itself
        # (as opposed to some other tool).
        generator = elem.get('generator', 'unknown').split()[0].upper()
        result.generated_by_robot = generator == 'ROBOT'
        # An explicitly set rpa mode takes precedence over the attribute.
        if result.rpa is None:
            result.rpa = elem.get('rpa', 'false') == 'true'
        return result

    def _children(self):
        return [RootSuiteHandler(), StatisticsHandler(), ErrorsHandler()]
class SuiteHandler(_Handler):
    """Handles <suite> elements by creating child suites."""
    tag = 'suite'

    def start(self, elem, result):
        return result.suites.create(name=elem.get('name', ''),
                                    source=elem.get('source'),
                                    rpa=result.rpa)

    def _children(self):
        # `self` at the end allows nested <suite> elements to any depth.
        return [DocHandler(), MetadataHandler(), SuiteStatusHandler(),
                KeywordHandler(), TestCaseHandler(), self]
class RootSuiteHandler(SuiteHandler):
    """Handles the top-level <suite>: fills the result's existing root
    suite instead of creating a new one."""

    def start(self, elem, result):
        result.suite.name = elem.get('name', '')
        result.suite.source = elem.get('source')
        result.suite.rpa = result.rpa
        return result.suite

    def _children(self):
        # Nested suites are handled by the normal SuiteHandler.
        return SuiteHandler._children(self)[:-1] + [SuiteHandler()]
class TestCaseHandler(_Handler):
    """Handles <test> elements."""
    tag = 'test'

    def start(self, elem, result):
        return result.tests.create(name=elem.get('name', ''))

    def _children(self):
        return [DocHandler(), TagsHandler(), TimeoutHandler(),
                TestStatusHandler(), KeywordHandler()]
class KeywordHandler(_Handler):
    """Handles <kw> elements."""
    tag = 'kw'

    def start(self, elem, result):
        return result.keywords.create(kwname=elem.get('name', ''),
                                      libname=elem.get('library', ''),
                                      type=elem.get('type', 'kw'))

    def _children(self):
        # `self` at the end allows arbitrarily deep keyword nesting.
        return [DocHandler(), ArgumentsHandler(), AssignHandler(),
                TagsHandler(), TimeoutHandler(), KeywordStatusHandler(),
                MessageHandler(), self]
class MessageHandler(_Handler):
    """Handles <msg> elements."""
    tag = 'msg'

    def end(self, elem, result):
        result.messages.create(elem.text or '',
                               elem.get('level', 'INFO'),
                               elem.get('html', 'no') == 'yes',
                               self._timestamp(elem, 'timestamp'))
class _StatusHandler(_Handler):
    """Shared helpers for handling <status> elements."""
    tag = 'status'

    def _set_status(self, elem, result):
        result.status = elem.get('status', 'FAIL')

    def _set_message(self, elem, result):
        result.message = elem.text or ''

    def _set_times(self, elem, result):
        result.starttime = self._timestamp(elem, 'starttime')
        result.endtime = self._timestamp(elem, 'endtime')
class KeywordStatusHandler(_StatusHandler):
    """Applies <status> data to a keyword.

    The status message is stored only for teardown keywords.
    """
    def end(self, elem, result):
        self._set_status(elem, result)
        self._set_times(elem, result)
        if result.type == result.TEARDOWN_TYPE:
            self._set_message(elem, result)
class SuiteStatusHandler(_StatusHandler):
    """Applies <status> message and times to a suite.

    Suite status itself is not stored; the result model derives it from
    contained test results.
    """
    def end(self, elem, result):
        self._set_message(elem, result)
        self._set_times(elem, result)
class TestStatusHandler(_StatusHandler):
    """Applies <status> status, message, and times to a test case."""
    def end(self, elem, result):
        self._set_status(elem, result)
        self._set_message(elem, result)
        self._set_times(elem, result)
class DocHandler(_Handler):
    """Stores <doc> element text as the current item's documentation."""
    tag = 'doc'
    def end(self, elem, result):
        result.doc = elem.text or ''
class MetadataHandler(_Handler):
    """Container handler for <metadata>; items are handled separately."""
    tag = 'metadata'
    def _children(self):
        return [MetadataItemHandler()]
class MetadataItemHandler(_Handler):
    """Stores a single metadata <item> into the suite metadata mapping."""
    tag = 'item'
    def end(self, elem, result):
        result.metadata[elem.get('name', '')] = elem.text or ''
class TagsHandler(_Handler):
    """Container handler for <tags>; individual tags are handled separately."""
    tag = 'tags'
    def _children(self):
        return [TagHandler()]
class TagHandler(_Handler):
    """Adds one <tag> element's text to the current item's tags."""
    tag = 'tag'
    def end(self, elem, result):
        result.tags.add(elem.text or '')
class TimeoutHandler(_Handler):
    """Stores the <timeout> element's ``value`` attribute."""
    tag = 'timeout'
    def end(self, elem, result):
        result.timeout = elem.get('value')
class AssignHandler(_Handler):
    """Container handler for <assign>; variables are handled separately."""
    tag = 'assign'
    def _children(self):
        return [AssignVarHandler()]
class AssignVarHandler(_Handler):
    """Appends one assigned variable name to the keyword's assignment."""
    tag = 'var'
    def end(self, elem, result):
        result.assign += (elem.text or '',)
class ArgumentsHandler(_Handler):
    """Container handler for <arguments>; values are handled separately."""
    tag = 'arguments'
    def _children(self):
        return [ArgumentHandler()]
class ArgumentHandler(_Handler):
    """Appends one <arg> element's text to the keyword's arguments."""
    tag = 'arg'
    def end(self, elem, result):
        result.args += (elem.text or '',)
class ErrorsHandler(_Handler):
    """Handles <errors>; contained messages go into the errors container."""
    tag = 'errors'
    def start(self, elem, result):
        return result.errors
    def _children(self):
        return [MessageHandler()]
class StatisticsHandler(_Handler):
    """Handles the <statistics> element and all of its descendants itself.

    Statistics in the result model are recalculated from the parsed suite
    data, so nothing from this subtree needs to be stored.
    """
    tag = 'statistics'
    def get_child_handler(self, elem):
        return self
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.model import SuiteVisitor, TagPattern
from robotide.lib.robot.utils import Matcher, plural_or_not
def KeywordRemover(how):
    """Factory returning a keyword remover matching the given specification.

    :param how: One of ``ALL``, ``PASSED``, ``FOR``, ``WUKS`` (case
        insensitive), or ``NAME:<pattern>`` / ``TAG:<pattern>``.
    :raises DataError: If ``how`` matches none of the supported forms.
    """
    condition = how.upper()
    # Patterns keep their original case; only the prefix is case insensitive.
    if condition.startswith('NAME:'):
        return ByNameKeywordRemover(pattern=how[5:])
    if condition.startswith('TAG:'):
        return ByTagKeywordRemover(pattern=how[4:])
    removers = {'ALL': AllKeywordsRemover,
                'PASSED': PassedKeywordRemover,
                'FOR': ForLoopItemsRemover,
                'WUKS': WaitUntilKeywordSucceedsRemover}
    try:
        return removers[condition]()
    except KeyError:
        raise DataError("Expected 'ALL', 'PASSED', 'NAME:<pattern>', 'TAG:<pattern>', "
                        "'FOR', or 'WUKS' but got '%s'." % how)
class _KeywordRemover(SuiteVisitor):
    """Base class for the ``--RemoveKeywords`` visitor implementations."""
    _message = 'Keyword data removed using --RemoveKeywords option.'

    def __init__(self):
        self._removal_message = RemovalMessage(self._message)

    def _clear_content(self, kw):
        # Drop child keywords and log messages, leaving a note in the doc.
        kw.keywords = []
        kw.messages = []
        self._removal_message.set(kw)

    def _failed_or_warning_or_error(self, item):
        if not item.passed:
            return True
        return self._warning_or_error(item)

    def _warning_or_error(self, item):
        finder = WarningAndErrorFinder()
        item.visit(finder)
        return finder.found
class AllKeywordsRemover(_KeywordRemover):
    """Removes the content of every keyword unconditionally."""
    def visit_keyword(self, keyword):
        self._clear_content(keyword)
class PassedKeywordRemover(_KeywordRemover):
    """Removes keyword content from passed tests and fully passing suites."""

    def start_suite(self, suite):
        if suite.statistics.all.failed:
            return
        for keyword in suite.keywords:
            if not self._warning_or_error(keyword):
                self._clear_content(keyword)

    def visit_test(self, test):
        if not self._failed_or_warning_or_error(test):
            for keyword in test.keywords:
                self._clear_content(keyword)

    def visit_keyword(self, keyword):
        # Keywords are handled through their parent suite/test above.
        pass
class ByNameKeywordRemover(_KeywordRemover):
    """Removes content of keywords whose name matches the given pattern."""

    def __init__(self, pattern):
        _KeywordRemover.__init__(self)
        self._matcher = Matcher(pattern, ignore='_')

    def start_keyword(self, kw):
        if not self._matcher.match(kw.name):
            return
        if not self._warning_or_error(kw):
            self._clear_content(kw)
class ByTagKeywordRemover(_KeywordRemover):
    """Removes content of keywords whose tags match the given tag pattern."""

    def __init__(self, pattern):
        _KeywordRemover.__init__(self)
        self._pattern = TagPattern(pattern)

    def start_keyword(self, kw):
        matched = self._pattern.match(kw.tags)
        if matched and not self._warning_or_error(kw):
            self._clear_content(kw)
class ForLoopItemsRemover(_KeywordRemover):
    """Removes passing iterations from FOR loops, keeping the last one."""
    _message = '%d passing step%s removed using --RemoveKeywords option.'

    def start_keyword(self, kw):
        if kw.type != kw.FOR_LOOP_TYPE:
            return
        original_count = len(kw.keywords)
        kw.keywords = self._remove_keywords(kw.keywords)
        self._removal_message.set_if_removed(kw, original_count)

    def _remove_keywords(self, keywords):
        # The last item is always kept, as are failed/warning/error items.
        return [kw for kw in keywords
                if self._failed_or_warning_or_error(kw) or kw is keywords[-1]]
class WaitUntilKeywordSucceedsRemover(_KeywordRemover):
    """Removes failing tries from 'Wait Until Keyword Succeeds' keywords."""
    _message = '%d failing step%s removed using --RemoveKeywords option.'

    def start_keyword(self, kw):
        if kw.name != 'BuiltIn.Wait Until Keyword Succeeds' or not kw.keywords:
            return
        original_count = len(kw.keywords)
        kw.keywords = self._remove_keywords(list(kw.keywords))
        self._removal_message.set_if_removed(kw, original_count)

    def _remove_keywords(self, keywords):
        # Keep the last entry, or the last two when the final child passed.
        keep = 2 if keywords[-1].passed else 1
        head, tail = keywords[:-keep], keywords[-keep:]
        return self._kws_with_warnings(head) + tail

    def _kws_with_warnings(self, keywords):
        return [kw for kw in keywords if self._warning_or_error(kw)]
class WarningAndErrorFinder(SuiteVisitor):
    """Visitor that detects whether any WARN or ERROR level message exists.

    Traversal is short-circuited once a match has been found: returning
    ``False`` from a ``start_x`` method skips that branch.
    """
    def __init__(self):
        # True after the first WARN/ERROR message has been seen.
        self.found = False
    def start_suite(self, suite):
        return not self.found
    def start_test(self, test):
        return not self.found
    def start_keyword(self, keyword):
        return not self.found
    def visit_message(self, msg):
        if msg.level in ('WARN', 'ERROR'):
            self.found = True
class RemovalMessage(object):
    """Appends a note about removed keyword data to a keyword's documentation."""

    def __init__(self, message):
        self._message = message

    def set_if_removed(self, kw, len_before):
        """Add the note only if the keyword count actually decreased."""
        removed = len_before - len(kw.keywords)
        if removed:
            self.set(kw, self._message % (removed, plural_or_not(removed)))

    def set(self, kw, message=None):
        """Append *message* (or the default) to ``kw.doc``, emphasised."""
        note = message or self._message
        kw.doc = ('%s\n\n_%s_' % (kw.doc, note)).strip()
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.model import Statistics
from .executionerrors import ExecutionErrors
from .model import TestSuite
class Result(object):
    """Test execution results.

    Can be created based on XML output files using the
    :func:`~.resultbuilder.ExecutionResult`
    factory method. Also returned by the
    :meth:`robot.running.TestSuite.run <robot.running.model.TestSuite.run>`
    method.
    """

    def __init__(self, source=None, root_suite=None, errors=None, rpa=None):
        #: Path to the XML file where results are read from.
        self.source = source
        #: Hierarchical execution results as a
        #: :class:`~.result.model.TestSuite` object.
        self.suite = root_suite or TestSuite()
        #: Execution errors as an
        #: :class:`~.executionerrors.ExecutionErrors` object.
        self.errors = errors or ExecutionErrors()
        #: ``True`` when the output was generated by Robot Framework itself.
        self.generated_by_robot = True
        # When True, `return_code` reflects the number of failed critical tests.
        self._status_rc = True
        # Keyword arguments forwarded to Statistics by the `statistics` property.
        self._stat_config = {}
        #: Execution mode: ``True`` for RPA (tasks), ``False`` for tests,
        #: ``None`` when not yet known.
        self.rpa = rpa

    @property
    def statistics(self):
        """Test execution statistics.

        Statistics are an instance of
        :class:`~robot.model.statistics.Statistics` that is created based
        on the contained ``suite`` and possible
        :func:`configuration <configure>`.

        Statistics are created every time this property is accessed. Saving
        them to a variable is thus often a good idea to avoid re-creating
        them unnecessarily::

            from robotide.lib.robot.api import ExecutionResult

            result = ExecutionResult('output.xml')
            result.configure(stat_config={'suite_stat_level': 2,
                                          'tag_stat_combine': 'tagANDanother'})
            stats = result.statistics
            print(stats.total.critical.failed)
            print(stats.total.critical.passed)
            print(stats.tags.combined[0].total)
        """
        return Statistics(self.suite, rpa=self.rpa, **self._stat_config)

    @property
    def return_code(self):
        """Return code (integer) of test execution.

        By default returns the number of failed critical tests (max 250),
        but can be :func:`configured <configure>` to always return 0.
        """
        if self._status_rc:
            # 250 caps the value so it stays within valid process exit codes.
            return min(self.suite.statistics.critical.failed, 250)
        return 0

    def configure(self, status_rc=True, suite_config=None, stat_config=None):
        """Configures the result object and objects it contains.

        :param status_rc: If set to ``False``, :attr:`return_code` always
            returns 0.
        :param suite_config: A dictionary of configuration options passed
            to :meth:`~.result.testsuite.TestSuite.configure` method of
            the contained ``suite``.
        :param stat_config: A dictionary of configuration options used when
            creating :attr:`statistics`.
        """
        if suite_config:
            self.suite.configure(**suite_config)
        self._status_rc = status_rc
        self._stat_config = stat_config or {}

    def save(self, path=None):
        """Save results as a new output XML file.

        :param path: Path to save results to. If omitted, overwrites the
            original file.
        """
        # Local import — presumably avoids a circular dependency with the
        # reporting package; confirm before moving to module level.
        from robotide.lib.robot.reporting.outputwriter import OutputWriter
        self.visit(OutputWriter(path or self.source, rpa=self.rpa))

    def visit(self, visitor):
        """An entry point to visit the whole result object.

        :param visitor: An instance of :class:`~.visitor.ResultVisitor`.

        Visitors can gather information, modify results, etc. See
        :mod:`~robot.result` package for a simple usage example.

        Notice that it is also possible to call :meth:`result.suite.visit
        <robot.result.testsuite.TestSuite.visit>` if there is no need to
        visit the contained ``statistics`` or ``errors``.
        """
        visitor.visit_result(self)

    def handle_suite_teardown_failures(self):
        """Internal usage only."""
        # Only applicable to results Robot itself generated.
        if self.generated_by_robot:
            self.suite.handle_suite_teardown_failures()

    def set_execution_mode(self, other):
        """Set execution mode based on other result. Internal usage only.

        :raises DataError: If this and the other result have conflicting
            (test vs. task) execution modes.
        """
        if other.rpa is None:
            pass
        elif self.rpa is None:
            self.rpa = other.rpa
        elif self.rpa is not other.rpa:
            this, that = ('task', 'test') if other.rpa else ('test', 'task')
            raise DataError("Conflicting execution modes. File '%s' has %ss "
                            "but files parsed earlier have %ss. Use '--rpa' "
                            "or '--norpa' options to set the execution mode "
                            "explicitly." % (other.source, this, that))
class CombinedResult(Result):
    """Combined results of multiple test executions."""

    def __init__(self, results=None):
        Result.__init__(self)
        for result in results or ():
            self.add_result(result)

    def add_result(self, other):
        # Mode validation happens first so conflicting results are rejected
        # before anything is merged in.
        self.set_execution_mode(other)
        self.suite.suites.append(other.suite)
        self.errors.add(other.errors)
from robotide.lib.robot import model
from robotide.lib.robot.utils import is_string, secs_to_timestamp, timestamp_to_secs
class SuiteConfigurer(model.SuiteConfigurer):
    """Result suite configurer.

    Calls suite's
    :meth:`~robot.result.testsuite.TestSuite.remove_keywords`,
    :meth:`~robot.result.testsuite.TestSuite.filter_messages` and
    :meth:`~robot.result.testsuite.TestSuite.set_criticality` methods
    and sets its start and end time based on the given named parameters.

    ``base_config`` is forwarded to
    :class:`robot.model.SuiteConfigurer <robot.model.configurer.SuiteConfigurer>`
    that will do further configuration based on them.
    """

    def __init__(self, remove_keywords=None, log_level=None, start_time=None,
                 end_time=None, critical_tags=None, non_critical_tags=None,
                 **base_config):
        model.SuiteConfigurer.__init__(self, **base_config)
        self.remove_keywords = self._get_remove_keywords(remove_keywords)
        self.log_level = log_level
        self.start_time = self._get_time(start_time)
        self.end_time = self._get_time(end_time)
        self.critical_tags = critical_tags
        self.non_critical_tags = non_critical_tags

    def _get_remove_keywords(self, value):
        """Normalize the option into a list of removal specifications."""
        if value is None:
            return []
        return [value] if is_string(value) else value

    def _get_time(self, timestamp):
        """Normalize a timestamp string, or return None when unparseable."""
        if not timestamp:
            return None
        try:
            secs = timestamp_to_secs(timestamp, seps=' :.-_')
        except ValueError:
            return None
        return secs_to_timestamp(secs, millis=True)

    def visit_suite(self, suite):
        model.SuiteConfigurer.visit_suite(self, suite)
        self._remove_keywords(suite)
        self._set_times(suite)
        suite.filter_messages(self.log_level)
        suite.set_criticality(self.critical_tags, self.non_critical_tags)

    def _remove_keywords(self, suite):
        for how in self.remove_keywords:
            suite.remove_keywords(how)

    def _set_times(self, suite):
        if self.start_time:
            suite.starttime = self.start_time
        if self.end_time:
            suite.endtime = self.end_time
from robotide.lib.robot.model import SuiteVisitor
class ResultVisitor(SuiteVisitor):
    """Abstract class to conveniently travel :class:`~robot.result.executionresult.Result` objects.

    A visitor implementation can be given to the :meth:`visit` method of a
    result object. The result object is then traversed and the visitor's
    :meth:`visit_x`, :meth:`start_x`, and :meth:`end_x` methods are called
    for each suite, test, keyword and message, as well as for errors,
    statistics, and other information in the result object. See methods
    below for a full list of available visitor methods.

    See the :mod:`result package level <robot.result>` documentation for
    more information about handling results and a concrete visitor example.
    For more information about the visitor algorithm see documentation in
    :mod:`robot.model.visitor` module.
    """

    def visit_result(self, result):
        # An explicit False from start_result skips the whole branch,
        # including the corresponding end_result call.
        if self.start_result(result) is False:
            return
        result.suite.visit(self)
        result.statistics.visit(self)
        result.errors.visit(self)
        self.end_result(result)

    def start_result(self, result):
        pass

    def end_result(self, result):
        pass

    def visit_statistics(self, stats):
        if self.start_statistics(stats) is False:
            return
        stats.total.visit(self)
        stats.tags.visit(self)
        stats.suite.visit(self)
        self.end_statistics(stats)

    def start_statistics(self, stats):
        pass

    def end_statistics(self, stats):
        pass

    def visit_total_statistics(self, stats):
        if self.start_total_statistics(stats) is False:
            return
        for stat in stats:
            stat.visit(self)
        self.end_total_statistics(stats)

    def start_total_statistics(self, stats):
        pass

    def end_total_statistics(self, stats):
        pass

    def visit_tag_statistics(self, stats):
        if self.start_tag_statistics(stats) is False:
            return
        for stat in stats:
            stat.visit(self)
        self.end_tag_statistics(stats)

    def start_tag_statistics(self, stats):
        pass

    def end_tag_statistics(self, stats):
        pass

    def visit_suite_statistics(self, stats):
        if self.start_suite_statistics(stats) is False:
            return
        for stat in stats:
            stat.visit(self)
        self.end_suite_statistics(stats)

    def start_suite_statistics(self, stats):
        pass

    def end_suite_statistics(self, suite_stats):
        pass

    def visit_stat(self, stat):
        if self.start_stat(stat) is False:
            return
        self.end_stat(stat)

    def start_stat(self, stat):
        pass

    def end_stat(self, stat):
        pass

    def visit_errors(self, errors):
        # Unlike the other visit_x methods, start_errors cannot veto traversal.
        self.start_errors(errors)
        for msg in errors:
            msg.visit(self)
        self.end_errors(errors)

    def start_errors(self, errors):
        pass

    def end_errors(self, errors):
        pass
from itertools import chain
from robotide.lib.robot.model import TotalStatisticsBuilder, Criticality
from robotide.lib.robot import model, utils
from .configurer import SuiteConfigurer
from .messagefilter import MessageFilter
from .keywordremover import KeywordRemover
from .suiteteardownfailed import (SuiteTeardownFailureHandler,
SuiteTeardownFailed)
# TODO: Should remove model.Message altogether and just implement the whole
# thing here. Additionally model.Keyword should not have `message_class` at
# all or it should be None.
class Message(model.Message):
    """Represents a single log message in execution results.

    See the base class for documentation of attributes not documented here.
    """
    __slots__ = []
class Keyword(model.Keyword):
    """Represents results of a single keyword.

    See the base class for documentation of attributes not documented here.
    """
    __slots__ = ['kwname', 'libname', 'status', 'starttime', 'endtime', 'message']
    message_class = Message

    def __init__(self, kwname='', libname='', doc='', args=(), assign=(),
                 tags=(), timeout=None, type='kw', status='FAIL',
                 starttime=None, endtime=None):
        # Base class `name` is passed empty; the full name is derived from
        # `libname` and `kwname` by the `name` property below.
        model.Keyword.__init__(self, '', doc, args, assign, tags, timeout, type)
        #: Name of the keyword without library or resource name.
        self.kwname = kwname or ''
        #: Name of the library or resource containing this keyword.
        self.libname = libname or ''
        #: Execution status as a string. Typically ``PASS`` or ``FAIL``, but
        #: library keywords have status ``NOT_RUN`` in the dry-run mode.
        #: See also :attr:`passed`.
        self.status = status
        #: Keyword execution start time in format ``%Y%m%d %H:%M:%S.%f``.
        self.starttime = starttime
        #: Keyword execution end time in format ``%Y%m%d %H:%M:%S.%f``.
        self.endtime = endtime
        #: Keyword status message. Used only if suite teardown fails.
        self.message = ''

    @property
    def elapsedtime(self):
        """Total execution time in milliseconds."""
        return utils.get_elapsed_time(self.starttime, self.endtime)

    @property
    def name(self):
        """Keyword name in format ``libname.kwname``.

        Just ``kwname`` if :attr:`libname` is empty. In practice that is the
        case only with user keywords in the same file as the executed test case
        or test suite.

        Cannot be set directly. Set :attr:`libname` and :attr:`kwname`
        separately instead.
        """
        if not self.libname:
            return self.kwname
        return '%s.%s' % (self.libname, self.kwname)

    @property
    def passed(self):
        """``True`` or ``False`` depending on the :attr:`status`."""
        return self.status == 'PASS'

    @passed.setter
    def passed(self, passed):
        self.status = 'PASS' if passed else 'FAIL'
class TestCase(model.TestCase):
    """Results of a single executed test case.

    See the base class for documentation of attributes not documented here.
    """
    __slots__ = ['status', 'message', 'starttime', 'endtime']
    keyword_class = Keyword

    def __init__(self, name='', doc='', tags=None, timeout=None, status='FAIL',
                 message='', starttime=None, endtime=None):
        model.TestCase.__init__(self, name, doc, tags, timeout)
        #: Status as a string ``PASS`` or ``FAIL``. See also :attr:`passed`.
        self.status = status
        #: Test message. Typically a failure message, but can also be set
        #: for passing tests.
        self.message = message
        #: Test case execution start time in format ``%Y%m%d %H:%M:%S.%f``.
        self.starttime = starttime
        #: Test case execution end time in format ``%Y%m%d %H:%M:%S.%f``.
        self.endtime = endtime

    @property
    def elapsedtime(self):
        """Total execution time in milliseconds."""
        return utils.get_elapsed_time(self.starttime, self.endtime)

    @property
    def passed(self):
        """``True/False`` depending on the :attr:`status`."""
        return self.status == 'PASS'

    @passed.setter
    def passed(self, passed):
        if passed:
            self.status = 'PASS'
        else:
            self.status = 'FAIL'

    @property
    def critical(self):
        """``True/False`` depending on is the test considered critical.

        Criticality is determined based on test's :attr:`tags` and
        :attr:`~TestSuite.criticality` of the :attr:`parent` suite.
        """
        parent = self.parent
        if not parent:
            # A test without a parent suite defaults to critical.
            return True
        return parent.criticality.test_is_critical(self)
class TestSuite(model.TestSuite):
    """Represents results of a single test suite.

    See the base class for documentation of attributes not documented here.
    """
    __slots__ = ['message', 'starttime', 'endtime', '_criticality']
    test_class = TestCase
    keyword_class = Keyword

    def __init__(self, name='', doc='', metadata=None, source=None,
                 message='', starttime=None, endtime=None, rpa=False):
        model.TestSuite.__init__(self, name, doc, metadata, source, rpa)
        #: Possible suite setup or teardown error message.
        self.message = message
        #: Suite execution start time in format ``%Y%m%d %H:%M:%S.%f``.
        self.starttime = starttime
        #: Suite execution end time in format ``%Y%m%d %H:%M:%S.%f``.
        self.endtime = endtime
        # Lazily initialized via `criticality`; settable only on the root suite.
        self._criticality = None

    @property
    def passed(self):
        """``True`` if no critical test has failed, ``False`` otherwise."""
        return not self.statistics.critical.failed

    @property
    def status(self):
        """``'PASS'`` if no critical test has failed, ``'FAIL'`` otherwise."""
        return 'PASS' if self.passed else 'FAIL'

    @property
    def statistics(self):
        """Suite statistics as a :class:`~robot.model.totalstatistics.TotalStatistics` object.

        Recreated every time this property is accessed, so saving the results
        to a variable and inspecting it is often a good idea::

            stats = suite.statistics
            print(stats.critical.failed)
            print(stats.all.total)
            print(stats.message)
        """
        return TotalStatisticsBuilder(self, self.rpa).stats

    @property
    def full_message(self):
        """Combination of :attr:`message` and :attr:`stat_message`."""
        if not self.message:
            return self.stat_message
        return '%s\n\n%s' % (self.message, self.stat_message)

    @property
    def stat_message(self):
        """String representation of the :attr:`statistics`."""
        return self.statistics.message

    @property
    def elapsedtime(self):
        """Total execution time in milliseconds."""
        if self.starttime and self.endtime:
            return utils.get_elapsed_time(self.starttime, self.endtime)
        # Without recorded times, sum the elapsed times of all children.
        return sum(child.elapsedtime for child in
                   chain(self.suites, self.tests, self.keywords))

    @property
    def criticality(self):
        """Used by tests to determine are they considered critical or not.

        Normally configured using ``--critical`` and ``--noncritical``
        command line options. Can be set programmatically using
        :meth:`set_criticality` of the root test suite.
        """
        # Criticality is always resolved from the root of the suite tree.
        if self.parent:
            return self.parent.criticality
        if self._criticality is None:
            self.set_criticality()
        return self._criticality

    def set_criticality(self, critical_tags=None, non_critical_tags=None):
        """Sets which tags are considered critical and which non-critical.

        :param critical_tags: Tags or patterns considered critical. See
            the documentation of the ``--critical`` option for more details.
        :param non_critical_tags: Tags or patterns considered non-critical. See
            the documentation of the ``--noncritical`` option for more details.

        Tags can be given as lists of strings or, when giving only one,
        as single strings. This information is used by tests to determine
        are they considered critical or not.

        Criticality can be set only to the root test suite.
        """
        if self.parent is not None:
            raise ValueError('Criticality can only be set to the root suite.')
        self._criticality = Criticality(critical_tags, non_critical_tags)

    def remove_keywords(self, how):
        """Remove keywords based on the given condition.

        :param how: What approach to use when removing keywords. Either
            ``ALL``, ``PASSED``, ``FOR``, ``WUKS``, or ``NAME:<pattern>``.

        For more information about the possible values see the documentation
        of the ``--removekeywords`` command line option.
        """
        self.visit(KeywordRemover(how))

    def filter_messages(self, log_level='TRACE'):
        """Remove log messages below the specified ``log_level``."""
        self.visit(MessageFilter(log_level))

    def configure(self, **options):
        """A shortcut to configure a suite using one method call.

        Can only be used with the root test suite.

        :param options: Passed to
            :class:`~robot.result.configurer.SuiteConfigurer` that will then
            set suite attributes, call :meth:`filter`, etc. as needed.

        Example::

            suite.configure(remove_keywords='PASSED',
                            critical_tags='smoke',
                            doc='Smoke test results.')
        """
        model.TestSuite.configure(self)  # Parent validates call is allowed.
        self.visit(SuiteConfigurer(**options))

    def handle_suite_teardown_failures(self):
        """Internal usage only."""
        self.visit(SuiteTeardownFailureHandler())

    def suite_teardown_failed(self, message):
        """Internal usage only."""
        self.visit(SuiteTeardownFailed(message))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.