function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def test_award_groups(self, xml_content, expected):
    """parser.award_groups on the article body should match expected."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.award_groups(soup_body(parsed)))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_category(self, xml_content, expected):
    """parser.category should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.category(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_collection_year(self, xml_content, expected):
    """parser.collection_year should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.collection_year(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_collection_year_edge_cases(self, xml_content, expected):
    """Edge cases for parser.collection_year on the article body."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.collection_year(soup_body(parsed)))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_component_doi(self, xml_content, expected):
    """parser.component_doi should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.component_doi(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_components(self, filename, expected):
    """parser.components of a parsed document should match expected."""
    parsed = parser.parse_document(filename)
    self.assertEqual(expected, parser.components(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_conflict(self, xml_content, expected):
    """parser.conflict should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.conflict(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_contributors(self, xml_content, expected):
    """parser.contributors should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.contributors(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_copyright_holder(self, xml_content, expected):
    """parser.copyright_holder should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.copyright_holder(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_copyright_holder_json(self, xml_content, expected):
    """parser.copyright_holder_json should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.copyright_holder_json(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_copyright_statement(self, xml_content, expected):
    """parser.copyright_statement should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.copyright_statement(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_copyright_year_edge_cases(self, xml_content, expected):
    """Edge cases for parser.copyright_year."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.copyright_year(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_correspondence(self, xml_content, expected):
    """parser.correspondence should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.correspondence(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_digest(self, xml_content, expected):
    """parser.digest should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.digest(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_display_channel(self, xml_content, expected):
    """parser.display_channel should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.display_channel(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_doi(self, xml_content, expected):
    """parser.doi should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.doi(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_elocation_id(self, xml_content, expected):
    """parser.elocation_id should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.elocation_id(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_abstract(self, xml_content, expected):
    """parser.full_abstract should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_abstract(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_affiliation(self, xml_content, expected):
    """parser.full_affiliation should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_affiliation(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_award_group_funding_source(self, xml_content, expected):
    """parser.full_award_group_funding_source should match expected."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_award_group_funding_source(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_award_groups(self, xml_content, expected):
    """parser.full_award_groups on the article body should match expected."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_award_groups(soup_body(parsed)))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_correspondence(self, xml_content, expected):
    """parser.full_correspondence should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_correspondence(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_digest(self, xml_content, expected):
    """parser.full_digest should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_digest(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_funding_statement(self, xml_content, expected):
    """parser.full_funding_statement should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_funding_statement(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_keyword_groups(self, xml_content, expected):
    """parser.full_keyword_groups should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_keyword_groups(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_keywords(self, xml_content, expected):
    """parser.full_keywords should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_keywords(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_license(self, xml_content, expected):
    """parser.full_license on the article body should match expected."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_license(soup_body(parsed)))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_research_organism(self, xml_content, expected):
    """parser.full_research_organism should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_research_organism(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_subject_area(self, xml_content, expected):
    """parser.full_subject_area should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_subject_area(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_full_title(self, xml_content, expected):
    """parser.full_title should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.full_title(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_funding_statement(self, xml_content, expected):
    """parser.funding_statement should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.funding_statement(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_graphics(self, filename, expected):
    """parser.graphics of a parsed document should match expected."""
    parsed = parser.parse_document(filename)
    self.assertEqual(expected, parser.graphics(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_impact_statement(self, xml_content, expected):
    """parser.impact_statement should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.impact_statement(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_inline_graphics(self, filename, expected):
    """parser.inline_graphics of a parsed document should match expected."""
    parsed = parser.parse_document(filename)
    self.assertEqual(expected, parser.inline_graphics(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_is_poa(self, xml_content, expected):
    """parser.is_poa should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.is_poa(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_journal_id(self, xml_content, expected):
    """parser.journal_id should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.journal_id(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_journal_title(self, xml_content, expected):
    """parser.journal_title should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.journal_title(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_keywords(self, xml_content, expected):
    """parser.keywords should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.keywords(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_license(self, xml_content, expected):
    """parser.license on the article body should match expected."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.license(soup_body(parsed)))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_license_json(self, xml_content, expected):
    """parser.license_json should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.license_json(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_license_url(self, xml_content, expected):
    """parser.license_url on the article body should match expected."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.license_url(soup_body(parsed)))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_media(self, filename, expected):
    """parser.media of a parsed document should match expected."""
    parsed = parser.parse_document(filename)
    self.assertEqual(expected, parser.media(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_pub_dates_edge_cases(self, xml_content, expected):
    """Edge cases for parser.pub_dates."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.pub_dates(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_pub_date_timestamp(self, xml_content, expected):
    """parser.pub_date_timestamp should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.pub_date_timestamp(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_pub_date_date(self, xml_content, expected):
    """parser.pub_date_date should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.pub_date_date(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_pub_date_day(self, xml_content, expected):
    """parser.pub_date_day should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.pub_date_day(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_pub_date_month(self, xml_content, expected):
    """parser.pub_date_month should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.pub_date_month(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_pub_date_year(self, xml_content, expected):
    """parser.pub_date_year should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.pub_date_year(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_publisher(self, xml_content, expected):
    """parser.publisher should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.publisher(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_publisher_id(self, xml_content, expected):
    """parser.publisher_id should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.publisher_id(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_received_date_date(self, xml_content, expected):
    """parser.received_date_date should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.received_date_date(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_received_date_day(self, xml_content, expected):
    """parser.received_date_day should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.received_date_day(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_received_date_month(self, xml_content, expected):
    """parser.received_date_month should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.received_date_month(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_received_date_timestamp(self, xml_content, expected):
    """parser.received_date_timestamp should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.received_date_timestamp(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_received_date_year(self, xml_content, expected):
    """parser.received_date_year should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.received_date_year(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_refs_edge_cases(self, xml_content, expected):
    """Edge cases for parser.refs."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.refs(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_related_article(self, xml_content, expected):
    """parser.related_article should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.related_article(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_sub_articles(self, xml_content, expected):
    """parser.sub_articles should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.sub_articles(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_related_object_ids(self, xml_content, expected):
    """parser.related_object_ids should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.related_object_ids(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_research_organism(self, xml_content, expected):
    """parser.research_organism should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.research_organism(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_self_uri(self, xml_content, expected):
    """parser.self_uri should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.self_uri(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_subject_area(self, xml_content, expected):
    """parser.subject_area should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.subject_area(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_supplementary_material(self, xml_content, expected):
    """parser.supplementary_material should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.supplementary_material(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_title(self, xml_content, expected):
    """parser.title should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.title(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_title_prefix(self, xml_content, expected):
    """parser.title_prefix should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.title_prefix(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_title_prefix_json(self, xml_content, expected):
    """parser.title_prefix_json on the article body should match expected."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.title_prefix_json(soup_body(parsed)))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_title_short(self, xml_content, expected):
    """parser.title_short should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.title_short(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_title_slug(self, xml_content, expected):
    """parser.title_slug should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.title_slug(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_volume(self, xml_content, expected):
    """parser.volume should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.volume(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_issue(self, xml_content, expected):
    """parser.issue should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.issue(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_fpage(self, xml_content, expected):
    """parser.fpage should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.fpage(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_lpage(self, xml_content, expected):
    """parser.lpage should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.lpage(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_version_history(self, xml_content, expected):
    """parser.version_history should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.version_history(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_clinical_trials(self, xml_content, expected):
    """parser.clinical_trials on the article body should match expected."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.clinical_trials(soup_body(parsed)))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def test_pub_history(self, xml_content, expected):
    """parser.pub_history should extract the expected value."""
    parsed = parser.parse_xml(xml_content)
    self.assertEqual(expected, parser.pub_history(parsed))
elifesciences/elife-tools
[ 14, 7, 14, 4, 1423012481 ]
def _get_PyLinterRunV():
    """Return a callable compatible with pylint>=2's ``Run(args, do_exit=...)``.

    pylint changed the keyword argument ``exit`` to ``do_exit`` in version 2;
    this adapter hides the difference from the caller.

    @return     a callable ``(args, do_exit=False)`` running pylint
    """
    # Separate function to speed up import.
    from pylint.lint import Run as PyLinterRun
    from pylint import __version__ as pylint_version
    # BUG FIX: the original compared version *strings* lexicographically
    # ("10.0.0" >= "2.0.0" is False); compare the numeric major part instead.
    try:
        major = int(pylint_version.split('.')[0])
    except ValueError:  # unexpected version scheme -> assume modern pylint
        major = 2
    if major >= 2:
        PyLinterRunV = PyLinterRun
    else:
        # pylint < 2 expected the keyword `exit` instead of `do_exit`
        PyLinterRunV = lambda *args, do_exit=False: PyLinterRun(  # pylint: disable=E1120, E1123
            *args, exit=do_exit)  # pylint: disable=E1120, E1123
    return PyLinterRunV
sdpython/pyquickhelper
[ 21, 10, 21, 22, 1388194285 ]
def _extended_refactoring(filename, line): # pragma: no cover """ Private function which does extra checkings when refactoring :epkg:`pyquickhelper`. @param filename filename @param line line @return None or error message """ if "from pyquickhelper import fLOG" in line: if "test_code_style" not in filename: return "issue with fLOG" if "from pyquickhelper import noLOG" in line: if "test_code_style" not in filename: return "issue with noLOG" if "from pyquickhelper import run_cmd" in line: if "test_code_style" not in filename: return "issue with run_cmd" if "from pyquickhelper import get_temp_folder" in line: if "test_code_style" not in filename: return "issue with get_temp_folder" return None
sdpython/pyquickhelper
[ 21, 10, 21, 22, 1388194285 ]
def check_pep8(folder, ignore=('E265', 'W504'), skip=None,
               complexity=-1, stop_after=100, fLOG=None,
               pylint_ignore=('C0103', 'C1801', 'R0201', 'R1705', 'W0108',
                              'W0613', 'W0107', 'C0415', 'C0209'),
               recursive=True, neg_pattern=None, extended=None,
               max_line_length=143, pattern=".*[.]py$", run_lint=True,
               verbose=False, run_cmd_filter=None):
    """
    Checks if :epkg:`PEP8`, the function calls command :epkg:`pycodestyle`
    on a specific folder.

    @param      folder              folder to look into
    @param      ignore              list of warnings to skip when raising an exception if
                                    :epkg:`PEP8` is not verified, see also
                                    `Error Codes <http://pep8.readthedocs.org/en/latest/intro.html#error-codes>`_
    @param      pylint_ignore       ignore :epkg:`pylint` issues, see :epkg:`pylint error codes`
    @param      complexity          see `check_file <https://pycodestyle.pycqa.org/en/latest/api.html>`_
    @param      stop_after          stop after *stop_after* issues
    @param      skip                skip a warning if a substring in this list is found
    @param      neg_pattern         skip files verifying this regular expressions
    @param      extended            list of tuple (name, function), see below
    @param      max_line_length     maximum allowed length of a line of code
    @param      recursive           look into subfolder
    @param      pattern             only file matching this pattern will be checked
    @param      run_lint            run :epkg:`pylint`
    @param      verbose             :epkg:`pylint` is slow, tells which file is
                                    investigated (but it is even slower)
    @param      run_cmd_filter      some files makes :epkg:`pylint` crashes (``import yaml``),
                                    the test for this is run in a separate process
                                    if the function *run_cmd_filter* returns True of the
                                    filename, *verbose* is set to True in that case
    @param      fLOG                logging function
    @return                         output

    Functions mentioned in *extended* takes two parameters (file name and line)
    and they returned None or an error message or a tuple (position in the line,
    error message). When the return is not empty, a warning will be added to the
    ones printed by :epkg:`pycodestyle`. The function adds its own codes:
    *ECL1* for a line too long for a specific reason.

    If *neg_pattern* is empty, it populates with a default value which skips
    unnecessary folders:
    ``".*[/\\\\\\\\]((_venv)|([.]git)|(__pycache__)|(temp_)).*"``.
    """
    # delayed import to speed up import time of pycode
    import pycodestyle
    from ..filehelper.synchelper import explore_folder_iterfile
    if fLOG is None:
        from ..loghelper.flog import noLOG  # pragma: no cover
        fLOG = noLOG  # pragma: no cover

    def extended_checkings(fname, content, buf, extended):
        # Runs every extra checker on every line and reports hits into *buf*
        # in a pycodestyle-like "file:line:col F<name> <msg>" format.
        for i, line in enumerate(content):
            for name, fu in extended:
                r = fu(fname, line)
                if isinstance(r, tuple):
                    c, r = r
                else:
                    c = 1  # checkers may omit the column; default to 1
                if r is not None:
                    buf.write("{0}:{1}:{4} F{2} {3}\n".format(
                        fname, i + 1, name, r, c))

    def fkeep(s):
        # Keeps a reported line unless empty or matched by *skip*.
        if len(s) == 0:
            return False
        if skip is not None:
            for kip in skip:
                if kip in s:
                    return False
        return True

    if max_line_length is not None:
        if extended is None:
            extended = []
        else:
            # copy so the caller's list is not mutated by the append below
            extended = extended.copy()

        def check_lenght_line(fname, line):
            # Custom "line too long" check skipping comments; only flags
            # specific content (links, :math:, ERROR:) with code ECL1.
            if len(line) > max_line_length and not line.lstrip().startswith('#'):
                if ">`_" in line:
                    return "line too long (link) {0} > {1}".format(
                        len(line), max_line_length)
                if ":math:`" in line:
                    return "line too long (:math:) {0} > {1}".format(  # pragma: no cover
                        len(line), max_line_length)
                if "ERROR: " in line:
                    return "line too long (ERROR:) {0} > {1}".format(  # pragma: no cover
                        len(line), max_line_length)
            return None

        extended.append(("[ECL1]", check_lenght_line))

    if ignore is None:
        ignore = tuple()
    elif isinstance(ignore, list):
        ignore = tuple(ignore)

    if neg_pattern is None:
        neg_pattern = ".*[/\\\\]((_venv)|([.]git)|(__pycache__)|(temp_)|([.]egg)|(bin)).*"

    try:
        regneg_filter = None if neg_pattern is None else re.compile(
            neg_pattern)
    except re.error as e:  # pragma: no cover
        raise ValueError("Unable to compile '{0}'".format(neg_pattern)) from e

    # pycodestyle pass: collect style issues into *buf* via stdout redirection
    fLOG("[check_pep8] code style on '{0}'".format(folder))
    files_to_check = []
    skipped = []
    buf = StringIO()
    with redirect_stdout(buf):
        for file in explore_folder_iterfile(folder, pattern=pattern,
                                            recursive=recursive):
            if regneg_filter is not None:
                if regneg_filter.search(file):
                    skipped.append(file)
                    continue
            if file.endswith("__init__.py"):
                # unused imports are expected in package initializers
                ig = ignore + ('F401',)
            else:
                ig = ignore
            if file is None:
                raise RuntimeError(  # pragma: no cover
                    "file cannot be None")
            if len(file) == 0:
                raise RuntimeError(  # pragma: no cover
                    "file cannot be empty")
            # code style
            files_to_check.append(file)
            try:
                style = pycodestyle.StyleGuide(
                    ignore=ig, complexity=complexity, format='pylint',
                    max_line_length=max_line_length)
                res = style.check_files([file])
            except TypeError as e:  # pragma: no cover
                ext = "This is often due to an instruction from . import... The imported module has no name."
                raise TypeError("Issue with pycodesyle for module '{0}' ig={1} complexity={2}\n{3}".format(
                    file, ig, complexity, ext)) from e
            if extended is not None:
                with open(file, "r", errors="ignore") as f:
                    content = f.readlines()
                extended_checkings(file, content, buf, extended)
            if res.total_errors + res.file_errors > 0:
                res.print_filename = True
                lines = [_ for _ in buf.getvalue().split("\n") if fkeep(_)]
                if len(lines) > stop_after:
                    # bail out early once too many issues have accumulated
                    raise PEP8Exception(  # pragma: no cover
                        "{0} lines\n{1}".format(len(lines), "\n".join(lines)))

    lines = [_ for _ in buf.getvalue().split("\n") if fkeep(_)]
    if len(lines) > 10:
        raise PEP8Exception(  # pragma: no cover
            "{0} lines\n{1}".format(len(lines), "\n".join(lines)))

    if len(files_to_check) == 0:
        mes = skipped[0] if skipped else "-no skipped file-"
        raise FileNotFoundError(  # pragma: no cover
            "No file found in '{0}'\n pattern='{1}'\nskipped='{2}'".format(
                folder, pattern, mes))

    # pylint pass (optional)
    if not run_lint:
        return "\n".join(lines)
    fLOG("[check_pep8] pylint with {0} files".format(len(files_to_check)))

    memout = sys.stdout
    # probe whether fLOG accepts an OutputStream keyword; fall back to
    # plain writes on the saved stdout otherwise
    try:
        fLOG('', OutputStream=memout)
        regular_print = False
    except TypeError:  # pragma: no cover
        regular_print = True

    def myprint(s):
        "local print, chooses the right function"
        if regular_print:  # pragma: no cover
            memout.write(s + "\n")
        else:  # pragma: no cover
            fLOG(s, OutputStream=memout)

    neg_pat = ".*temp[0-9]?_.*,doc_.*"
    if neg_pattern is not None:
        neg_pat += ',' + neg_pattern
    if run_cmd_filter is not None:
        # running in a separate process implies per-file (verbose) mode
        verbose = True  # pragma: no cover

    PyLinterRunV = _get_PyLinterRunV()
    sout = StringIO()
    serr = StringIO()
    with redirect_stdout(sout):
        with redirect_stderr(serr):
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", DeprecationWarning)
                opt = ["--ignore-patterns=" + neg_pat, "--persistent=n",
                       '--jobs=1', '--suggestion-mode=n', "--score=n",
                       '--max-args=30', '--max-locals=50', '--max-returns=30',
                       '--max-branches=50', '--max-parents=25',
                       '--max-attributes=50', '--min-public-methods=0',
                       '--max-public-methods=100', '--max-bool-expr=10',
                       '--max-statements=200',
                       '--msg-template={abspath}:{line}: {msg_id}: {msg} (pylint)']
                if pylint_ignore:
                    opt.append('--disable=' + ','.join(pylint_ignore))
                if max_line_length:
                    opt.append("--max-line-length=%d" % max_line_length)

                if verbose:  # pragma: no cover
                    # lint one file at a time so progress can be reported
                    for i, name in enumerate(files_to_check):
                        cop = list(opt)
                        cop.append(name)
                        if run_cmd_filter is None or not run_cmd_filter(name):
                            myprint(
                                "[check_pep8] lint file {0}/{1} - '{2}'\n".format(
                                    i + 1, len(files_to_check), name))
                            PyLinterRunV(cop, do_exit=False)
                        else:
                            # delayed import to speed up import time of pycode
                            from ..loghelper import run_cmd
                            # runs from command line (isolates files which
                            # crash pylint in-process)
                            myprint(
                                "[check_pep8] cmd-lint file {0}/{1} - '{2}'\n".format(
                                    i + 1, len(files_to_check), name))
                            cmd = "{0} -m pylint {1}".format(
                                sys.executable,
                                " ".join('"{0}"'.format(_) for _ in cop))
                            out = run_cmd(cmd, wait=True)[0]
                            lines.extend(_ for _ in out.split(
                                '\n') if _.strip('\r '))
                else:
                    # single pylint run over all files
                    opt.extend(files_to_check)
                    PyLinterRunV(opt, do_exit=False)

    pylint_lines = sout.getvalue().split('\n')
    # keep only real pylint messages: tagged '(pylint)', not indented,
    # with at least file:line: prefix, and not filtered by *skip*
    pylint_lines = [
        _ for _ in pylint_lines if (
            '(pylint)' in _ and fkeep(_) and _[0] != ' ' and
            len(_.split(':')) > 2)]
    pylint_lines = [_ for _ in pylint_lines if not _.startswith(
        "except ") and not _.startswith("else:") and not _.startswith(
        "try:") and "# noqa" not in _]
    lines.extend(pylint_lines)
    if len(lines) > 0:
        raise PEP8Exception(
            "{0} lines\n{1}".format(len(lines), "\n".join(lines)))
    return "\n".join(lines)
sdpython/pyquickhelper
[ 21, 10, 21, 22, 1388194285 ]
def __init__(self, parent=None, width=200, placeholder=u"Enter some text ..", value=u"", **kwargs):
    """ Constructs a new input field. An input field always needs a width specified """
    LUIObject.__init__(self, x=0, y=0, solid=True)
    self.set_width(width)
    self._layout = LUIHorizontalStretchedLayout(parent=self, prefix="InputField", width="100%")
    # Container for the text
    self._text_content = LUIObject(self)
    self._text_content.margin = (5, 7, 5, 7)
    self._text_content.clip_bounds = (0,0,0,0)
    self._text_content.set_size("100%", "100%")
    # Scroller for the text, so we can move right and left
    self._text_scroller = LUIObject(parent=self._text_content)
    self._text_scroller.center_vertical = True
    self._text = LUILabel(parent=self._text_scroller, text="")
    # Cursor for the current position
    self._cursor = LUISprite(self._text_scroller, "blank", "skin", x=0, y=0, w=2, h=15)
    self._cursor.color = (0.5, 0.5, 0.5)
    self._cursor.margin.top = 2
    self._cursor.z_offset = 20
    # index of the cursor within self._value; hidden until focused
    self._cursor_index = 0
    self._cursor.hide()
    self._value = value
    # Placeholder text, shown when out of focus and no value exists
    self._placeholder = LUILabel(parent=self._text_content, text=placeholder, shadow=False, center_vertical=True, alpha=0.2)
    # Various states (cursor blink rate and blink timer start)
    self._tickrate = 1.0
    self._tickstart = 0.0
    self._render_text()
    # attach to the parent last, after the widget is fully built
    if parent is not None:
        self.parent = parent
    LUIInitialState.init(self, kwargs)
tobspr/LUI
[ 77, 28, 77, 16, 1409080850 ]
def value(self):
    """ Current text content of the input field """
    return self._value
tobspr/LUI
[ 77, 28, 77, 16, 1409080850 ]
def value(self, new_value):
    """ Replaces the text content, re-renders it and fires 'changed' """
    self._value = new_value
    self._render_text()
    self.trigger_event("changed", self._value)
tobspr/LUI
[ 77, 28, 77, 16, 1409080850 ]
def cursor_pos(self):
    """ Current cursor index within the field's text """
    return self._cursor_index
tobspr/LUI
[ 77, 28, 77, 16, 1409080850 ]
def cursor_pos(self, pos):
    """ Set the cursor position """
    if pos < 0:
        # negative positions count from the right end of the value
        new_index = max(len(self._value) + pos + 1, 0)
    else:
        # clamp into [0, len(value)]
        new_index = min(len(self._value), max(0, pos))
    self._cursor_index = new_index
    self._reset_cursor_tick()
    self._render_text()
tobspr/LUI
[ 77, 28, 77, 16, 1409080850 ]
def on_click(self, event):
    """ Internal click handler: clicking the field gives it focus """
    self.request_focus()
tobspr/LUI
[ 77, 28, 77, 16, 1409080850 ]
def _reset_cursor_tick(self):
    """ Restarts the cursor blink timer at the current frame time """
    self._tickstart = globalClock.get_frame_time()
tobspr/LUI
[ 77, 28, 77, 16, 1409080850 ]
def on_keydown(self, event):
    """ Internal keydown handler. Processes the special keys, and if none
    are present, redirects the event """
    key_name = event.message
    if key_name == "backspace":
        # remove the character left of the cursor, then move the cursor back
        self._value = self._value[:max(0, self._cursor_index - 1)] + self._value[self._cursor_index:]
        self.cursor_pos -= 1
        self.trigger_event("changed", self._value)
    elif key_name == "delete":
        # remove the character under/right of the cursor; cursor stays put
        post_value = self._value[min(len(self._value), self._cursor_index + 1):]
        self._value = self._value[:self._cursor_index] + post_value
        self.cursor_pos = self._cursor_index
        self.trigger_event("changed", self._value)
    elif key_name == "arrow_left":
        # with alt/ctrl, jump a whole word; otherwise one character
        if event.get_modifier_state("alt") or event.get_modifier_state("ctrl"):
            self.cursor_skip_left()
        else:
            self.cursor_pos -= 1
    elif key_name == "arrow_right":
        if event.get_modifier_state("alt") or event.get_modifier_state("ctrl"):
            self.cursor_skip_right()
        else:
            self.cursor_pos += 1
    elif key_name == "escape":
        # give up focus
        self.blur()
    elif key_name == "home":
        self.cursor_pos = 0
    elif key_name == "end":
        self.cursor_pos = len(self.value)
    # always re-emit the raw key event with the current value,
    # even when a special key was handled above
    self.trigger_event(key_name, self._value)
tobspr/LUI
[ 77, 28, 77, 16, 1409080850 ]
def on_textinput(self, event):
    """ Internal textinput handler: inserts typed text at the cursor """
    inserted = event.message
    before = self._value[:self._cursor_index]
    after = self._value[self._cursor_index:]
    self._value = before + inserted + after
    # advance the cursor past the inserted text
    self.cursor_pos = self._cursor_index + len(inserted)
    self.trigger_event("changed", self._value)
tobspr/LUI
[ 77, 28, 77, 16, 1409080850 ]
def _render_text(self):
    """ Internal method to render the text.

    Pushes the current value into the text widget, repositions the
    blinking cursor after the character at the cursor index, toggles
    the placeholder (shown only when the field is empty and unfocused),
    and scrolls the text so the cursor stays within the clip bounds.
    """
    self._text.set_text(self._value)
    # Place the cursor just right of the character at _cursor_index.
    self._cursor.left = self._text.left + \
        self._text.text_handle.get_char_pos(self._cursor_index) + 1
    # NOTE(review): 15 looks like a right-hand padding in pixels before
    # scrolling kicks in -- confirm against the widget layout.
    max_left = self.width - 15
    if self._value:
        self._placeholder.hide()
    else:
        # Placeholder is suppressed while focused even when empty.
        if not self.focused:
            self._placeholder.show()
    # Scroll if the cursor is outside of the clip bounds
    rel_pos = self.get_relative_pos(self._cursor.get_abs_pos()).x
    if rel_pos >= max_left:
        # Cursor ran off the right edge: scroll text left.
        self._text_scroller.left = min(0, max_left - self._cursor.left)
    if rel_pos <= 0:
        # Cursor ran off the left edge: scroll text right.
        self._text_scroller.left = min(0, - self._cursor.left - rel_pos)
tobspr/LUI
[ 77, 28, 77, 16, 1409080850 ]
def test_normal(self, value, expected):
    """Parametrized case: get_integer_digit(value) equals expected."""
    result = get_integer_digit(value)
    assert result == expected
thombashi/DataProperty
[ 16, 5, 16, 1, 1455893692 ]
def test_abnormal(self, value, expected):
    """Parametrized abnormal-input case for get_integer_digit."""
    result = get_integer_digit(value)
    assert result == expected
thombashi/DataProperty
[ 16, 5, 16, 1, 1455893692 ]
def test_exception(self, value, exception):
    """Parametrized case: get_integer_digit(value) raises *exception*."""
    with pytest.raises(exception):
        get_integer_digit(value)
thombashi/DataProperty
[ 16, 5, 16, 1, 1455893692 ]
def test_normal(self, value, expected):
    """Parametrized case: get_number_of_digit(value) equals expected."""
    actual = get_number_of_digit(value)
    assert actual == expected
thombashi/DataProperty
[ 16, 5, 16, 1, 1455893692 ]
def test_normal_max_decimal_places(self, value, max_decimal_places, expected):
    """Parametrized case: get_number_of_digit honors max_decimal_places."""
    actual = get_number_of_digit(value, max_decimal_places=max_decimal_places)
    assert actual == expected
thombashi/DataProperty
[ 16, 5, 16, 1, 1455893692 ]
def initialize(self, app):
    """Tornado per-request initializer; delegates to the base handler.

    :param app: application object passed through to the base
        handler's initialize().
    """
    super(EndpointHandler, self).initialize(app)
tableau/TabPy
[ 1378, 537, 1378, 10, 1475011563 ]
def put(self, name):
    """HTTP PUT /endpoints/{name}: update an existing deployed endpoint.

    Validates auth and the JSON request body, bumps the stored endpoint
    version by one, and delegates the actual update to
    _add_or_update_endpoint.  Responds 400 on empty/undecodable body or
    update error, 404 if the endpoint does not exist, 500 on unexpected
    errors; on success writes the updated endpoint description.

    NOTE(review): the body uses ``yield``, so this is a Tornado
    coroutine -- presumably decorated (e.g. ``@gen.coroutine``) at the
    definition site; confirm.
    """
    if self.should_fail_with_auth_error() != AuthErrorStates.NONE:
        self.fail_with_auth_error()
        return

    self.logger.log(logging.DEBUG, f"Processing PUT for /endpoints/{name}")

    try:
        if not self.request.body:
            self.error_out(400, "Input body cannot be empty")
            self.finish()
            return
        try:
            request_data = json.loads(self.request.body.decode("utf-8"))
        except BaseException as ex:
            self.error_out(
                400, log_message="Failed to decode input body", info=str(ex)
            )
            self.finish()
            return

        # check if endpoint exists
        endpoints = self.tabpy_state.get_endpoints(name)
        if len(endpoints) == 0:
            self.error_out(404, f"endpoint {name} does not exist.")
            self.finish()
            return

        # Updates always move to the next version number.
        new_version = int(endpoints[name]["version"]) + 1
        self.logger.log(logging.INFO, f"Endpoint info: {request_data}")
        err_msg = yield self._add_or_update_endpoint(
            "update", name, new_version, request_data
        )
        if err_msg:
            self.error_out(400, err_msg)
            self.finish()
        else:
            self.write(self.tabpy_state.get_endpoints(name))
            self.finish()
    except Exception as e:
        err_msg = format_exception(e, "update_endpoint")
        self.error_out(500, err_msg)
        self.finish()
tableau/TabPy
[ 1378, 537, 1378, 10, 1475011563 ]
def delete(self, name):
    """HTTP DELETE /endpoints/{name}: remove a deployed endpoint.

    Deletes the endpoint from the server state and, for non-alias
    endpoints, removes the backing query-object files.  Responds 404 if
    the endpoint does not exist, 400 on state/file deletion errors,
    500 on unexpected errors, 204 on success.  After the response is
    finished, on_state_change notifies the rest of the service.

    NOTE(review): the body uses ``yield``, so this is a Tornado
    coroutine -- presumably decorated at the definition site; confirm.
    """
    if self.should_fail_with_auth_error() != AuthErrorStates.NONE:
        self.fail_with_auth_error()
        return

    self.logger.log(logging.DEBUG, f"Processing DELETE for /endpoints/{name}")

    try:
        endpoints = self.tabpy_state.get_endpoints(name)
        if len(endpoints) == 0:
            self.error_out(404, f"endpoint {name} does not exist.")
            self.finish()
            return

        # update state
        try:
            endpoint_info = self.tabpy_state.delete_endpoint(name)
        except Exception as e:
            # BUGFIX: Python 3 exceptions have no ``.message`` attribute,
            # so the previous f"{e.message}" raised AttributeError inside
            # this handler; interpolate the exception itself instead.
            self.error_out(400, f"Error when removing endpoint: {e}")
            self.finish()
            return

        # delete files
        if endpoint_info["type"] != "alias":
            delete_path = get_query_object_path(
                self.settings["state_file_path"], name, None
            )
            try:
                yield self._delete_po_future(delete_path)
            except Exception as e:
                self.error_out(400, f"Error while deleting: {e}")
                self.finish()
                return

    # 204: success with no response body.
        self.set_status(204)
        self.finish()
    except Exception as e:
        err_msg = format_exception(e, "delete endpoint")
        self.error_out(500, err_msg)
        self.finish()

    # Notify the service of the state change (skipped by the early
    # error returns above, as in the original control flow).
    on_state_change(
        self.settings, self.tabpy_state, self.python_service, self.logger
    )
tableau/TabPy
[ 1378, 537, 1378, 10, 1475011563 ]
def parseline(line):
    """Parse one seed-node status line into a result dict, or None.

    Returns None for malformed/short lines, localhost IPv6, invalid
    IPv4 addresses, and nodes flagged as bad.  The returned dict holds
    the network type ('ipv4'/'ipv6'/'onion'), address string, port,
    numeric IPv4 (or None), 30-day uptime %, last-success timestamp,
    protocol version, user agent, service flags, block height and a
    sort key.
    """
    sline = line.split()
    # BUGFIX: fields up to index 11 (user agent) are read below, so a
    # line needs at least 12 columns; the old "< 11" guard allowed an
    # IndexError on sline[11] for 11-field lines.
    if len(sline) < 12:
        return None
    m = PATTERN_IPV4.match(sline[0])
    sortkey = None
    ip = None
    if m is None:
        m = PATTERN_IPV6.match(sline[0])
        if m is None:
            m = PATTERN_ONION.match(sline[0])
            if m is None:
                return None
            else:
                net = 'onion'
                ipstr = sortkey = m.group(1)
                port = int(m.group(2))
        else:
            net = 'ipv6'
            if m.group(1) in ['::']:  # Not interested in localhost
                return None
            ipstr = m.group(1)
            # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
            sortkey = ipstr
            port = int(m.group(2))
    else:
        # Do IPv4 sanity check: each octet in [0, 255], address nonzero.
        ip = 0
        for i in range(0, 4):
            if int(m.group(i + 2)) < 0 or int(m.group(i + 2)) > 255:
                return None
            ip = ip + (int(m.group(i + 2)) << (8 * (3 - i)))
        if ip == 0:
            return None
        net = 'ipv4'
        sortkey = ip
        ipstr = m.group(1)
        port = int(m.group(6))
    # Skip bad results.
    # BUGFIX: split() yields strings, so the old "sline[1] == 0" was
    # always False and never filtered anything; convert to int first.
    if int(sline[1]) == 0:
        return None
    # Extract uptime % (field carries a trailing '%').
    uptime30 = float(sline[7][:-1])
    # Extract Unix timestamp of last success.
    lastsuccess = int(sline[2])
    # Extract protocol version.
    version = int(sline[10])
    # Extract user agent (strip surrounding quote characters).
    agent = sline[11][1:-1]
    # Extract service flags (hex).
    service = int(sline[9], 16)
    # Extract blocks.
    blocks = int(sline[8])
    # Construct result.
    return {
        'net': net,
        'ip': ipstr,
        'port': port,
        'ipnum': ip,
        'uptime': uptime30,
        'lastsuccess': lastsuccess,
        'version': version,
        'agent': agent,
        'service': service,
        'blocks': blocks,
        'sortkey': sortkey,
    }
deeponion/deeponion
[ 63, 37, 63, 1, 1531515389 ]
def filtermultiport(ips):
    '''Filter out hosts with more nodes per IP'''
    # Group entries by their sort key (one key per host address).
    buckets = {}
    for entry in ips:
        buckets.setdefault(entry['sortkey'], []).append(entry)
    # Keep only hosts that appeared exactly once.
    return [group[0] for group in buckets.values() if len(group) == 1]
deeponion/deeponion
[ 63, 37, 63, 1, 1531515389 ]
def filterbyasn(ips, max_per_asn, max_per_net):
    """Limit results per ASN and per network type.

    Clearnet entries (ipv4/ipv6) are capped at max_per_net per network
    and max_per_asn per autonomous system; onion entries bypass the ASN
    lookup and are simply truncated to max_per_net.
    """
    # Sift out ips by type.
    clearnet = [ip for ip in ips if ip['net'] in ['ipv4', 'ipv6']]
    onions = [ip for ip in ips if ip['net'] == 'onion']
    picked = []
    per_net = collections.defaultdict(int)
    per_asn = collections.defaultdict(int)
    for ip in clearnet:
        if per_net[ip['net']] == max_per_net:
            continue
        asn = lookup_asn(ip['net'], ip['ip'])
        if asn is None or per_asn[asn] == max_per_asn:
            continue
        per_asn[asn] += 1
        per_net[ip['net']] += 1
        picked.append(ip)
    # Add back Onions (up to max_per_net).
    picked.extend(onions[0:max_per_net])
    return picked
deeponion/deeponion
[ 63, 37, 63, 1, 1531515389 ]
def main():
    """Filter seed-node candidates from stdin and print ip:port lines.

    Reads one status line per node from stdin, applies a sequence of
    quality filters (validity, dedup, suspicious hosts, block height,
    service bit, uptime, user agent, multi-port hosts, ASN limits),
    then prints the survivors sorted deterministically to stdout.
    Per-pass statistics are written to stderr.
    """
    lines = sys.stdin.readlines()
    ips = [parseline(line) for line in lines]
    # Header row for the per-pass statistics table (inverse video).
    print('\x1b[7m IPv4 IPv6 Onion Pass \x1b[0m', file=sys.stderr)
    print('%s Initial' % (ip_stats(ips)), file=sys.stderr)
    # Skip entries with invalid address.
    ips = [ip for ip in ips if ip is not None]
    print('%s Skip entries with invalid address' % (ip_stats(ips)), file=sys.stderr)
    # Skip duplicates (in case multiple seeds files were concatenated)
    ips = dedup(ips)
    print('%s After removing duplicates' % (ip_stats(ips)), file=sys.stderr)
    # Skip entries from suspicious hosts.
    ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
    print('%s Skip entries from suspicious hosts' % (ip_stats(ips)), file=sys.stderr)
    # Enforce minimal number of blocks.
    ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
    print('%s Enforce minimal number of blocks' % (ip_stats(ips)), file=sys.stderr)
    # Require service bit 1.
    ips = [ip for ip in ips if (ip['service'] & 1) == 1]
    print('%s Require service bit 1' % (ip_stats(ips)), file=sys.stderr)
    # Require at least 50% 30-day uptime for clearnet, 10% for onion.
    req_uptime = {
        'ipv4': 50,
        'ipv6': 50,
        'onion': 10,
    }
    ips = [ip for ip in ips if ip['uptime'] > req_uptime[ip['net']]]
    print('%s Require minimum uptime' % (ip_stats(ips)), file=sys.stderr)
    # Require a known and recent user agent.
    ips = [ip for ip in ips if PATTERN_AGENT.match(ip['agent'])]
    print('%s Require a known and recent user agent' % (ip_stats(ips)), file=sys.stderr)
    # Sort by availability (and use last success as tie breaker)
    ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
    # Filter out hosts with multiple bitcoin ports, these are likely abusive
    ips = filtermultiport(ips)
    print('%s Filter out hosts with multiple bitcoin ports' % (ip_stats(ips)), file=sys.stderr)
    # Look up ASNs and limit results, both per ASN and globally.
    ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
    print('%s Look up ASNs and limit results per ASN and per net' % (ip_stats(ips)), file=sys.stderr)
    # Sort the results by IP address (for deterministic output).
    ips.sort(key=lambda x: (x['net'], x['sortkey']))
    for ip in ips:
        if ip['net'] == 'ipv6':
            # IPv6 hosts need brackets so the port separator is unambiguous.
            print('[%s]:%i' % (ip['ip'], ip['port']))
        else:
            print('%s:%i' % (ip['ip'], ip['port']))
deeponion/deeponion
[ 63, 37, 63, 1, 1531515389 ]