hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a16e11d30299b47c532f821fca2dd6ace937154
| 63
|
py
|
Python
|
enthought/logger/log_queue_handler.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/logger/log_queue_handler.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/logger/log_queue_handler.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from apptools.logger.log_queue_handler import *
| 21
| 47
| 0.825397
| 9
| 63
| 5.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 63
| 2
| 48
| 31.5
| 0.892857
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c579bf57d3c8239cbbf68572f48bb4dfd3c32e31
| 229
|
py
|
Python
|
tests/test_heuristics.py
|
robopsi/symsynd
|
13825f10bfb9291fe7bcd13937ddf398e7104234
|
[
"BSD-3-Clause"
] | 23
|
2016-05-02T16:54:31.000Z
|
2020-03-11T00:11:47.000Z
|
tests/test_heuristics.py
|
robopsi/symsynd
|
13825f10bfb9291fe7bcd13937ddf398e7104234
|
[
"BSD-3-Clause"
] | 11
|
2016-04-07T20:21:50.000Z
|
2018-01-08T11:08:45.000Z
|
tests/test_heuristics.py
|
getsentry/symsynd
|
13825f10bfb9291fe7bcd13937ddf398e7104234
|
[
"BSD-3-Clause"
] | 9
|
2016-08-25T18:19:06.000Z
|
2021-08-20T15:55:44.000Z
|
from symsynd.heuristics import get_ip_register
def test_ip_reg():
assert get_ip_register({'pc': '0x42'}, 'arm7') == int('42', 16)
assert get_ip_register({}, 'arm7') == None
assert get_ip_register({}, 'x86') == None
| 28.625
| 67
| 0.659389
| 33
| 229
| 4.272727
| 0.575758
| 0.141844
| 0.368794
| 0.404255
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057292
| 0.161572
| 229
| 7
| 68
| 32.714286
| 0.677083
| 0
| 0
| 0
| 0
| 0
| 0.082969
| 0
| 0
| 0
| 0.017467
| 0
| 0.6
| 1
| 0.2
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c5e1df9e08451362177ca6d79508215a49b3b902
| 25
|
py
|
Python
|
home/__init__.py
|
atharva-naik/talk-to-my-bots
|
70b8a989c39680c618170bba7b75c7931f0cc03f
|
[
"MIT"
] | null | null | null |
home/__init__.py
|
atharva-naik/talk-to-my-bots
|
70b8a989c39680c618170bba7b75c7931f0cc03f
|
[
"MIT"
] | null | null | null |
home/__init__.py
|
atharva-naik/talk-to-my-bots
|
70b8a989c39680c618170bba7b75c7931f0cc03f
|
[
"MIT"
] | null | null | null |
# Notice me GitHub Senpai
| 25
| 25
| 0.8
| 4
| 25
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.952381
| 0.92
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
68000a243e2eb98e4c1a9e90a88b088771319bab
| 57
|
py
|
Python
|
test_all.py
|
Mac-AI/BNA-traffic-mapper
|
9fcc3f516e18e19704444b6b848fc8aa356007bc
|
[
"MIT"
] | 4
|
2021-03-16T12:34:49.000Z
|
2021-04-15T01:16:32.000Z
|
test_all.py
|
McMasterAI/BNA-traffic-mapper
|
9fcc3f516e18e19704444b6b848fc8aa356007bc
|
[
"MIT"
] | 49
|
2020-10-13T19:03:07.000Z
|
2021-02-06T17:47:49.000Z
|
test_all.py
|
Mac-AI/BNA-traffic-mapper
|
9fcc3f516e18e19704444b6b848fc8aa356007bc
|
[
"MIT"
] | 3
|
2021-02-21T19:44:55.000Z
|
2021-06-28T22:26:36.000Z
|
import pytest
def test_pytest():
assert True == True
| 14.25
| 23
| 0.701754
| 8
| 57
| 4.875
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 57
| 4
| 23
| 14.25
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
680b846713f2168bb27be758bd65c44c552e00b9
| 204
|
py
|
Python
|
picograd/utils/system.py
|
akamaus/picograd
|
fe3a377806d3abd389a59d48981123c569c0e545
|
[
"MIT"
] | null | null | null |
picograd/utils/system.py
|
akamaus/picograd
|
fe3a377806d3abd389a59d48981123c569c0e545
|
[
"MIT"
] | null | null | null |
picograd/utils/system.py
|
akamaus/picograd
|
fe3a377806d3abd389a59d48981123c569c0e545
|
[
"MIT"
] | null | null | null |
import platform
import os
import shutil
sys = platform.system()
def link(origin, link_path):
if sys == "Windows":
shutil.copy(origin, link_path)
else:
os.link(origin, link_path)
| 17
| 38
| 0.661765
| 28
| 204
| 4.714286
| 0.5
| 0.227273
| 0.318182
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230392
| 204
| 11
| 39
| 18.545455
| 0.840764
| 0
| 0
| 0
| 0
| 0
| 0.034314
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.333333
| 0
| 0.444444
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
a8b56afa7f247c01e76e136f4023f03095a33cc9
| 1,797
|
py
|
Python
|
test.py
|
greysondn/lmpkit
|
714e56cedea03ddb182f2ad301ff355ad59d8559
|
[
"MIT"
] | null | null | null |
test.py
|
greysondn/lmpkit
|
714e56cedea03ddb182f2ad301ff355ad59d8559
|
[
"MIT"
] | null | null | null |
test.py
|
greysondn/lmpkit
|
714e56cedea03ddb182f2ad301ff355ad59d8559
|
[
"MIT"
] | null | null | null |
import unittest
import run as lmpkit
# just some notes on prboom+ compat levels so I can't botch this
#
# test_01 - 3 - Doom Ultimate - Doom 1 - E1L1
# test_02 - 3 - Doom Ultimate - Doom 1 - E1L1
# test_03 - 3 - Doom Ultimate - Doom 1 - E1L1
# test_04 - 17 - PRBoom 6 - Doom 1 - E1L1
# test_05 - 16 - PRBoom 5 - Doom 1 - E1L1
#
# probably they hid this somewhere
# needs must locate when I get a chance
class TestLoads(unittest.TestCase):
'''
Just tests basic loading for files into framework
'''
def test_01(self):
lmpkit.createDemoLumpFromFile("test_files/test_01.lmp")
def test_02(self):
lmpkit.createDemoLumpFromFile("test_files/test_02.lmp")
def test_03(self):
lmpkit.createDemoLumpFromFile("test_files/test_03.lmp")
def test_04(self):
lmpkit.createDemoLumpFromFile("test_files/test_04.lmp")
def test_05(self):
lmpkit.createDemoLumpFromFile("test_files/test_05.lmp")
class TestLoadedLengths(unittest.TestCase):
'''
Tests lengths of loaded files
'''
def test_01(self):
# prboom says 1420 tics
lmpkit.createDemoLumpFromFile("test_files/test_01.lmp")
def test_02(self):
# prboom says 2204 tics
lmpkit.createDemoLumpFromFile("test_files/test_02.lmp")
def test_03(self):
# prboom says 694 tics
lmpkit.createDemoLumpFromFile("test_files/test_03.lmp")
def test_04(self):
# prboom says ??? tics
lmpkit.createDemoLumpFromFile("test_files/test_04.lmp")
def test_05(self):
# prboom says ??? tics
lmpkit.createDemoLumpFromFile("test_files/test_05.lmp")
if __name__ == '__main__':
unittest.main()
| 30.457627
| 71
| 0.634947
| 226
| 1,797
| 4.858407
| 0.309735
| 0.063752
| 0.291439
| 0.336976
| 0.624772
| 0.624772
| 0.6102
| 0.492714
| 0.492714
| 0.429873
| 0
| 0.064787
| 0.269894
| 1,797
| 59
| 72
| 30.457627
| 0.772104
| 0.316082
| 0
| 0.769231
| 0
| 0
| 0.192405
| 0.185654
| 0
| 0
| 0
| 0
| 0
| 1
| 0.384615
| false
| 0
| 0.076923
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
763efba1c1e1a8c2abb3acc78d6e859012d73e7a
| 30
|
py
|
Python
|
punkin/_util.py
|
artPlusPlus/punkin
|
0265b06d67463a5aaf27e0b2a771220d41cae5ee
|
[
"MIT"
] | 1
|
2017-10-24T10:02:29.000Z
|
2017-10-24T10:02:29.000Z
|
punkin/_util.py
|
artPlusPlus/punkin
|
0265b06d67463a5aaf27e0b2a771220d41cae5ee
|
[
"MIT"
] | null | null | null |
punkin/_util.py
|
artPlusPlus/punkin
|
0265b06d67463a5aaf27e0b2a771220d41cae5ee
|
[
"MIT"
] | null | null | null |
class UNSET(object):
pass
| 10
| 20
| 0.666667
| 4
| 30
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.233333
| 30
| 2
| 21
| 15
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
767bb532037fece4e8c5824d65b5f92a74494d11
| 102
|
py
|
Python
|
holland/core/backup/__init__.py
|
a5a351e7/holland
|
58a12a5ce10206eed9434ab42b02217de29784bb
|
[
"BSD-3-Clause"
] | null | null | null |
holland/core/backup/__init__.py
|
a5a351e7/holland
|
58a12a5ce10206eed9434ab42b02217de29784bb
|
[
"BSD-3-Clause"
] | null | null | null |
holland/core/backup/__init__.py
|
a5a351e7/holland
|
58a12a5ce10206eed9434ab42b02217de29784bb
|
[
"BSD-3-Clause"
] | null | null | null |
"""
import from core
"""
from holland.core.backup.base import BackupError, BackupRunner, BackupPlugin
| 20.4
| 76
| 0.784314
| 12
| 102
| 6.666667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107843
| 102
| 4
| 77
| 25.5
| 0.879121
| 0.156863
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
76a6f6057de06e05cbd262c3c522b74350da7d41
| 76
|
py
|
Python
|
loew/__init__.py
|
hfcredidio/loew
|
3ec23337808cc25280152f64981f23a709064f83
|
[
"MIT"
] | 5
|
2016-08-03T16:49:43.000Z
|
2022-01-26T00:33:10.000Z
|
loew/__init__.py
|
hfcredidio/loew
|
3ec23337808cc25280152f64981f23a709064f83
|
[
"MIT"
] | null | null | null |
loew/__init__.py
|
hfcredidio/loew
|
3ec23337808cc25280152f64981f23a709064f83
|
[
"MIT"
] | null | null | null |
import loew.chordal
import loew.radial
import loew.dipolar
import loew.misc
| 15.2
| 19
| 0.842105
| 12
| 76
| 5.333333
| 0.5
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 76
| 4
| 20
| 19
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
76aff678dbc52a57421cf0cf712cbeddc4d611fb
| 156
|
py
|
Python
|
quantorch/tensor.py
|
jialuechen/quantorch
|
6d61afa061f10a08f62a275055b3c3c565643dec
|
[
"MIT"
] | null | null | null |
quantorch/tensor.py
|
jialuechen/quantorch
|
6d61afa061f10a08f62a275055b3c3c565643dec
|
[
"MIT"
] | null | null | null |
quantorch/tensor.py
|
jialuechen/quantorch
|
6d61afa061f10a08f62a275055b3c3c565643dec
|
[
"MIT"
] | null | null | null |
import torch
def steps(end:float,steps=None,dtype=None,device=None)->torch.Tensor:
return torch.linspace(0.0,end,steps+1,dtype=dtype,device=device)[1:]
| 39
| 72
| 0.762821
| 27
| 156
| 4.407407
| 0.518519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027397
| 0.064103
| 156
| 4
| 72
| 39
| 0.787671
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
4f31984279b6658cc35f2215383e08b76f30bfbb
| 13,136
|
py
|
Python
|
mkdocs_awesome_pages_plugin/tests/e2e/test_nav.py
|
Owen-Liuyuxuan/mkdocs-awesome-pages-plugin
|
961363989877cbe4e4f9d0acda9ff22e352be9e1
|
[
"MIT"
] | 226
|
2018-02-07T09:58:36.000Z
|
2022-03-31T16:33:54.000Z
|
mkdocs_awesome_pages_plugin/tests/e2e/test_nav.py
|
Owen-Liuyuxuan/mkdocs-awesome-pages-plugin
|
961363989877cbe4e4f9d0acda9ff22e352be9e1
|
[
"MIT"
] | 55
|
2018-02-07T10:36:38.000Z
|
2022-03-16T03:23:47.000Z
|
mkdocs_awesome_pages_plugin/tests/e2e/test_nav.py
|
Owen-Liuyuxuan/mkdocs-awesome-pages-plugin
|
961363989877cbe4e4f9d0acda9ff22e352be9e1
|
[
"MIT"
] | 30
|
2018-05-01T17:27:03.000Z
|
2022-03-04T07:33:28.000Z
|
from .base import E2ETestCase
from ...meta import DuplicateRestItemError
from ...navigation import NavEntryNotFound
class TestNav(E2ETestCase):
def test_all_listed(self):
navigation = self.mkdocs(self.config, [
'1.md',
'2.md',
('a', [
'1.md',
'2.md',
self.pagesFile(nav=[
'2.md',
'1.md'
])
]),
self.pagesFile(nav=[
'a',
'2.md',
'1.md'
])
])
self.assertEqual(navigation, [
('A', [
('2', '/a/2'),
('1', '/a/1')
]),
('2', '/2'),
('1', '/1')
])
def test_some_listed(self):
navigation = self.mkdocs(self.config, [
'1.md',
'2.md',
('a', [
'1.md',
'2.md',
self.pagesFile(nav=[
'1.md'
])
]),
self.pagesFile(nav=[
'a',
'1.md'
])
])
self.assertEqual(navigation, [
('A', [
('1', '/a/1')
]),
('1', '/1')
])
def test_none_listed(self):
navigation = self.mkdocs(self.config, [
'1.md',
('a', [
'1.md',
'2.md',
self.pagesFile(nav=[])
])
])
self.assertEqual(navigation, [
('1', '/1')
])
def test_rest(self):
navigation = self.mkdocs(self.config, [
'1.md',
'2.md',
('a', [
'1.md',
'2.md',
'3.md',
'4.md',
self.pagesFile(nav=[
'2.md',
'...',
'1.md'
])
]),
self.pagesFile(nav=[
'2.md',
'...'
])
])
self.assertEqual(navigation, [
('2', '/2'),
('1', '/1'),
('A', [
('2', '/a/2'),
('3', '/a/3'),
('4', '/a/4'),
('1', '/a/1')
])
])
def test_rest_empty(self):
navigation = self.mkdocs(self.config, [
'1.md',
('a', [
'1.md',
'2.md',
self.pagesFile(nav=[
'2.md',
'...',
'1.md'
])
]),
self.pagesFile(nav=[
'a',
'...',
'1.md'
])
])
self.assertEqual(navigation, [
('A', [
('2', '/a/2'),
('1', '/a/1')
]),
('1', '/1')
])
def test_rest_glob(self):
navigation = self.mkdocs(self.config, [
'1.md',
'2a.md',
'2b.md',
'3.md',
self.pagesFile(nav=[
'1.md',
'... | 2*.md',
'1.md'
])
])
self.assertEqual(navigation, [
('1', '/1'),
('2a', '/2a'),
('2b', '/2b'),
('1', '/1')
])
def test_rest_glob_section(self):
navigation = self.mkdocs(self.config, [
'a.md',
'b.md',
('a', [
'1a.md',
'1b.md',
'2a.md',
'2b.md',
self.pagesFile(nav=[
'... | *b.md',
'...'
])
]),
self.pagesFile(nav=[
'... | a*',
'b.md'
])
])
self.assertEqual(navigation, [
('A', '/a'),
('A', [
('1b', '/a/1b'),
('2b', '/a/2b'),
('1a', '/a/1a'),
('2a', '/a/2a')
]),
('B', '/b')
])
def test_rest_glob_precedence(self):
navigation = self.mkdocs(self.config, [
'1.md',
'1a.md',
'1b.md',
'2.md',
'2a.md',
'2b.md',
self.pagesFile(nav=[
'...',
{'Link 1': '/link1'},
'... | 1*.md',
{'Link 2': '/link2'},
'... | *[ab].md'
])
])
self.assertEqual(navigation, [
('2', '/2'),
('Link 1', '/link1'),
('1', '/1'),
('1a', '/1a'),
('1b', '/1b'),
('Link 2', '/link2'),
('2a', '/2a'),
('2b', '/2b')
])
def test_rest_regex(self):
navigation = self.mkdocs(self.config, [
'1.md',
'2a.md',
'2b.md',
'3.md',
self.pagesFile(nav=[
'1.md',
r'... | regex=2\w*\.md',
'1.md'
])
])
self.assertEqual(navigation, [
('1', '/1'),
('2a', '/2a'),
('2b', '/2b'),
('1', '/1')
])
def test_rest_regex_section(self):
navigation = self.mkdocs(self.config, [
'a.md',
'b.md',
('a', [
'1a.md',
'1b.md',
'2a.md',
'2b.md',
self.pagesFile(nav=[
r'... | regex=\w*b\.md',
'...'
])
]),
self.pagesFile(nav=[
r'... | regex=a\w*',
'b.md'
])
])
self.assertEqual(navigation, [
('A', '/a'),
('A', [
('1b', '/a/1b'),
('2b', '/a/2b'),
('1a', '/a/1a'),
('2a', '/a/2a')
]),
('B', '/b')
])
def test_rest_regex_precedence(self):
navigation = self.mkdocs(self.config, [
'1.md',
'1a.md',
'1b.md',
'2.md',
'2a.md',
'2b.md',
self.pagesFile(nav=[
'...',
{'Link 1': '/link1'},
r'... | regex=1\w*\.md',
{'Link 2': '/link2'},
r'... | regex=\w*[ab]\.md'
])
])
self.assertEqual(navigation, [
('2', '/2'),
('Link 1', '/link1'),
('1', '/1'),
('1a', '/1a'),
('1b', '/1b'),
('Link 2', '/link2'),
('2a', '/2a'),
('2b', '/2b')
])
def test_title(self):
navigation = self.mkdocs(self.config, [
'1.md',
'2.md',
('a', [
'1.md',
'2.md',
self.pagesFile(nav=[
'1.md',
{'Title 2': '2.md'}
])
]),
self.pagesFile(nav=[
{'Title 1': '1.md'},
'2.md',
{'Title A': 'a'}
])
])
self.assertEqual(navigation, [
('Title 1', '/1'),
('2', '/2'),
('Title A', [
('1', '/a/1'),
('Title 2', '/a/2')
])
])
def test_title_conflict(self):
navigation = self.mkdocs(self.config, [
('a', [
'1.md',
'2.md',
self.pagesFile(title='Title Meta')
]),
self.pagesFile(nav=[
{'Title Nav': 'a'},
'...'
])
])
self.assertEqual(navigation, [
('Title Meta', [
('1', '/a/1'),
('2', '/a/2')
])
])
def test_link(self):
navigation = self.mkdocs(self.config, [
'1.md',
('a', [
'1.md',
self.pagesFile(nav=[
'...',
{'Internal Link': '/link'},
{'External Link': 'https://lukasgeiter.com'}
])
]),
self.pagesFile(nav=[
'...',
{'Internal Link': '/link'},
{'External Link': 'https://lukasgeiter.com'}
])
])
self.assertEqual(navigation, [
('1', '/1'),
('A', [
('1', '/a/1'),
('Internal Link', '/link'),
('External Link', 'https://lukasgeiter.com')
]),
('Internal Link', '/link'),
('External Link', 'https://lukasgeiter.com')
])
def test_collapsed(self):
navigation = self.mkdocs(self.createConfig(collapse_single_pages=True), [
('a', [
'1.md',
'2.md'
]),
('b', [
'1.md'
]),
self.pagesFile(arrange=[
'b',
'a'
])
])
self.assertEqual(navigation, [
('1', '/b/1'),
('A', [
('1', '/a/1'),
('2', '/a/2')
])
])
def test_duplicate_file(self):
navigation = self.mkdocs(self.createConfig(mkdocs_nav=[
{'1a': '1.md'},
{'2': '2.md'},
{'1b': '1.md'}
]), [
'1.md',
'2.md',
self.pagesFile(nav=[
'2.md',
'1.md'
])
])
self.assertEqual(navigation, [
('2', '/2'),
('1b', '/1')
])
def test_duplicate_file_rest(self):
navigation = self.mkdocs(self.createConfig(mkdocs_nav=[
{'1a': '1.md'},
{'2': '2.md'},
{'1b': '1.md'}
]), [
'1.md',
'2.md',
self.pagesFile(nav=[
'2.md',
'...'
])
])
self.assertEqual(navigation, [
('2', '/2'),
('1a', '/1'),
('1b', '/1')
])
def test_duplicate_entry(self):
navigation = self.mkdocs(self.config, [
'1.md',
('a', [
'1.md',
'2.md',
self.pagesFile(nav=[
'2.md',
'...',
'2.md'
])
]),
self.pagesFile(nav=[
'a',
'...',
'a'
])
])
self.assertEqual(navigation, [
('A', [
('2', '/a/2'),
('1', '/a/1'),
('2', '/a/2')
]),
('1', '/1'),
('A', [
('2', '/a/2'),
('1', '/a/1'),
('2', '/a/2')
])
])
def test_duplicate_entry_title(self):
navigation = self.mkdocs(self.config, [
'1.md',
('a', [
'1.md',
'2.md',
self.pagesFile(nav=[
{'2a': '2.md'},
'...',
{'2b': '2.md'}
])
]),
self.pagesFile(nav=[
{'AA': 'a'},
'...',
{'AB': 'a'}
])
])
self.assertEqual(navigation, [
('AB', [
('2b', '/a/2'),
('1', '/a/1'),
('2b', '/a/2')
]),
('1', '/1'),
('AB', [
('2b', '/a/2'),
('1', '/a/1'),
('2b', '/a/2')
])
])
def test_duplicate_rest_token(self):
with self.assertRaises(DuplicateRestItemError):
self.mkdocs(self.config, [
'1.md',
'2.md',
self.pagesFile(nav=[
'...',
'1.md',
'...'
])
])
def test_not_found(self):
with self.assertRaises(NavEntryNotFound):
self.mkdocs(self.config, [
self.pagesFile(nav=[
'1.md',
'...'
])
])
def test_not_found_strict(self):
with self.assertRaises(NavEntryNotFound):
self.mkdocs(self.createConfig(strict=True), [
self.pagesFile(nav=[
'1.md',
'...'
])
])
def test_not_found_not_strict(self):
with self.assertWarns(NavEntryNotFound):
self.mkdocs(self.createConfig(strict=False), [
self.pagesFile(nav=[
'1.md',
'...'
])
])
| 24.599251
| 81
| 0.279613
| 1,007
| 13,136
| 3.591857
| 0.070506
| 0.0423
| 0.141554
| 0.134366
| 0.830799
| 0.754216
| 0.705281
| 0.675422
| 0.610451
| 0.565662
| 0
| 0.04702
| 0.533724
| 13,136
| 533
| 82
| 24.645403
| 0.54351
| 0
| 0
| 0.869121
| 0
| 0
| 0.113124
| 0
| 0
| 0
| 0
| 0
| 0.047035
| 1
| 0.047035
| false
| 0
| 0.006135
| 0
| 0.055215
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4f3d6bc0ac21d69b1b6c6d9eeaa5ab7246dc0c91
| 281
|
py
|
Python
|
services/traction/api/protocols/v1/issuer/__init__.py
|
bcgov/traction
|
90cec4f1aebccd68eb986cb89dfae5819a07a2ee
|
[
"Apache-2.0"
] | 12
|
2022-01-29T20:30:03.000Z
|
2022-03-29T11:46:14.000Z
|
services/traction/api/protocols/v1/issuer/__init__.py
|
bcgov/traction
|
90cec4f1aebccd68eb986cb89dfae5819a07a2ee
|
[
"Apache-2.0"
] | 38
|
2021-11-22T17:52:50.000Z
|
2022-03-31T17:52:00.000Z
|
services/traction/api/protocols/v1/issuer/__init__.py
|
bcgov/traction
|
90cec4f1aebccd68eb986cb89dfae5819a07a2ee
|
[
"Apache-2.0"
] | 9
|
2021-11-22T18:05:48.000Z
|
2022-03-29T11:25:08.000Z
|
from .issuer_credential_revocation_updater import IssuerCredentialRevocationUpdater
from .issuer_credential_status_updater import IssuerCredentialStatusUpdater
def subscribe_issuer_protocol_listeners():
IssuerCredentialStatusUpdater()
IssuerCredentialRevocationUpdater()
| 35.125
| 83
| 0.88968
| 21
| 281
| 11.47619
| 0.619048
| 0.082988
| 0.165975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078292
| 281
| 7
| 84
| 40.142857
| 0.930502
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4f6441f761a1813e59f2f83951669b631518a539
| 261
|
py
|
Python
|
lib/__init__.py
|
magnetic-lab/tdgg
|
3a80ed82f4b6d1cd2e7a127e079a27141b2e0422
|
[
"MIT"
] | 5
|
2019-08-14T07:24:10.000Z
|
2020-04-15T11:47:48.000Z
|
lib/__init__.py
|
magnetic-lab/tdgg
|
3a80ed82f4b6d1cd2e7a127e079a27141b2e0422
|
[
"MIT"
] | null | null | null |
lib/__init__.py
|
magnetic-lab/tdgg
|
3a80ed82f4b6d1cd2e7a127e079a27141b2e0422
|
[
"MIT"
] | null | null | null |
"""Core Modules for TDGam."""
import sys
from os import path as osp
from .project import TDGamProject
from .touchdesigner_ui import TDGamProjectUI
from .component import TDGamComponent
from .touchdesigner_ui import TDGamComponentUI
from .maglapath import Path
| 26.1
| 46
| 0.827586
| 34
| 261
| 6.294118
| 0.588235
| 0.093458
| 0.17757
| 0.233645
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126437
| 261
| 9
| 47
| 29
| 0.938596
| 0.088123
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
96d7759424d4411f8fb55f8e9eb0540ba47475b4
| 1,457
|
py
|
Python
|
datamanagement/rules.py
|
truemrwalker/mads-app
|
79481293af2c0ce5533ab9ebd24868965c3c0031
|
[
"MIT"
] | null | null | null |
datamanagement/rules.py
|
truemrwalker/mads-app
|
79481293af2c0ce5533ab9ebd24868965c3c0031
|
[
"MIT"
] | 2
|
2021-04-22T06:57:27.000Z
|
2021-08-06T03:19:42.000Z
|
datamanagement/rules.py
|
truemrwalker/mads-app
|
79481293af2c0ce5533ab9ebd24868965c3c0031
|
[
"MIT"
] | 2
|
2021-02-12T01:19:44.000Z
|
2021-05-14T06:54:34.000Z
|
# rules.py
import rules
# from rules import Predicate
from rules import predicates
from common import rules as common_rules
from .models import DataSource
rules.add_rule('can_list_datasources', predicates.always_allow)
rules.add_rule('can_edit_datasource',
common_rules.is_resource_owner | predicates.is_superuser)
rules.add_rule('can_delete_datasource',
common_rules.is_resource_owner | predicates.is_superuser)
rules.add_rule('can_read_datasource',
common_rules.is_public | common_rules.is_resource_owner | predicates.is_superuser
| (common_rules.is_internal & (common_rules.is_resource_shareduser |
common_rules.is_resource_sharedgroupmember)))
rules.add_rule('datamanagement.list_datasources', predicates.always_allow)
rules.add_perm('datamanagement.change_datasource',
common_rules.is_resource_owner | predicates.is_superuser)
rules.add_perm('datamanagement.delete_datasource',
common_rules.is_resource_owner | predicates.is_superuser)
rules.add_perm('datamanagement.read_datasource',
common_rules.is_public | common_rules.is_resource_owner | predicates.is_superuser
| (common_rules.is_internal & (common_rules.is_resource_shareduser |
common_rules.is_resource_sharedgroupmember)))
rules.add_perm('datamanagement.add_datasource', predicates.is_authenticated)
| 44.151515
| 96
| 0.746054
| 170
| 1,457
| 5.982353
| 0.188235
| 0.162242
| 0.178958
| 0.20649
| 0.753196
| 0.753196
| 0.753196
| 0.666667
| 0.666667
| 0.666667
| 0
| 0
| 0.179822
| 1,457
| 32
| 97
| 45.53125
| 0.851046
| 0.024708
| 0
| 0.434783
| 0
| 0
| 0.164316
| 0.123413
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.173913
| 0
| 0.173913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8c56c613e4fecc6315dd3542ff36bde4dad783f3
| 402
|
gyp
|
Python
|
deps/libgdal/gyp-formats/ogr_pgdump.gyp
|
AmristarSolutions/node-gdal-next
|
8c0a7d9b26c240bf04abbf1b1de312b0691b3d88
|
[
"Apache-2.0"
] | 57
|
2020-02-08T17:52:17.000Z
|
2021-10-14T03:45:09.000Z
|
deps/libgdal/gyp-formats/ogr_pgdump.gyp
|
AmristarSolutions/node-gdal-next
|
8c0a7d9b26c240bf04abbf1b1de312b0691b3d88
|
[
"Apache-2.0"
] | 47
|
2020-02-12T16:41:40.000Z
|
2021-09-28T22:27:56.000Z
|
deps/libgdal/gyp-formats/ogr_pgdump.gyp
|
AmristarSolutions/node-gdal-next
|
8c0a7d9b26c240bf04abbf1b1de312b0691b3d88
|
[
"Apache-2.0"
] | 8
|
2020-03-17T11:18:07.000Z
|
2021-10-14T03:45:15.000Z
|
{
"includes": [
"../common.gypi"
],
"targets": [
{
"target_name": "libgdal_ogr_pgdump_frmt",
"type": "static_library",
"sources": [
"../gdal/ogr/ogrsf_frmts/pgdump/ogrpgdumplayer.cpp",
"../gdal/ogr/ogrsf_frmts/pgdump/ogrpgdumpdatasource.cpp",
"../gdal/ogr/ogrsf_frmts/pgdump/ogrpgdumpdriver.cpp"
],
"include_dirs": [
"../gdal/ogr/ogrsf_frmts/pgdump"
]
}
]
}
| 20.1
| 61
| 0.619403
| 42
| 402
| 5.690476
| 0.547619
| 0.117155
| 0.200837
| 0.284519
| 0.410042
| 0.217573
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171642
| 402
| 19
| 62
| 21.157895
| 0.717718
| 0
| 0
| 0.105263
| 0
| 0
| 0.70398
| 0.512438
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8c58fc2ee6852e6d9dc2dd40504654c42bf89e3a
| 1,600
|
py
|
Python
|
Naloge/nal8.py
|
vitorozman/Project-Euler
|
9bd5e8b71b950c4d5d27d4674f0108bb71210504
|
[
"MIT"
] | null | null | null |
Naloge/nal8.py
|
vitorozman/Project-Euler
|
9bd5e8b71b950c4d5d27d4674f0108bb71210504
|
[
"MIT"
] | null | null | null |
Naloge/nal8.py
|
vitorozman/Project-Euler
|
9bd5e8b71b950c4d5d27d4674f0108bb71210504
|
[
"MIT"
] | null | null | null |
def razbi_stevilo(n, st):
s = []
st = str(st)
while len(st) > n:
stevilo = st[:n]
s.append(stevilo)
st = st[1:]
return s #seznam 13 mestnih steil podanih v nizu
def razbi_stevke(n):
sez_stevk = []
for stevka in n:
sez_stevk.append(int(stevka))
sez_stevk.sort()
return sez_stevk
def max_zmnozek(n, st):
s = razbi_stevilo(n, st)
sez_stevil = []
for ste in s:
sez_stevil.append(razbi_stevke(ste))
maxi = max(sez_stevil)
zmnozek = 1
for stevka in maxi:
zmnozek *= stevka
return zmnozek
st = 7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450
| 53.333333
| 1,005
| 0.841875
| 90
| 1,600
| 14.833333
| 0.366667
| 0.02397
| 0.019476
| 0.022472
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.710545
| 0.116875
| 1,600
| 30
| 1,005
| 53.333333
| 0.234253
| 0.02375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0.12
| false
| 0
| 0
| 0
| 0.24
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8c65121a281005f15f6eb2c3c86edf46fd1d78a2
| 2,722
|
py
|
Python
|
unit_tests/test_app.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | 1
|
2019-10-03T13:58:29.000Z
|
2019-10-03T13:58:29.000Z
|
unit_tests/test_app.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | null | null | null |
unit_tests/test_app.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | 1
|
2021-04-11T05:24:57.000Z
|
2021-04-11T05:24:57.000Z
|
from flask_testing import TestCase
from flask import g
from maintain_frontend import main
from unittest.mock import patch
from maintain_frontend.dependencies.session_api.session import Session
class TestApp(TestCase):
def create_app(self):
main.app.config['PRESERVE_CONTEXT_ON_EXCEPTION'] = False
return main.app
@patch('maintain_frontend.app.Session')
def test_trace_id_being_set_from_header(self, mock_session):
with main.app.app_context():
mock_session.return_value.valid.return_value = True
mock_session.session_cookie_name = Session.session_cookie_name
self.client.set_cookie('localhost', Session.session_cookie_name,
'cookie_value')
response = self.client.get('/', headers={'X-Trace-ID': '123'})
self.assert200(response)
self.assertIsNotNone(g.trace_id)
self.assertEqual(g.trace_id, '123')
@patch('maintain_frontend.app.Session')
def test_trace_id_being_generated(self, mock_session):
with main.app.app_context():
mock_session.return_value.valid.return_value = True
mock_session.session_cookie_name = Session.session_cookie_name
self.client.set_cookie('localhost', Session.session_cookie_name,
'cookie_value')
response = self.client.get('/')
self.assert200(response)
self.assertIsNotNone(g.trace_id)
self.assertEqual(len(g.trace_id), 32)
def test_if_session_key_none_redirected_to_login(self):
with main.app.app_context():
response = self.client.get('/')
self.assertStatus(response, status_code=302)
self.assert_redirects(response, '/sign-in')
cookies = response.headers.getlist('Set-Cookie')
self.assertIsNotNone(cookies)
self.assertIn('Location=/', cookies[0].replace('"', ''))
@patch('maintain_frontend.app.Session')
def test_if_session_invalid_one_redirected_to_logout(self, mock_session):
with main.app.app_context():
mock_session.return_value.valid.return_value = False
mock_session.session_cookie_name = Session.session_cookie_name
self.client.set_cookie('localhost', Session.session_cookie_name,
'cookie_value')
response = self.client.get('/')
self.assertStatus(response, status_code=302)
self.assert_redirects(response, '/logout')
cookies = response.headers.getlist('Set-Cookie')
self.assertIsNotNone(cookies)
self.assertIn('Location=/', cookies[0].replace('"', ''))
| 45.366667
| 77
| 0.652461
| 310
| 2,722
| 5.451613
| 0.245161
| 0.05858
| 0.106509
| 0.127811
| 0.751479
| 0.739053
| 0.739053
| 0.716568
| 0.716568
| 0.716568
| 0
| 0.01068
| 0.243204
| 2,722
| 59
| 78
| 46.135593
| 0.809709
| 0
| 0
| 0.634615
| 0
| 0
| 0.094048
| 0.042616
| 0
| 0
| 0
| 0
| 0.269231
| 1
| 0.096154
| false
| 0
| 0.096154
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4fbfe200c930a824972da872234e6e6f2d7f9952
| 536
|
py
|
Python
|
hubconf.py
|
kirilkoroves/torchvision-0.3.0
|
39f46d141f6a7ac2b094545c33936ad4500d3c7d
|
[
"BSD-3-Clause"
] | 10
|
2020-07-30T23:30:01.000Z
|
2021-11-21T16:51:01.000Z
|
hubconf.py
|
kirilkoroves/torchvision-0.3.0
|
39f46d141f6a7ac2b094545c33936ad4500d3c7d
|
[
"BSD-3-Clause"
] | 1
|
2022-01-26T09:14:12.000Z
|
2022-01-26T09:14:12.000Z
|
hubconf.py
|
kirilkoroves/torchvision-0.3.0
|
39f46d141f6a7ac2b094545c33936ad4500d3c7d
|
[
"BSD-3-Clause"
] | 5
|
2021-04-23T23:19:08.000Z
|
2021-08-31T07:10:00.000Z
|
# Optional list of dependencies required by the package
dependencies = ['torch']
from torchvision.models.alexnet import alexnet
from torchvision.models.densenet import densenet121, densenet169, densenet201, densenet161
from torchvision.models.inception import inception_v3
from torchvision.models.resnet import resnet18, resnet34, resnet50, resnet101, resnet152
from torchvision.models.squeezenet import squeezenet1_0, squeezenet1_1
from torchvision.models.vgg import vgg11, vgg13, vgg16, vgg19, vgg11_bn, vgg13_bn, vgg16_bn, vgg19_bn
| 53.6
| 101
| 0.841418
| 68
| 536
| 6.529412
| 0.544118
| 0.202703
| 0.283784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092975
| 0.097015
| 536
| 9
| 102
| 59.555556
| 0.82438
| 0.098881
| 0
| 0
| 0
| 0
| 0.010395
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.857143
| 0
| 0.857143
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4fdb234636a20ef1eda7a6b376fe1f3fa5807305
| 897
|
py
|
Python
|
recursion/test_eating_cookies.py
|
Oyekunle-Mark/tangled-mystery
|
ae789d5b05a4791594427f23d52249af81748ff4
|
[
"MIT"
] | null | null | null |
recursion/test_eating_cookies.py
|
Oyekunle-Mark/tangled-mystery
|
ae789d5b05a4791594427f23d52249af81748ff4
|
[
"MIT"
] | null | null | null |
recursion/test_eating_cookies.py
|
Oyekunle-Mark/tangled-mystery
|
ae789d5b05a4791594427f23d52249af81748ff4
|
[
"MIT"
] | 1
|
2021-09-03T16:17:47.000Z
|
2021-09-03T16:17:47.000Z
|
import unittest
from eating_cookies import eating_cookies
class Test(unittest.TestCase):
def test_eating_cookies_small_n(self):
self.assertEqual(eating_cookies(0), 1)
self.assertEqual(eating_cookies(1), 1)
self.assertEqual(eating_cookies(2), 2)
self.assertEqual(eating_cookies(5), 13)
self.assertEqual(eating_cookies(10), 274)
def test_eating_cookies_large_n(self):
self.assertEqual(eating_cookies(
50, [0 for i in range(51)]), 10562230626642)
self.assertEqual(eating_cookies(
100, [0 for i in range(101)]), 180396380815100901214157639)
self.assertEqual(eating_cookies(500, [0 for i in range(
501)]), 1306186569702186634983475450062372018715120191391192207156664343051610913971927959744519676992404852130396504615663042713312314219527)
if __name__ == '__main__':
unittest.main()
| 35.88
| 154
| 0.721293
| 97
| 897
| 6.402062
| 0.381443
| 0.251208
| 0.270531
| 0.360709
| 0.257649
| 0.10628
| 0
| 0
| 0
| 0
| 0
| 0.284341
| 0.188406
| 897
| 24
| 155
| 37.375
| 0.568681
| 0
| 0
| 0.111111
| 0
| 0
| 0.008919
| 0
| 0
| 0
| 0
| 0
| 0.444444
| 1
| 0.111111
| false
| 0
| 0.111111
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4fea7ae66640cbdcd37c11ee1c38c51c984f3611
| 96
|
py
|
Python
|
answers/pandas_answer4.py
|
monocilindro/foss4g-geopandas
|
12afbc787c1f65cc046234b41166bd62bbb6ac29
|
[
"Apache-2.0"
] | null | null | null |
answers/pandas_answer4.py
|
monocilindro/foss4g-geopandas
|
12afbc787c1f65cc046234b41166bd62bbb6ac29
|
[
"Apache-2.0"
] | null | null | null |
answers/pandas_answer4.py
|
monocilindro/foss4g-geopandas
|
12afbc787c1f65cc046234b41166bd62bbb6ac29
|
[
"Apache-2.0"
] | null | null | null |
boroughs[boroughs['Third_largest_migrant_population_by_country_of_birth_(2011)'] == 'Romania']
| 48
| 95
| 0.833333
| 12
| 96
| 6
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0.041667
| 96
| 1
| 96
| 96
| 0.73913
| 0
| 0
| 0
| 0
| 0
| 0.6875
| 0.614583
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8b34947e76a240646694706fea00c4bf17e9b32c
| 97
|
py
|
Python
|
unified_message_relay/__init__.py
|
Gerongfenh/UnifiedMessageRelay
|
5086d345df497451ad20da4b321c708bf4ea3819
|
[
"MIT"
] | null | null | null |
unified_message_relay/__init__.py
|
Gerongfenh/UnifiedMessageRelay
|
5086d345df497451ad20da4b321c708bf4ea3819
|
[
"MIT"
] | null | null | null |
unified_message_relay/__init__.py
|
Gerongfenh/UnifiedMessageRelay
|
5086d345df497451ad20da4b321c708bf4ea3819
|
[
"MIT"
] | null | null | null |
from . import Core
from . import Lib
from . import Util
from . import daemon
__VERSION__ = '4.3'
| 16.166667
| 20
| 0.721649
| 15
| 97
| 4.4
| 0.6
| 0.606061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025641
| 0.195876
| 97
| 5
| 21
| 19.4
| 0.820513
| 0
| 0
| 0
| 0
| 0
| 0.030928
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8c70617e4a02fd1c4b9a8509e20dd7991168fc5e
| 43
|
py
|
Python
|
test/run/t28.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
test/run/t28.py
|
csev/skulpt
|
9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
test/run/t28.py
|
csev/skulpt
|
9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
z = 0
for x in [1,2,3]:
z += x
print z
| 8.6
| 17
| 0.44186
| 12
| 43
| 1.583333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 0.372093
| 43
| 4
| 18
| 10.75
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.25
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8c7e03630bf64f865f71cdb9e1cf8141a61f329b
| 553
|
py
|
Python
|
redshells/model/__init__.py
|
mski-iksm/redshells
|
1e956fed9b000ea3f6ba1c96e25d5dd953025155
|
[
"MIT"
] | null | null | null |
redshells/model/__init__.py
|
mski-iksm/redshells
|
1e956fed9b000ea3f6ba1c96e25d5dd953025155
|
[
"MIT"
] | null | null | null |
redshells/model/__init__.py
|
mski-iksm/redshells
|
1e956fed9b000ea3f6ba1c96e25d5dd953025155
|
[
"MIT"
] | null | null | null |
from redshells.model.factorization_machine import FactorizationMachineGraph, FactorizationMachine
from redshells.model.feature_aggregation_similarity_model import FeatureAggregationSimilarityModel
from redshells.model.graph_convolutional_matrix_completion import GraphConvolutionalMatrixCompletion
from redshells.model.lda_model import LdaModel
from redshells.model.matrix_factorization_model import MatrixFactorizationGraph, MatrixFactorization
from redshells.model.scdv import SCDV
import redshells.model.utils
from redshells.model.tdidf import Tfidf
| 61.444444
| 100
| 0.909584
| 58
| 553
| 8.5
| 0.431034
| 0.227181
| 0.255578
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057866
| 553
| 8
| 101
| 69.125
| 0.946257
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8ca1de20d3774e797c51c2a62682b0790e5b9416
| 203
|
py
|
Python
|
py/examples/client_send_request.py
|
NorthIsUp/loqui
|
8d394a7951fd3a82d109becc1aebbd9e7ccc894a
|
[
"MIT"
] | 147
|
2017-10-02T18:16:52.000Z
|
2020-03-16T03:26:40.000Z
|
py/examples/client_send_request.py
|
NorthIsUp/loqui
|
8d394a7951fd3a82d109becc1aebbd9e7ccc894a
|
[
"MIT"
] | 14
|
2017-09-19T16:13:32.000Z
|
2019-06-25T21:18:47.000Z
|
py/examples/client_send_request.py
|
NorthIsUp/loqui
|
8d394a7951fd3a82d109becc1aebbd9e7ccc894a
|
[
"MIT"
] | 25
|
2017-10-01T20:10:31.000Z
|
2020-03-19T14:00:20.000Z
|
from __future__ import absolute_import
from __future__ import print_function
from loqui.client import LoquiClient
client = LoquiClient(('localhost', 4001))
print(len(client.send_request('hello world')))
| 33.833333
| 46
| 0.82266
| 26
| 203
| 6
| 0.615385
| 0.128205
| 0.205128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021622
| 0.08867
| 203
| 6
| 46
| 33.833333
| 0.821622
| 0
| 0
| 0
| 0
| 0
| 0.098039
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0.4
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8cb1f39b02dc2138ef054ac32a9272c437cde93d
| 3,386
|
py
|
Python
|
tasks-deploy/empty-website/generate.py
|
irdkwmnsb/lkshl-ctf
|
e5c0200ddc8ba73df5f321b87b9763fb1bbaba57
|
[
"MIT"
] | 3
|
2021-03-30T06:27:58.000Z
|
2021-04-03T17:56:35.000Z
|
tasks-deploy/empty-website/generate.py
|
irdkwmnsb/lkshl-ctf
|
e5c0200ddc8ba73df5f321b87b9763fb1bbaba57
|
[
"MIT"
] | null | null | null |
tasks-deploy/empty-website/generate.py
|
irdkwmnsb/lkshl-ctf
|
e5c0200ddc8ba73df5f321b87b9763fb1bbaba57
|
[
"MIT"
] | null | null | null |
TASK_URL = "http://empty-website.ctf.sicamp.ru:8080"
TITLE = "Пустой сайт?"
STATEMENT_TEMPLATE = f'''
Действительно ли на [этом]({TASK_URL}/{{0}}) сайте нет ничего полезного?
'''
def generate(context):
participant = context['participant']
token = tokens[participant.id % len(tokens)]
return TaskStatement(TITLE, STATEMENT_TEMPLATE.format(token))
tokens = ['JYXQy1aaW8a', '6Mx8ScjqDT1', 'iwqT9p3fk9M', '3P50kxuhT3Z', '1jQQOPRP6H7', 'wJUii7jqEd7', 'ZzbpWLSMeTS', 'Af1DrKb8EHc', '2FQ3KQHodip', 'bahsBje6xoj', '858SBV067AX', 'Q3xuPnFRvmQ', 'HBQy8PxIh5a', 'Z8YhgXjPR2M', 'KlaIXS6XiYT', 'NtA01niBRPO', 'HEKMYj08M8T', '9mtdnWOYo2r', 'FrtxKtJAFrX', 'Nv0gsE1Fg0D', 'SIz1gKoxFe2', 'xQxe6KvaygR', 'TYH4cyE4T0T', 'KuQKWXyTJBd', 'yFS50iWCHft', 'MXbWUmPH6RT', 'l9couPDiJIN', 'r2NyZoKVv7C', 'ZK3Zdn3Jegi', '5U8dQWS1Xj1', 'KV4TmV4GRr9', 'akalCLNwfv9', 'koAIaUJikKX', 'lnTFVzDsXrg', 'tKMitYN6UUv', 'Q4Bxv9mbTWi', 'e3zVTqOv7Aa', 'QGN776NKym5', 'yr5Bw2bgMyG', 'CyP2OW6slCG', 'dIXIO1Am63W', 'qoIBvXM6DCZ', 'Mtw8fJcN3Fm', 'KFOu1oGwGHm', 'g7ENHKbxipz', 'fYIpIEJKSg0', 'sDGgKenZeth', 'Ia1h3Mr7JXl', 'LSlm9Fgwyrf', 'fRnpnXmx7n0', 'LDv8SoknyKR', 'Dc6sYYLLIGd', 'TA1MXGEUgQw', 'hqoDvU0s72H', 'bZNFLFRJ3uL', 'KDUuI7B7p0x', 'DVisdvNqULV', 'XZncuTzXrtu', 'FcyPTmjdv28', 'vRd4tkAR49y', 'is7ldpp1yJF', 'KmDTxXVTu7G', 'mJ7bVli0wKt', 'P3lCZs402l2', 'GR1TFZ2n0Zv', 'ToIGufGe8WU', 'iS9alRiFBtk', 'by3NViB1NLq', '8Qq5udeeHtu', 's2Psk5fngQB', '4GU3sfn3wUn', 'l36mYTKattP', 'FSiqIS6wI2d', 'TFQa62Ra1Gy', 'mv6HSMR1VWy', 'mCupiAEHH4P', 'VJ2gVGv6h9k', 'WOdzzjsRm7t', 'RRJgM6ISouE', 'h6OngeT9tVW', '4n7KmYl0t08', '05QWZOC5IEx', 'TIdYIsULi2O', 'yy4fpgNgzL3', 'MCuFsoSfDBA', 'WqASJsL4wQW', 'uC8hdWMnYJp', 'szrCF5wrcoC', 'xeAs3elzHdB', 'qfhPa6lkaBT', 'vqZD1wVqj5U', '7cHXPmZk6r1', 'It9LanMAigH', 'X7gLYUSnOzW', '80pKaFAwvoZ', 'kb5zKbNyRag', 'Zm2GZUkfuKq', 'vu4fUsWcqxs', 'Cga427nt4Uh', 'ypaTBRdFtmG', '1DNit22cZCp', 'lBko0YFrbAV', 'C1qLMjDifOn', 'yUru5xep9Yo', 'XdtqnkccQDw', 'cqwZVmtgHUz', 'bqY6IGx3C3E', 'l1ZiWgy0ndf', 'hA8kulfqlGM', 'kdFFG6E0d6o', 'PBInOCZzNnt', 'viDHxtHUZg6', 'h41hpcCKuH0', 'wLE3Yaowm1F', 'lE7FNi9bTwO', 'iAq6wsM1ZVZ', 'vijQq3NVQoo', 'nehrYgtFu2T', 'h3xtaHVUVHe', '93uF6mauk9e', 'HDhmQ1gSfkj', 'lLTc53CvcxJ', '2kb9qI1riSg', 'bnB32cIpXcb', '0xVBnxNm5up', 'cq1xklFqEYO', 'drUpm4eEr8V', 'GBj3pIhg1vN', 'b150iodchxP', 'Cr2wvqyOU7m', 'p6Jq2Zao5UK', '4L7rXjk9FQy', 
'1qoAbGaTPBi', 'ijmLTvs3SnG', 'b5vVB5cwsbb', 'a2KiYdXSRLx', 'DnqFMubOoZY', 'N2ENocGsfxv', 'V2JZdHUlKgA', '33Hx4cGWBc7', 'Z6Ok8FBNQH3', 'xcNynugmnPY', 'JCpoIfrsC3v', 'iTwbQobC8o5', 'XKwirImHcNC', '0KMPBkbL70T', 'kYgxuUeEDmp', 'vc1lxSO765L', '3mEYDPMUqXT', 'kGG8pk5xLwn', '51XDb7HqH89', '9fqISjAVGpT', '1N1A8Bh3wJ7', 'oSpzHHVzD8o', 'B62NnFLMy9i', 'MGfrnzOotRZ', '2SAQoPq6bZE', '64vx5DWdYT7', 'itPTS3ayJPv', 'bQPVQcACplx', 'b8I7XFhm4zw', 'WUuBtOxyfLm', 'j6EVrDWPsvm', 'l3NgtPRsvPv', 'ds7zgnmYdZ7', 'QZJkekC3HOa', 'rBETFKSa1mH', 'lPwq0qMHROa', 'RuCe6Pim798', 'qUKsHPtPW4T', 'rvqty6vXmCU', 'kC7NwqiwQhj', 'cxDkVXXFCPr', '2YkqfeKcigl', 'WS9EQnfkIAt', 'BdIrEnA5NGC', 'PKlYR6mglNG', '2vm7oR11xWT', '6fHAtT3MyZz', 'v6hmWLxtdoU', 'HLnwIawgzMc', 'gGSgrIYsqqm', '7qyioDgHJmn', 'GiqiTKevEV3', 'Zt7AFBqRkJz', 'evtj9vVYTXi', 'L3sv1FRRTWx', 'tV08XSJwQTy', 'DCphK7pwtMO', 'yeFjqpizUtF', 'oBvweIFeHfE', 'M8V3Z1tWjX3', 'oCfrVAZDkr4', 'LlZuCPjpvqa', 'G7JsqHfTcIC', 'hJH3VtM9MSi', 'mPLnbWyymGl', '5CRYNdWlYNp', 'wSVHLWSkC2C', 'n3iPLlzoCrk']
| 260.461538
| 3,010
| 0.727998
| 246
| 3,386
| 10.004065
| 0.955285
| 0.005689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109865
| 0.077968
| 3,386
| 12
| 3,011
| 282.166667
| 0.678411
| 0
| 0
| 0
| 1
| 0
| 0.692353
| 0.007113
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8cc5fab9bc931462499ffcf9704aa26be4ea0664
| 14,002
|
py
|
Python
|
tests/test_admin_views.py
|
VelocityWebworks/django-media-albums
|
e9aa8afd7dc7d1e45a8220d9d98007454e50030f
|
[
"MIT"
] | 1
|
2019-11-07T03:17:27.000Z
|
2019-11-07T03:17:27.000Z
|
tests/test_admin_views.py
|
VelocityWebworks/django-media-albums
|
e9aa8afd7dc7d1e45a8220d9d98007454e50030f
|
[
"MIT"
] | null | null | null |
tests/test_admin_views.py
|
VelocityWebworks/django-media-albums
|
e9aa8afd7dc7d1e45a8220d9d98007454e50030f
|
[
"MIT"
] | null | null | null |
try:
from importlib import reload
except ImportError:
pass
from django.contrib.auth import get_user_model
from django.core.urlresolvers import NoReverseMatch, clear_url_caches, reverse
from django.test import TestCase
from django.test.utils import override_settings
from media_albums.models import Album, UserPhoto
from media_albums import admin as media_albums_admin
from media_albums.settings import compute_settings
from . import urls as test_urls
class ViewsTest(TestCase):
fixtures = [
'media_albums_test_data.json',
]
def reload(self):
compute_settings()
reload(media_albums_admin)
clear_url_caches()
reload(test_urls)
def setUp(self):
credentials = {
'username': 'staff_user',
'password': 'testing!',
}
staff_user = get_user_model()._default_manager.create_user(
username=credentials['username'],
password=credentials['password'],
email='superuser@example.com',
first_name='Super',
last_name='User',
)
staff_user.is_staff = True
staff_user.is_superuser = True
staff_user.save()
self.client.login(
username=credentials['username'],
password=credentials['password'],
)
def test_album_views(self):
self.reload()
tests = [
{
'url': 'admin:media_albums_album_changelist',
'url_args': [],
},
{
'url': 'admin:media_albums_album_add',
'url_args': [],
},
{
'url': 'admin:media_albums_album_change',
'url_args': [1],
},
{
'url': 'admin:media_albums_album_delete',
'url_args': [1],
},
]
for test in tests:
url = reverse(test['url'], args=test['url_args'])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_audiofile_views_are_disabled_by_default(self):
self.reload()
tests = [
{
'url': 'admin:media_albums_audiofile_changelist',
'url_args': [],
},
{
'url': 'admin:media_albums_audiofile_add',
'url_args': [],
},
{
'url': 'admin:media_albums_audiofile_change',
'url_args': [1],
},
{
'url': 'admin:media_albums_audiofile_delete',
'url_args': [1],
},
]
for test in tests:
self.assertRaises(
NoReverseMatch,
reverse,
test['url'],
args=test['url_args'],
)
@override_settings(MEDIA_ALBUMS={
'audio_files_enabled': True,
})
def test_audiofile_views_audio_enabled(self):
self.reload()
tests = [
{
'url': 'admin:media_albums_audiofile_changelist',
'url_args': [],
},
{
'url': 'admin:media_albums_audiofile_add',
'url_args': [],
},
{
'url': 'admin:media_albums_audiofile_change',
'url_args': [1],
},
{
'url': 'admin:media_albums_audiofile_delete',
'url_args': [1],
},
]
for test in tests:
url = reverse(test['url'], args=test['url_args'])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
if test['url'][-4:] == '_add' or test['url'][-7:] == '_change':
form = response.context['adminform'].form
self.assertFalse(form.fields['audio_file_2'].required)
@override_settings(MEDIA_ALBUMS={
'audio_files_enabled': True,
'audio_files_format2_required': True,
})
def test_audiofile_views_audio_enabled_with_format2_required(self):
self.reload()
tests = [
{
'url': 'admin:media_albums_audiofile_changelist',
'url_args': [],
},
{
'url': 'admin:media_albums_audiofile_add',
'url_args': [],
},
{
'url': 'admin:media_albums_audiofile_change',
'url_args': [1],
},
{
'url': 'admin:media_albums_audiofile_delete',
'url_args': [1],
},
]
for test in tests:
url = reverse(test['url'], args=test['url_args'])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
if test['url'][-4:] == '_add' or test['url'][-7:] == '_change':
form = response.context['adminform'].form
self.assertTrue(form.fields['audio_file_2'].required)
def test_photo_views_are_enabled_by_default(self):
self.reload()
tests = [
{
'url': 'admin:media_albums_photo_changelist',
'url_args': [],
},
{
'url': 'admin:media_albums_photo_add',
'url_args': [],
},
{
'url': 'admin:media_albums_photo_change',
'url_args': [1],
},
{
'url': 'admin:media_albums_photo_delete',
'url_args': [1],
},
]
for test in tests:
url = reverse(test['url'], args=test['url_args'])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
@override_settings(MEDIA_ALBUMS={
'photos_enabled': False,
})
def test_photo_views_photos_disabled(self):
self.reload()
tests = [
{
'url': 'admin:media_albums_photo_changelist',
'url_args': [],
},
{
'url': 'admin:media_albums_photo_add',
'url_args': [],
},
{
'url': 'admin:media_albums_photo_change',
'url_args': [1],
},
{
'url': 'admin:media_albums_photo_delete',
'url_args': [1],
},
]
for test in tests:
self.assertRaises(
NoReverseMatch,
reverse,
test['url'],
args=test['url_args'],
)
def test_userphoto_views_are_disabled_by_default(self):
self.reload()
tests = [
{
'url': 'admin:media_albums_userphoto_changelist',
'url_args': [],
},
{
'url': 'admin:media_albums_userphoto_add',
'url_args': [],
},
{
'url': 'admin:media_albums_userphoto_change',
'url_args': [1],
},
{
'url': 'admin:media_albums_userphoto_delete',
'url_args': [1],
},
]
for test in tests:
self.assertRaises(
NoReverseMatch,
reverse,
test['url'],
args=test['url_args'],
)
@override_settings(MEDIA_ALBUMS={
'user_uploaded_photos_enabled': True,
})
def test_userphoto_views_uploads_enabled(self):
self.reload()
tests = [
{
'url': 'admin:media_albums_userphoto_changelist',
'url_args': [],
'expected_status_code': 200,
},
{
'url': 'admin:media_albums_userphoto_add',
'url_args': [],
'expected_status_code': 403,
},
{
'url': 'admin:media_albums_userphoto_change',
'url_args': [32],
'expected_status_code': 200,
},
{
'url': 'admin:media_albums_userphoto_delete',
'url_args': [32],
'expected_status_code': 200,
},
]
for test in tests:
url = reverse(test['url'], args=test['url_args'])
response = self.client.get(url)
self.assertEqual(
response.status_code,
test['expected_status_code']
)
@override_settings(MEDIA_ALBUMS={
'user_uploaded_photos_enabled': True,
})
def test_userphoto_approval(self):
self.reload()
url = reverse('admin:media_albums_userphoto_changelist')
user_photo_pk = 32
album_kwargs = {
'name': 'User Photos',
'slug': 'user-photos',
}
self.assertEqual(UserPhoto.objects.filter(pk=user_photo_pk).count(), 1)
self.assertEqual(Album.objects.filter(**album_kwargs).count(), 0)
response = self.client.post(url, {
'action': 'approve_photo',
'select_across': '0',
'index': '0',
'_selected_action': str(user_photo_pk),
})
self.assertEqual(response.status_code, 302)
self.assertEqual(UserPhoto.objects.filter(pk=user_photo_pk).count(), 0)
album = Album.objects.get(**album_kwargs)
self.assertEqual(
album.photo_set.filter(
image='http://i.imgur.com/erJ6t2u.jpg',
).count(),
1
)
@override_settings(MEDIA_ALBUMS={
'user_uploaded_photos_enabled': True,
'user_uploaded_photos_album_name': 'Photos Uploaded by Users',
'user_uploaded_photos_album_slug': 'photos-from-users',
})
def test_userphoto_approval_with_custom_album_name_and_slug(self):
self.reload()
url = reverse('admin:media_albums_userphoto_changelist')
user_photo_pk = 32
album_kwargs = {
'name': 'Photos Uploaded by Users',
'slug': 'photos-from-users',
}
self.assertEqual(UserPhoto.objects.filter(pk=user_photo_pk).count(), 1)
self.assertEqual(Album.objects.filter(**album_kwargs).count(), 0)
response = self.client.post(url, {
'action': 'approve_photo',
'select_across': '0',
'index': '0',
'_selected_action': str(user_photo_pk),
})
self.assertEqual(response.status_code, 302)
self.assertEqual(UserPhoto.objects.filter(pk=user_photo_pk).count(), 0)
album = Album.objects.get(**album_kwargs)
self.assertEqual(
album.photo_set.filter(
image='http://i.imgur.com/erJ6t2u.jpg',
).count(),
1
)
def test_videofile_views_are_disabled_by_default(self):
self.reload()
tests = [
{
'url': 'admin:media_albums_videofile_changelist',
'url_args': [],
},
{
'url': 'admin:media_albums_videofile_add',
'url_args': [],
},
{
'url': 'admin:media_albums_videofile_change',
'url_args': [1],
},
{
'url': 'admin:media_albums_videofile_delete',
'url_args': [1],
},
]
for test in tests:
self.assertRaises(
NoReverseMatch,
reverse,
test['url'],
args=test['url_args'],
)
@override_settings(MEDIA_ALBUMS={
'video_files_enabled': True,
})
def test_videofile_views_video_enabled(self):
self.reload()
tests = [
{
'url': 'admin:media_albums_videofile_changelist',
'url_args': [],
},
{
'url': 'admin:media_albums_videofile_add',
'url_args': [],
},
{
'url': 'admin:media_albums_videofile_change',
'url_args': [1],
},
{
'url': 'admin:media_albums_videofile_delete',
'url_args': [1],
},
]
for test in tests:
url = reverse(test['url'], args=test['url_args'])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
if test['url'][-4:] == '_add' or test['url'][-7:] == '_change':
form = response.context['adminform'].form
self.assertFalse(form.fields['video_file_2'].required)
@override_settings(MEDIA_ALBUMS={
'video_files_enabled': True,
'video_files_format2_required': True,
})
def test_videofile_views_video_enabled_with_format2_required(self):
self.reload()
tests = [
{
'url': 'admin:media_albums_videofile_changelist',
'url_args': [],
},
{
'url': 'admin:media_albums_videofile_add',
'url_args': [],
},
{
'url': 'admin:media_albums_videofile_change',
'url_args': [1],
},
{
'url': 'admin:media_albums_videofile_delete',
'url_args': [1],
},
]
for test in tests:
url = reverse(test['url'], args=test['url_args'])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
if test['url'][-4:] == '_add' or test['url'][-7:] == '_change':
form = response.context['adminform'].form
self.assertTrue(form.fields['video_file_2'].required)
| 29.602537
| 79
| 0.489144
| 1,279
| 14,002
| 5.035184
| 0.108679
| 0.071739
| 0.114286
| 0.129814
| 0.827484
| 0.815994
| 0.78587
| 0.748913
| 0.689752
| 0.662578
| 0
| 0.011228
| 0.389373
| 14,002
| 472
| 80
| 29.665254
| 0.741988
| 0
| 0
| 0.566586
| 0
| 0
| 0.224468
| 0.130767
| 0
| 0
| 0
| 0
| 0.060533
| 1
| 0.03632
| false
| 0.009685
| 0.024213
| 0
| 0.065375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8cd99a9a98a732b1baaaf3da7c82b94f80644a6f
| 33
|
py
|
Python
|
build/lib/annotation_utils/old/data_mover/__init__.py
|
HienDT27/annotation_utils
|
1f4e95f4cfa08de5bbab20f90a6a75fba66a69b9
|
[
"MIT"
] | 13
|
2020-01-28T04:45:22.000Z
|
2022-03-10T03:35:49.000Z
|
build/lib/annotation_utils/old/data_mover/__init__.py
|
HienDT27/annotation_utils
|
1f4e95f4cfa08de5bbab20f90a6a75fba66a69b9
|
[
"MIT"
] | 4
|
2020-02-14T08:56:03.000Z
|
2021-05-21T10:38:30.000Z
|
build/lib/annotation_utils/old/data_mover/__init__.py
|
HienDT27/annotation_utils
|
1f4e95f4cfa08de5bbab20f90a6a75fba66a69b9
|
[
"MIT"
] | 7
|
2020-04-10T07:56:25.000Z
|
2021-12-17T11:19:23.000Z
|
from .data_mover import DataMover
| 33
| 33
| 0.878788
| 5
| 33
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 33
| 1
| 33
| 33
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5079bd9b705341fabf8103ac1a9b7641dbd9772c
| 83
|
py
|
Python
|
src/cryptoadvance/specter/devices/hwi/jadepy/__init__.py
|
aphex3k/specter-desktop
|
f20b8447a9dcafb81461cc721e2978bf14fbc529
|
[
"MIT"
] | 683
|
2019-08-31T02:26:21.000Z
|
2022-03-31T18:43:31.000Z
|
src/cryptoadvance/specter/devices/hwi/jadepy/__init__.py
|
aphex3k/specter-desktop
|
f20b8447a9dcafb81461cc721e2978bf14fbc529
|
[
"MIT"
] | 1,100
|
2019-09-26T13:00:18.000Z
|
2022-03-31T22:29:54.000Z
|
src/cryptoadvance/specter/devices/hwi/jadepy/__init__.py
|
aphex3k/specter-desktop
|
f20b8447a9dcafb81461cc721e2978bf14fbc529
|
[
"MIT"
] | 179
|
2019-09-03T17:10:59.000Z
|
2022-03-31T16:59:13.000Z
|
from .jade import JadeAPI
from .jade_error import JadeError
__version__ = "0.0.1"
| 16.6
| 33
| 0.771084
| 13
| 83
| 4.538462
| 0.692308
| 0.271186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042254
| 0.144578
| 83
| 4
| 34
| 20.75
| 0.788732
| 0
| 0
| 0
| 0
| 0
| 0.060241
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
50837bc2ab1e15486e1f301568857dbdd1a2c711
| 2,499
|
py
|
Python
|
src/apps/interface.py
|
summunity/DjangoReact_CLI
|
89f4b60028c04a800aa44f1476c63bab27e0b382
|
[
"MIT"
] | null | null | null |
src/apps/interface.py
|
summunity/DjangoReact_CLI
|
89f4b60028c04a800aa44f1476c63bab27e0b382
|
[
"MIT"
] | null | null | null |
src/apps/interface.py
|
summunity/DjangoReact_CLI
|
89f4b60028c04a800aa44f1476c63bab27e0b382
|
[
"MIT"
] | null | null | null |
from ..format_cmd import format_cmd_prompt
def launch_app( state, config ):
""" launch app state """
command_str = """
Which application do you want to launch:
"""
from .process import launch_app as launch_process
apps = config[config['launch'] == True]
for i in range(0, len(apps)):
app_name = apps.loc[i]['title']
command_str += '%s: %s\n' % (i+1, app_name)
command_str += 'b: back\n'
command_str = format_cmd_prompt(command_str)
user_input = input(command_str)
try : user_input = int(user_input) - 1
except:
if user_input == 'b' or user_input == 'back': state = 0
else: print( 'Invalid Input : %s' % user_input)
return state, None
# catch error when supplied value is greater than # of apps
if user_input > len(apps) :
print( 'Invalid Input : %s' % user_input)
return state, None
app = launch_process( apps.loc[user_input] )
# set the state to return to the main menu
state = 0
return state, app
def list_apps(state, active_threads):
    """Print a numbered list of all active App threads and return to the main menu."""
    command_str = """
Active Apps:
"""
    # enumerate from 1 so the display matches the launch/kill menus
    for index, thread in enumerate(active_threads, start=1):
        command_str += '%s: %s\n' % (index, thread.name)
    print(format_cmd_prompt(command_str))
    # back to the main menu
    state = 0
    return state
def kill_app(state, active_threads):
    """Prompt the user to pick an active App thread and kill it.

    Args:
        state: current menu state (reset to 0, the main menu, on return).
        active_threads: list of running App threads.

    Returns:
        (state, active_threads): new menu state and the thread list with
        the killed thread removed (unchanged on invalid input or "back").
    """
    from ..thread import kill_process
    command_str = """
Active Apps:
"""
    for i in range(0, len(active_threads)):
        app_name = active_threads[i].name
        command_str += '%s: %s\n' % (i+1, app_name)
    command_str += 'b: back\n'
    command_str = format_cmd_prompt(command_str)
    user_input = input(command_str)
    try:
        user_input = int(user_input) - 1
    except ValueError:  # was a bare except; only int() conversion can fail here
        if user_input == 'b' or user_input == 'back':
            state = 0
        else:
            print('Invalid Input : %s' % user_input)
        return state, active_threads
    # Fix: the original check (`> len(...)`) let an index equal to the list
    # length through, and an input of "0" became -1, silently popping (and
    # killing) the *last* thread instead of being rejected.
    if user_input < 0 or user_input >= len(active_threads):
        print('Invalid Input : %s' % user_input)
        return state, active_threads
    proc = active_threads.pop(user_input)
    kill_process(proc)
    # set the state to return to the main menu
    state = 0
    return state, active_threads
| 23.8
| 63
| 0.619048
| 355
| 2,499
| 4.16338
| 0.2
| 0.109608
| 0.047361
| 0.048714
| 0.744926
| 0.744926
| 0.734777
| 0.717862
| 0.717862
| 0.686739
| 0
| 0.007163
| 0.273709
| 2,499
| 104
| 64
| 24.028846
| 0.807163
| 0.134854
| 0
| 0.689655
| 0
| 0
| 0.112828
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051724
| false
| 0
| 0.051724
| 0
| 0.224138
| 0.086207
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
50a5a6f43c87b813473c45ede99e9048ea56960c
| 184
|
py
|
Python
|
abattlemetrics/__init__.py
|
thegamecracks/abattlemetrics
|
de7d75fbf306ee61279ac1503a24766cd23ceaaf
|
[
"MIT"
] | 2
|
2021-07-08T02:34:28.000Z
|
2021-07-30T05:07:20.000Z
|
abattlemetrics/__init__.py
|
thegamecracks/abattlemetrics
|
de7d75fbf306ee61279ac1503a24766cd23ceaaf
|
[
"MIT"
] | null | null | null |
abattlemetrics/__init__.py
|
thegamecracks/abattlemetrics
|
de7d75fbf306ee61279ac1503a24766cd23ceaaf
|
[
"MIT"
] | null | null | null |
# Package version string.
__version__ = '0.5.0'

# Re-export the public API of each submodule at package level.
# NOTE(review): star imports expose whatever each submodule makes public
# (its __all__, or all non-underscore names) — confirm each submodule
# curates that surface.
from .client import *
from .datapoint import *
from .errors import *
from .iterators import *
from .player import *
from .server import *
from .session import *
| 18.4
| 24
| 0.722826
| 25
| 184
| 5.16
| 0.48
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019737
| 0.173913
| 184
| 9
| 25
| 20.444444
| 0.828947
| 0
| 0
| 0
| 0
| 0
| 0.027174
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.875
| 0
| 0.875
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
50ac97d345eb11b975c095c57f64f82818521478
| 7,886
|
py
|
Python
|
conans/test/functional/generators/cmake_find_package_multi_configs_test.py
|
czoido/conan
|
81370e515f3ed7ba0486d6ffeb4def8a2765ae14
|
[
"MIT"
] | 1
|
2021-08-05T15:33:08.000Z
|
2021-08-05T15:33:08.000Z
|
conans/test/functional/generators/cmake_find_package_multi_configs_test.py
|
czoido/conan
|
81370e515f3ed7ba0486d6ffeb4def8a2765ae14
|
[
"MIT"
] | null | null | null |
conans/test/functional/generators/cmake_find_package_multi_configs_test.py
|
czoido/conan
|
81370e515f3ed7ba0486d6ffeb4def8a2765ae14
|
[
"MIT"
] | null | null | null |
import os
import platform
import textwrap
import unittest
import pytest
from conans.test.assets.sources import gen_function_cpp
from conans.test.utils.tools import TestClient
from conans.util.files import save, load
@pytest.mark.tool_cmake
@unittest.skipUnless(platform.system() == "Windows", "Only for windows")
class CustomConfigurationTest(unittest.TestCase):
    """Exercise CMakeDeps with an extra custom configuration ("ReleaseShared").

    Builds "hello/0.1" twice (shared and static), then installs a consumer
    for both option values and checks that per-configuration target files
    are generated and that both binaries build and run.
    """

    # Consumer recipe: registers the "ReleaseShared" configuration with
    # CMakeDeps and copies the dependency DLLs next to the executable.
    conanfile = textwrap.dedent("""
        from conans import ConanFile
        from conan.tools.cmake import CMakeDeps
        class App(ConanFile):
            settings = "os", "arch", "compiler", "build_type"
            requires = "hello/0.1"
            def generate(self):
                cmake = CMakeDeps(self)
                cmake.configurations.append("ReleaseShared")
                if self.options["hello"].shared:
                    cmake.configuration = "ReleaseShared"
                cmake.generate()
            def imports(self):
                config = str(self.settings.build_type)
                if self.options["hello"].shared:
                    config = "ReleaseShared"
                self.copy("*.dll", src="bin", dst=config, keep_path=False)
        """)

    app = gen_function_cpp(name="main", includes=["hello"], calls=["hello"])

    # CMakeLists declaring the ReleaseShared configuration; it reuses the
    # stock Release compiler/linker flags for that configuration.
    cmakelist = textwrap.dedent("""
        set(CMAKE_CONFIGURATION_TYPES Debug Release ReleaseShared CACHE STRING
            "Available build-types: Debug, Release and ReleaseShared")
        cmake_minimum_required(VERSION 2.8)
        project(App C CXX)
        set(CMAKE_PREFIX_PATH ${CMAKE_BINARY_DIR} ${CMAKE_PREFIX_PATH})
        set(CMAKE_MODULE_PATH ${CMAKE_BINARY_DIR} ${CMAKE_MODULE_PATH})
        set(CMAKE_CXX_FLAGS_RELEASESHARED ${CMAKE_CXX_FLAGS_RELEASE})
        set(CMAKE_C_FLAGS_RELEASESHARED ${CMAKE_C_FLAGS_RELEASE})
        set(CMAKE_EXE_LINKER_FLAGS_RELEASESHARED ${CMAKE_EXE_LINKER_FLAGS_RELEASE})
        find_package(hello REQUIRED)
        add_executable(app app.cpp)
        target_link_libraries(app PRIVATE hello::hello)
        """)

    def setUp(self):
        # Build "hello" twice — shared and static — so both configurations
        # can be consumed by the test below.
        self.client = TestClient(path_with_spaces=False)
        self.client.run("new hello/0.1 -s")
        self.client.run("create . hello/0.1@ -s compiler.version=15 "
                        "-s build_type=Release -o hello:shared=True")
        self.client.run("create . hello/0.1@ -s compiler.version=15 "
                        "-s build_type=Release")
        # Prepare the actual consumer package
        self.client.save({"conanfile.py": self.conanfile,
                          "CMakeLists.txt": self.cmakelist,
                          "app.cpp": self.app})

    def test_generator_multi(self):
        settings = {"compiler": "Visual Studio",
                    "compiler.version": "15",
                    "arch": "x86_64",
                    "build_type": "Release",
                    }
        settings = " ".join('-s %s="%s"' % (k, v) for k, v in settings.items() if v)
        # Run the configure corresponding to this test case
        with self.client.chdir('build'):
            self.client.run("install .. %s -o hello:shared=True" % settings)
            self.client.run("install .. %s -o hello:shared=False" % settings)
            # One target file per configuration must have been generated.
            self.assertTrue(os.path.isfile(os.path.join(self.client.current_folder,
                                                        "helloTarget-releaseshared.cmake")))
            self.assertTrue(os.path.isfile(os.path.join(self.client.current_folder,
                                                        "helloTarget-release.cmake")))
            self.client.run_command('cmake .. -G "Visual Studio 15 Win64"')
            self.client.run_command('cmake --build . --config Release')
            self.client.run_command(r"Release\\app.exe")
            self.assertIn("hello/0.1: Hello World Release!", self.client.out)
            self.assertIn("main: Release!", self.client.out)
            self.client.run_command('cmake --build . --config ReleaseShared')
            self.client.run_command(r"ReleaseShared\\app.exe")
            self.assertIn("hello/0.1: Hello World Release!", self.client.out)
            self.assertIn("main: Release!", self.client.out)
@pytest.mark.tool_cmake
@unittest.skipUnless(platform.system() == "Windows", "Only for windows")
class CustomSettingsTest(unittest.TestCase):
    """Exercise CMakeDeps with a renamed build_type setting ("MyRelease").

    Rewrites the client's settings.yml so "Release" becomes "MyRelease",
    builds "hello/0.1" under that build type, and checks the consumer
    generates a matching target file and builds/runs.
    """

    conanfile = textwrap.dedent("""
        from conans import ConanFile
        from conan.tools.cmake import CMakeDeps
        class App(ConanFile):
            settings = "os", "arch", "compiler", "build_type"
            requires = "hello/0.1"
            def generate(self):
                cmake = CMakeDeps(self)
                #cmake.configurations.append("MyRelease")
                cmake.generate()
        """)

    app = gen_function_cpp(name="main", includes=["hello"], calls=["hello"])

    # CMakeLists declaring the MyRelease configuration; it reuses the stock
    # Release compiler/linker flags for that configuration.
    cmakelist = textwrap.dedent("""
        set(CMAKE_CONFIGURATION_TYPES Debug Release MyRelease CACHE STRING
            "Available build-types: Debug, Release and MyRelease")
        cmake_minimum_required(VERSION 2.8)
        project(App C CXX)
        set(CMAKE_PREFIX_PATH ${CMAKE_BINARY_DIR} ${CMAKE_PREFIX_PATH})
        set(CMAKE_MODULE_PATH ${CMAKE_BINARY_DIR} ${CMAKE_MODULE_PATH})
        set(CMAKE_CXX_FLAGS_MYRELEASE ${CMAKE_CXX_FLAGS_RELEASE})
        set(CMAKE_C_FLAGS_MYRELEASE ${CMAKE_C_FLAGS_RELEASE})
        set(CMAKE_EXE_LINKER_FLAGS_MYRELEASE ${CMAKE_EXE_LINKER_FLAGS_RELEASE})
        find_package(hello REQUIRED)
        add_executable(app app.cpp)
        target_link_libraries(app PRIVATE hello::hello)
        """)

    def setUp(self):
        self.client = TestClient(path_with_spaces=False)
        # Rename the "Release" build_type to "MyRelease" in the cached settings.
        settings = load(self.client.cache.settings_path)
        settings = settings.replace("Release", "MyRelease")
        save(self.client.cache.settings_path, settings)
        self.client.run("new hello/0.1 -s")
        # Patch the scaffolded CMakeLists so it knows the MyRelease
        # configuration and maps the Release flags onto it.
        cmake = self.client.load("src/CMakeLists.txt")
        cmake = cmake.replace("cmake_minimum_required", """
set(CMAKE_CONFIGURATION_TYPES Debug MyRelease Release CACHE STRING "Types")
cmake_minimum_required""")
        cmake = cmake.replace("conan_basic_setup()", """
conan_basic_setup()
set(CMAKE_CXX_FLAGS_MYRELEASE ${CMAKE_CXX_FLAGS_RELEASE})
set(CMAKE_C_FLAGS_MYRELEASE ${CMAKE_C_FLAGS_RELEASE})
set(CMAKE_EXE_LINKER_FLAGS_MYRELEASE ${CMAKE_EXE_LINKER_FLAGS_RELEASE})
""")
        self.client.save({"src/CMakeLists.txt": cmake})
        self.client.run("create . hello/0.1@ -s compiler.version=15 -s build_type=MyRelease")
        # Prepare the actual consumer package
        self.client.save({"conanfile.py": self.conanfile,
                          "CMakeLists.txt": self.cmakelist,
                          "app.cpp": self.app})

    def test_generator_multi(self):
        settings = {"compiler": "Visual Studio",
                    "compiler.version": "15",
                    "arch": "x86_64",
                    "build_type": "MyRelease",
                    }
        settings = " ".join('-s %s="%s"' % (k, v) for k, v in settings.items() if v)
        # Run the configure corresponding to this test case
        build_directory = os.path.join(self.client.current_folder, "build").replace("\\", "/")
        with self.client.chdir(build_directory):
            self.client.run("install .. %s" % settings)
            self.assertTrue(os.path.isfile(os.path.join(self.client.current_folder,
                                                        "helloTarget-myrelease.cmake")))
            self.client.run_command('cmake .. -G "Visual Studio 15 Win64"')
            self.client.run_command('cmake --build . --config MyRelease')
            self.client.run_command(r"MyRelease\\app.exe")
            self.assertIn("hello/0.1: Hello World Release!", self.client.out)
            self.assertIn("main: Release!", self.client.out)
| 42.397849
| 94
| 0.607786
| 895
| 7,886
| 5.184358
| 0.174302
| 0.077586
| 0.044828
| 0.034483
| 0.800647
| 0.755172
| 0.740086
| 0.725216
| 0.673922
| 0.665302
| 0
| 0.008702
| 0.271367
| 7,886
| 185
| 95
| 42.627027
| 0.798817
| 0.021684
| 0
| 0.611111
| 0
| 0
| 0.537095
| 0.159792
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.027778
| false
| 0
| 0.090278
| 0
| 0.173611
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
50c9ea58236835cc65bc39318e5c5e1ef8ef52c1
| 169
|
py
|
Python
|
convnet3d/__init__.py
|
lpigou/Theano-3D-ConvNet
|
e145dc47b689656db7d2baf8e4eeeeaa34fd86d0
|
[
"Unlicense"
] | 89
|
2015-03-02T13:42:26.000Z
|
2021-11-04T12:33:25.000Z
|
convnet3d/__init__.py
|
conanhung/Theano-3D-ConvNet
|
e145dc47b689656db7d2baf8e4eeeeaa34fd86d0
|
[
"Unlicense"
] | 3
|
2015-06-29T22:06:15.000Z
|
2017-06-20T12:30:16.000Z
|
convnet3d/__init__.py
|
conanhung/Theano-3D-ConvNet
|
e145dc47b689656db7d2baf8e4eeeeaa34fd86d0
|
[
"Unlicense"
] | 58
|
2015-02-22T00:18:01.000Z
|
2021-05-19T07:35:00.000Z
|
from convnet3d import ConvLayer, NormLayer, PoolLayer, RectLayer
from mlp import LogRegr, HiddenLayer, DropoutLayer
from activations import relu, tanh, sigmoid, softplus
| 56.333333
| 64
| 0.840237
| 20
| 169
| 7.1
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006667
| 0.112426
| 169
| 3
| 65
| 56.333333
| 0.94
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0f9b8f01b669d138c834d6e7958c0def6a7ca617
| 66
|
py
|
Python
|
g/appengine/py/standard/signup/handlers/__init__.py
|
chhschou/sandpit
|
d4a6760905b45b90455f10a5b50af3c5f743e445
|
[
"MIT"
] | null | null | null |
g/appengine/py/standard/signup/handlers/__init__.py
|
chhschou/sandpit
|
d4a6760905b45b90455f10a5b50af3c5f743e445
|
[
"MIT"
] | null | null | null |
g/appengine/py/standard/signup/handlers/__init__.py
|
chhschou/sandpit
|
d4a6760905b45b90455f10a5b50af3c5f743e445
|
[
"MIT"
] | null | null | null |
from mainpage import MainPage
from welcomepage import WelcomePage
| 22
| 35
| 0.878788
| 8
| 66
| 7.25
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 66
| 2
| 36
| 33
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0fb9b00645bbf8fc16fba1e27207cd322eeab010
| 68
|
py
|
Python
|
kin_phase1/model/testing.py
|
leekh7411/ai-hackathon-2018
|
70bc064103bbf8ea960182027e239d7cd43539a1
|
[
"MIT"
] | null | null | null |
kin_phase1/model/testing.py
|
leekh7411/ai-hackathon-2018
|
70bc064103bbf8ea960182027e239d7cd43539a1
|
[
"MIT"
] | null | null | null |
kin_phase1/model/testing.py
|
leekh7411/ai-hackathon-2018
|
70bc064103bbf8ea960182027e239d7cd43539a1
|
[
"MIT"
] | null | null | null |
import numpy as np

# Demonstrate basic slicing: split the array at index 3 and show both pieces.
a = np.asarray([1, 2, 3, 4])
head, tail = a[:3], a[3:]
print(head)
print(tail)
| 17
| 23
| 0.602941
| 17
| 68
| 2.411765
| 0.647059
| 0.292683
| 0.341463
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0.117647
| 68
| 4
| 24
| 17
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
0ffdd703e7010ed5d2522bfb23ea3624a4689a23
| 30
|
py
|
Python
|
src/whiteCalculator/ERRORS.py
|
WhiteNightAWA/calculator-api
|
e5c1194b6f9a14259827f121368d4ebc70b2c17c
|
[
"MIT"
] | 4
|
2021-09-14T22:17:17.000Z
|
2022-03-28T10:36:26.000Z
|
src/whiteCalculator/ERRORS.py
|
WhiteNightAWA/white-calculator
|
e5c1194b6f9a14259827f121368d4ebc70b2c17c
|
[
"MIT"
] | null | null | null |
src/whiteCalculator/ERRORS.py
|
WhiteNightAWA/white-calculator
|
e5c1194b6f9a14259827f121368d4ebc70b2c17c
|
[
"MIT"
] | null | null | null |
class BracketsError(Exception):
    """Raised for bracket-related errors in the calculator.

    Fix: the original class did not derive from Exception, so it could not
    be raised or caught as an error despite living in ERRORS.py.
    """
| 10
| 20
| 0.733333
| 3
| 30
| 7.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.233333
| 30
| 2
| 21
| 15
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
ba05120044eb2eacdebe5457d0df4f15a927dfa1
| 51
|
py
|
Python
|
tool_belt/__init__.py
|
johnk314/Python-Projects
|
f3e5f265fe4566d86e40a3c06dfb4cef23776085
|
[
"MIT"
] | null | null | null |
tool_belt/__init__.py
|
johnk314/Python-Projects
|
f3e5f265fe4566d86e40a3c06dfb4cef23776085
|
[
"MIT"
] | null | null | null |
tool_belt/__init__.py
|
johnk314/Python-Projects
|
f3e5f265fe4566d86e40a3c06dfb4cef23776085
|
[
"MIT"
] | null | null | null |
from tool_belt import connect, calculate, schedule
| 25.5
| 50
| 0.843137
| 7
| 51
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 51
| 1
| 51
| 51
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e866b388acfda0220997586ac84c01a56dd5d2cf
| 25
|
py
|
Python
|
unravel/text/legal/glossary.py
|
unravel-text/unravel
|
80ed4599efe0a845cfb9712e842615129b1fcbf7
|
[
"Apache-2.0"
] | null | null | null |
unravel/text/legal/glossary.py
|
unravel-text/unravel
|
80ed4599efe0a845cfb9712e842615129b1fcbf7
|
[
"Apache-2.0"
] | 2
|
2018-10-31T12:09:55.000Z
|
2018-10-31T12:16:56.000Z
|
unravel/text/legal/glossary.py
|
unravel-text/unravel
|
80ed4599efe0a845cfb9712e842615129b1fcbf7
|
[
"Apache-2.0"
] | null | null | null |
class Glossary:
    """Placeholder for a legal-glossary type; no behavior implemented yet."""
    pass
| 8.333333
| 15
| 0.68
| 3
| 25
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.28
| 25
| 2
| 16
| 12.5
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
e8b49c4c6c3d0b6003a525df7f8e7dfd14ed9e5a
| 164
|
py
|
Python
|
src/pygamesimplegui/__init__.py
|
PraneethJain/PyGame-GUI
|
10e779b50cbc0b6cdf85d74876d743bcead6805c
|
[
"MIT"
] | null | null | null |
src/pygamesimplegui/__init__.py
|
PraneethJain/PyGame-GUI
|
10e779b50cbc0b6cdf85d74876d743bcead6805c
|
[
"MIT"
] | null | null | null |
src/pygamesimplegui/__init__.py
|
PraneethJain/PyGame-GUI
|
10e779b50cbc0b6cdf85d74876d743bcead6805c
|
[
"MIT"
] | null | null | null |
import pygame as pg

# Initialize all pygame modules at package-import time so the widget
# classes below can be used without extra setup.
# NOTE(review): import-time side effect — confirm callers expect pg.init()
# to have run merely by importing this package.
pg.init()

# Public widgets of the package.
from .button import Button
from .input_box import InputBox
from .link import Link
from .menu import Menu
from .slider import Slider
| 16.4
| 31
| 0.786585
| 27
| 164
| 4.740741
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164634
| 164
| 9
| 32
| 18.222222
| 0.934307
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.857143
| 0
| 0.857143
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
2cdfde892efb49819d78ce41fa9f51b12beec496
| 5,339
|
py
|
Python
|
vegadns_cli/commands/accounts.py
|
shupp/VegaDNS-CLI
|
d2b4cc41649f3f549774ebf206302841f3db41d0
|
[
"Apache-2.0"
] | 3
|
2017-10-03T23:11:20.000Z
|
2021-07-19T17:06:26.000Z
|
vegadns_cli/commands/accounts.py
|
shupp/VegaDNS-CLI
|
d2b4cc41649f3f549774ebf206302841f3db41d0
|
[
"Apache-2.0"
] | null | null | null |
vegadns_cli/commands/accounts.py
|
shupp/VegaDNS-CLI
|
d2b4cc41649f3f549774ebf206302841f3db41d0
|
[
"Apache-2.0"
] | 5
|
2017-06-13T04:34:41.000Z
|
2022-02-04T05:35:35.000Z
|
from builtins import str
import click
import json
import logging
from vegadns_client.exceptions import ClientException
from vegadns_cli.common import accounts
# Module-level logger named after this module.
logger = logging.getLogger(__name__)
@accounts.command()
@click.option(
    "--account-id",
    type=int,
    prompt=True,
    help="ID of the account, required"
)
@click.pass_context
def delete(ctx, account_id):
    """Delete an account"""
    try:
        # Resolve the account through the shared client, then remove it.
        target = ctx.obj['client'].account(account_id)
        target.delete()
    except ClientException as e:
        # Surface the API error code and message, then exit non-zero.
        click.echo("Error: " + str(e.code))
        click.echo("Response: " + str(e.message))
        ctx.exit(1)
@accounts.command()
@click.option(
    "--password",
    type=str,
    prompt=True,
    hide_input=True,
    help="Clear text password, required"
)
@click.option(
    "--account-id",
    type=int,
    prompt=True,
    help="Account ID, required"
)
@click.pass_context
def set_password(ctx, account_id, password):
    """Set the password for an account"""
    try:
        a = ctx.obj['client'].account(account_id)
        # The edit endpoint needs the full record, so resend the current
        # field values alongside the new password.
        data = {
            'first_name': a.values["first_name"],
            'last_name': a.values["last_name"],
            'email': a.values["email"],
            'account_type': a.values["account_type"],
            'phone': a.values["phone"],
            'status': a.values["status"],
            'password': password
        }
        # Fix: the return value was bound to an unused local ("account");
        # call for the side effect only.
        a.edit(data)
        # NOTE(review): this echoes a.values as fetched before the edit —
        # confirm whether the server's updated record should be shown.
        click.echo(json.dumps(a.values, indent=4))
    except ClientException as e:
        click.echo("Error: " + str(e.code))
        click.echo("Response: " + str(e.message))
        ctx.exit(1)
@accounts.command()
@click.option(
    "--status",
    type=str,
    prompt=False,
    help="Account status, defaults to 'active'"
)
@click.option(
    "--phone",
    type=str,
    prompt=False,
    help="Phone number, optional"
)
@click.option(
    "--account-type",
    type=str,
    prompt=True,
    help="Account type, one of 'senior_admin' or 'user', required"
)
@click.option(
    "--email",
    type=str,
    prompt=True,
    help="Email address, required"
)
@click.option(
    "--last-name",
    type=str,
    prompt=True,
    help="Last Name, required"
)
@click.option(
    "--first-name",
    type=str,
    prompt=True,
    help="First Name, required"
)
@click.option(
    "--account-id",
    type=int,
    prompt=True,
    help="Account ID, required"
)
@click.pass_context
def edit(ctx, account_id, first_name, last_name,
         email, account_type, phone, status='active'):
    """Edit an account"""
    try:
        a = ctx.obj['client'].account(account_id)
        data = {
            'first_name': first_name,
            'last_name': last_name,
            'email': email,
            'account_type': account_type,
            'phone': phone,
            'status': status
        }
        # Fix: the return value was bound to an unused local ("account");
        # call for the side effect only.
        a.edit(data)
        # NOTE(review): this echoes a.values as fetched before the edit —
        # confirm whether the server's updated record should be shown.
        click.echo(json.dumps(a.values, indent=4))
    except ClientException as e:
        click.echo("Error: " + str(e.code))
        click.echo("Response: " + str(e.message))
        ctx.exit(1)
@accounts.command()
@click.option(
    "--phone",
    type=str,
    prompt=False,
    help="Phone number, optional"
)
@click.option(
    "--password",
    type=str,
    prompt=True,
    help="Clear text password, required"
)
@click.option(
    "--account-type",
    type=str,
    prompt=True,
    help="Account type, one of 'senior_admin' or 'user', required"
)
@click.option(
    "--email",
    type=str,
    prompt=True,
    help="Email address, required"
)
@click.option(
    "--last-name",
    type=str,
    prompt=True,
    help="Last Name, required"
)
@click.option(
    "--first-name",
    type=str,
    prompt=True,
    help="First Name, required"
)
@click.pass_context
def create(ctx, first_name, last_name, email,
           account_type, password, phone):
    """Create an account"""
    try:
        # Assemble the new-account payload and submit it, then echo the
        # record the API returns.
        payload = {
            'first_name': first_name,
            'last_name': last_name,
            'email': email,
            'account_type': account_type,
            'password': password,
            'phone': phone
        }
        created = ctx.obj['client'].accounts.create(payload)
        click.echo(json.dumps(created.values, indent=4))
    except ClientException as e:
        click.echo("Error: " + str(e.code))
        click.echo("Response: " + str(e.message))
        ctx.exit(1)
@accounts.command()
@click.option(
    "--account-id",
    type=int,
    prompt=True,
    help="ID of the account, required"
)
@click.pass_context
def get(ctx, account_id):
    """Get a single account"""
    try:
        # Fetch the record and print it as pretty JSON.
        record = ctx.obj['client'].account(account_id)
        click.echo(json.dumps(record.values, indent=4))
    except ClientException as e:
        click.echo("Error: " + str(e.code))
        click.echo("Response: " + str(e.message))
        ctx.exit(1)
@accounts.command()
@click.option(
    "--search",
    default=False,
    help="Optionally search accounts"
)
@click.pass_context
def list(ctx, search):
    """List all accounts"""
    # NOTE: the command name intentionally shadows the builtin `list`;
    # it is only referenced through the click group.
    try:
        collection = ctx.obj['client'].accounts(search)
        # Comprehension instead of a manual append loop (ruff PERF401);
        # also renames the local that shadowed the module-level `accounts`.
        records = [account.values for account in collection]
        click.echo(json.dumps(records, indent=4))
    except ClientException as e:
        click.echo("Error: " + str(e.code))
        click.echo("Response: " + str(e.message))
        ctx.exit(1)
| 23.31441
| 66
| 0.587376
| 645
| 5,339
| 4.787597
| 0.137985
| 0.064119
| 0.058938
| 0.055052
| 0.736075
| 0.724417
| 0.724417
| 0.688148
| 0.688148
| 0.656736
| 0
| 0.002778
| 0.258288
| 5,339
| 228
| 67
| 23.416667
| 0.77702
| 0.022851
| 0
| 0.710145
| 0
| 0
| 0.196683
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028986
| false
| 0.067633
| 0.028986
| 0
| 0.057971
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
2ce73f0c3b447191fe5e0aabee0ec92948123804
| 46
|
py
|
Python
|
cyclon_project/api/models.py
|
robzenn92/EpTODocker
|
7e3f17bf2d914ee8aa5c7d6393cb65d48177bd71
|
[
"MIT"
] | null | null | null |
cyclon_project/api/models.py
|
robzenn92/EpTODocker
|
7e3f17bf2d914ee8aa5c7d6393cb65d48177bd71
|
[
"MIT"
] | 26
|
2017-10-23T08:04:00.000Z
|
2021-06-10T18:46:22.000Z
|
cyclon_project/api/models.py
|
robzenn92/EpTODocker
|
7e3f17bf2d914ee8aa5c7d6393cb65d48177bd71
|
[
"MIT"
] | null | null | null |
from .cyclon import Cyclon

# Module-level Cyclon instance created at import time.
# NOTE(review): presumably shared as a singleton by the importing views —
# confirm against callers.
cyclon = Cyclon()
| 11.5
| 26
| 0.73913
| 6
| 46
| 5.666667
| 0.5
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 3
| 27
| 15.333333
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fa02250d59f6b3ca738f6b65d8d7976ab2a44b53
| 120
|
py
|
Python
|
deepxde/backend/tensorflow_compat_v1/__init__.py
|
mitchelldaneker/deepxde
|
62e09b62ceaab6bda2ebbd02dc30ad99c2990302
|
[
"Apache-2.0"
] | 955
|
2019-06-21T21:56:02.000Z
|
2022-03-31T03:44:45.000Z
|
deepxde/backend/tensorflow_compat_v1/__init__.py
|
XRDevIEEE/deepxde
|
961e7e2bf1624374e74bf8d2da6b9c3e0eb8b0cc
|
[
"Apache-2.0"
] | 517
|
2019-07-25T16:47:44.000Z
|
2022-03-31T17:37:58.000Z
|
deepxde/backend/tensorflow_compat_v1/__init__.py
|
XRDevIEEE/deepxde
|
961e7e2bf1624374e74bf8d2da6b9c3e0eb8b0cc
|
[
"Apache-2.0"
] | 374
|
2019-06-24T00:44:16.000Z
|
2022-03-30T08:17:36.000Z
|
import os

# Request on-demand GPU memory growth; set before `.tensor` is imported
# below so TensorFlow sees the variable when it initializes.
os.environ["TF_FORCE_GPU_ALLOW_GROWTH"] = "true"

from .tensor import *  # pylint: disable=redefined-builtin
| 20
| 58
| 0.758333
| 17
| 120
| 5.117647
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 120
| 5
| 59
| 24
| 0.828571
| 0.275
| 0
| 0
| 0
| 0
| 0.341176
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fa0e0e6fa6ce6882367aa3ff00e316bdee5335d8
| 189
|
py
|
Python
|
OS/OS_test.py
|
coderlongren/PreliminaryPython
|
b5c7a87e41842c57aabb660de1514cba19c8bd78
|
[
"MIT"
] | null | null | null |
OS/OS_test.py
|
coderlongren/PreliminaryPython
|
b5c7a87e41842c57aabb660de1514cba19c8bd78
|
[
"MIT"
] | null | null | null |
OS/OS_test.py
|
coderlongren/PreliminaryPython
|
b5c7a87e41842c57aabb660de1514cba19c8bd78
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# -*- coding: UTF-8 -*-
# Small demo of the `os` module's platform / environment introspection.
import os
print(os.name)
# print(os.uname())  # os.uname() does not exist on Windows
print(os.environ)
print(os.environ.get("PATH"))
print(os.environ.get("JAVA_HOME"))
| 17.181818
| 35
| 0.68254
| 30
| 189
| 4.266667
| 0.6
| 0.273438
| 0.328125
| 0.265625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011696
| 0.095238
| 189
| 10
| 36
| 18.9
| 0.736842
| 0.386243
| 0
| 0
| 0
| 0
| 0.115044
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0.8
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
fa34e970fb38cb560c0117e368ad8db27281d55d
| 3,116
|
py
|
Python
|
source/functions/reptile_movie.py
|
Kung-327/CampusCyberInspectionTool2021
|
2ff64a18d750ff48ba6091bd1bf5d59bcaefa88b
|
[
"MIT"
] | null | null | null |
source/functions/reptile_movie.py
|
Kung-327/CampusCyberInspectionTool2021
|
2ff64a18d750ff48ba6091bd1bf5d59bcaefa88b
|
[
"MIT"
] | null | null | null |
source/functions/reptile_movie.py
|
Kung-327/CampusCyberInspectionTool2021
|
2ff64a18d750ff48ba6091bd1bf5d59bcaefa88b
|
[
"MIT"
] | null | null | null |
import csv
import requests
from bs4 import BeautifulSoup
# NOTE(review): `i` appears unused in the rest of the module — confirm
# before removing.
i = 1
# Yahoo Movies listing sections: this week / in theaters / coming soon.
movieschoose = ['thisweek', 'intheaters', 'comingsoon']
class reptile_movie:
    """Scraper for Yahoo Movies Taiwan listing pages.

    NOTE(review): the single method takes no self/cls, so it is only
    callable as a plain function via the class object.
    """

    def i_want_to_watch_movie():
        """Prompt for a listing section, scrape pages 1-19, and write CSVs.

        NOTE(review): each CSV is opened with mode 'w' *inside* the page
        loop, so every iteration overwrites the file — only the last page's
        rows survive. Also, all three CSV files receive the same scraped
        section (whichever `class1` selected); verify this duplication is
        intended.
        """
        print("你想看什麼時期?")
        print("[1]本周新片")
        print("[2]上映中")
        class1 = int(input("[3]即將上映\n"))
        for page in range(1, 20):
            url = 'https://movies.yahoo.com.tw/movie_' + movieschoose[class1-1] + '.html?page=' + str(page)
            response = requests.get(url=url)
            soup = BeautifulSoup(response.text, 'lxml')
            info_items = soup.find_all('div', 'release_info')
            with open('本週新片.csv', 'w', encoding='utf-8', newline='') as csv_file:
                csv_writer = csv.writer(csv_file)
                csv_writer.writerow(['電影片名', '電影英文片名', '上映時間', '網友期待度'])
                for item in info_items:
                    name = item.find('div', 'release_movie_name').a.text.strip()
                    english_name = item.find('div', 'en').a.text.strip()
                    release_time = item.find('div', 'release_movie_time').text.split(':')[-1].strip()
                    level = item.find('div', 'leveltext').span.text.strip()
                    csv_writer.writerow([name, english_name, release_time, level])
                    print('{}({}) 上映日:{} 期待度:{}'.format(name, english_name, release_time, level))
            with open('上映中.csv', 'w', encoding='utf-8', newline='') as csv_file:
                csv_writer = csv.writer(csv_file)
                csv_writer.writerow(['電影片名', '電影英文片名', '上映時間', '網友期待度'])
                for item in info_items:
                    name = item.find('div', 'release_movie_name').a.text.strip()
                    english_name = item.find('div', 'en').a.text.strip()
                    release_time = item.find('div', 'release_movie_time').text.split(':')[-1].strip()
                    level = item.find('div', 'leveltext').span.text.strip()
                    csv_writer.writerow([name, english_name, release_time, level])
                    print('{}({}) 上映日:{} 期待度:{}'.format(name, english_name, release_time, level))
            with open('即將上映.csv', 'w', encoding='utf-8', newline='') as csv_file:
                csv_writer = csv.writer(csv_file)
                csv_writer.writerow(['電影片名', '電影英文片名', '上映時間', '網友期待度'])
                for item in info_items:
                    name = item.find('div', 'release_movie_name').a.text.strip()
                    english_name = item.find('div', 'en').a.text.strip()
                    release_time = item.find('div', 'release_movie_time').text.split(':')[-1].strip()
                    level = item.find('div', 'leveltext').span.text.strip()
                    csv_writer.writerow([name, english_name, release_time, level])
                    print('{}({}) 上映日:{} 期待度:{}'.format(name, english_name, release_time, level))
# NOTE(review): if this guard sits at module level, `i_want_to_watch_movie`
# is a class attribute and this bare call raises NameError; it only resolves
# if the guard is inside the class body. Confirm the intended indentation
# (the safe module-level form is reptile_movie.i_want_to_watch_movie()).
if __name__ == "__main__":
    i_want_to_watch_movie()
| 48.6875
| 101
| 0.5138
| 347
| 3,116
| 4.414986
| 0.247839
| 0.070496
| 0.086162
| 0.062663
| 0.763055
| 0.740862
| 0.740862
| 0.740862
| 0.740862
| 0.740862
| 0
| 0.008103
| 0.326701
| 3,116
| 64
| 102
| 48.6875
| 0.722116
| 0
| 0
| 0.5625
| 0
| 0
| 0.150144
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020833
| false
| 0
| 0.0625
| 0
| 0.104167
| 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fa3ac79f7cefbfce3106003c35a024f7206bbb18
| 27,588
|
py
|
Python
|
tests/scripts/test_generate_terraform.py
|
chellebodnar-google/public-datasets-pipelines
|
872a02fb06a7f536d06f20ef8dcf525575b1aea6
|
[
"Apache-2.0"
] | null | null | null |
tests/scripts/test_generate_terraform.py
|
chellebodnar-google/public-datasets-pipelines
|
872a02fb06a7f536d06f20ef8dcf525575b1aea6
|
[
"Apache-2.0"
] | null | null | null |
tests/scripts/test_generate_terraform.py
|
chellebodnar-google/public-datasets-pipelines
|
872a02fb06a7f536d06f20ef8dcf525575b1aea6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
import random
import re
import shutil
import subprocess
import tempfile
import pytest
from ruamel import yaml
from scripts import generate_terraform
# Paths to the sample configs and the license header used by these tests.
PROJECT_ROOT = generate_terraform.PROJECT_ROOT
FILE_PATHS = {
    "dataset": PROJECT_ROOT / "samples" / "dataset.yaml",
    "pipeline": PROJECT_ROOT / "samples" / "pipeline.yaml",
    "license": PROJECT_ROOT / "templates" / "airflow" / "license_header.py.jinja2",
}

# Scratch environment the generator writes into during tests.
ENV_PATH = PROJECT_ROOT / ".test"
ENV_DATASETS_PATH = ENV_PATH / "datasets"

# Rebind `yaml` to a safe-loading ruamel instance (shadows the module import).
yaml = yaml.YAML(typ="safe")
@pytest.fixture
def dataset_path():
    """Yield a temporary dataset directory under DATASETS_PATH, cleaned up after."""
    with tempfile.TemporaryDirectory(
        dir=generate_terraform.DATASETS_PATH, suffix="_dataset"
    ) as dir_path:
        try:
            yield pathlib.Path(dir_path)
        finally:
            # Best-effort cleanup; ignore_errors tolerates an already-removed dir.
            shutil.rmtree(dir_path, ignore_errors=True)
@pytest.fixture
def pipeline_path(dataset_path, suffix="_pipeline"):
    """Yield a temporary pipeline directory under dataset_path/pipelines."""
    pipelines_dir = dataset_path / "pipelines"
    pipelines_dir.mkdir(parents=True, exist_ok=True)
    with tempfile.TemporaryDirectory(dir=pipelines_dir, suffix=suffix) as dir_path:
        try:
            yield pathlib.Path(dir_path)
        finally:
            # NOTE(review): unlike dataset_path this rmtree has no
            # ignore_errors, and TemporaryDirectory attempts its own cleanup
            # on exit — confirm the double removal never raises here.
            shutil.rmtree(dir_path)
@pytest.fixture
def project_id() -> str:
    """Fixed GCP project id used by the tests."""
    return "test-gcp-project-id"
@pytest.fixture
def bucket_name_prefix() -> str:
    """Fixed bucket-name prefix used by the tests."""
    return "1234-zyxwvu"
@pytest.fixture
def region() -> str:
    """Fixed GCP region used by the tests."""
    return "us-east4"
@pytest.fixture
def impersonating_acct() -> str:
    """Fixed service-account email used for impersonation in the tests."""
    return "test-impersonator@project.iam.gserviceaccount.com"
@pytest.fixture
def gcs_bucket_resource() -> dict:
    """Sample storage_bucket resource config (name is a Jinja template)."""
    return {
        "type": "storage_bucket",
        "name": "{{ friendly_project_id }}.{{ dataset_id }}",
    }
@pytest.fixture
def bq_table_resource() -> dict:
    """Sample bigquery_table resource config covering several column types."""
    return {
        "type": "bigquery_table",
        "table_id": "test_bq_table",
        "schema": [
            {"name": "test_col_string", "type": "STRING"},
            {"name": "test_col_int", "type": "INT64"},
            {"name": "test_col_numeric", "type": "NUMERIC"},
            {"name": "test_col_datetime", "type": "DATETIME"},
            {"name": "test_col_struct", "type": "STRUCT"},
        ],
    }
@pytest.fixture
def tf_state_bucket() -> str:
    """GCS bucket for remote Terraform state (enables backend.tf generation)."""
    return "test-terraform-state-bucket"
@pytest.fixture
def tf_state_prefix() -> str:
    """Object prefix inside the remote state bucket."""
    return "test/terraform/state"
@pytest.fixture
def env() -> str:
    """Environment name; generated files land under PROJECT_ROOT/.<env>."""
    return "test"
def set_dataset_ids_in_config_files(
    dataset_path: pathlib.Path, pipeline_path: pathlib.Path
):
    """Copy the sample configs into the test dirs and point all IDs at dataset_path.

    The dataset name and every bigquery_dataset/bigquery_table `dataset_id`
    are rewritten to the (randomized) test dataset directory name.
    """
    dataset_yaml = dataset_path / "pipelines" / "dataset.yaml"
    pipeline_yaml = pipeline_path / "pipeline.yaml"
    shutil.copyfile(FILE_PATHS["dataset"], dataset_yaml)
    shutil.copyfile(FILE_PATHS["pipeline"], pipeline_yaml)

    dataset_config = yaml.load(dataset_yaml)
    dataset_config["dataset"]["name"] = dataset_path.name
    for resource in dataset_config["resources"]:
        if resource["type"] == "bigquery_dataset":
            resource["dataset_id"] = dataset_path.name
    yaml.dump(dataset_config, dataset_yaml)

    pipeline_config = yaml.load(pipeline_yaml)
    for resource in pipeline_config["resources"]:
        if resource["type"] == "bigquery_table":
            resource["dataset_id"] = dataset_path.name
    yaml.dump(pipeline_config, pipeline_yaml)
def test_tf_templates_exist():
    """Every Terraform Jinja2 template referenced by the generator must exist."""
    # Only the paths are used, so iterate .values() instead of discarding the
    # key from .items() (ruff PERF102).
    for filepath in generate_terraform.TEMPLATE_PATHS.values():
        assert filepath.exists()
def test_main_generates_tf_files(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
    tf_state_bucket,
    tf_state_prefix,
):
    """With remote-state args set, main() writes all TF files in both output trees.

    terraform.tfvars and backend.tf must appear only under the .test env tree,
    never in the source datasets tree.
    """
    set_dataset_ids_in_config_files(dataset_path, pipeline_path)
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        tf_state_bucket,
        tf_state_prefix,
    )
    # Shared files are generated into both the env tree and the project tree.
    for path_prefix in (
        ENV_DATASETS_PATH / dataset_path.name / "infra",
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra",
    ):
        assert (path_prefix / "provider.tf").exists()
        assert (path_prefix / f"{dataset_path.name}_dataset.tf").exists()
        assert (path_prefix / f"{pipeline_path.name}_pipeline.tf").exists()
        assert (path_prefix / "variables.tf").exists()
    # Env-only files: tfvars and backend config stay out of the project tree.
    assert not (
        generate_terraform.DATASETS_PATH
        / dataset_path.name
        / "infra"
        / "terraform.tfvars"
    ).exists()
    assert (
        ENV_DATASETS_PATH / dataset_path.name / "infra" / "terraform.tfvars"
    ).exists()
    assert not (
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra" / "backend.tf"
    ).exists()
    assert (ENV_DATASETS_PATH / dataset_path.name / "infra" / "backend.tf").exists()
def test_main_without_tf_remote_state_generates_tf_files_except_backend_tf(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
):
    """Passing None for the remote-state bucket/prefix must skip backend.tf."""
    set_dataset_ids_in_config_files(dataset_path, pipeline_path)
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        None,
        None,
    )
    for path_prefix in (
        ENV_DATASETS_PATH / dataset_path.name / "infra",
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra",
    ):
        assert (path_prefix / "provider.tf").exists()
        assert (path_prefix / f"{dataset_path.name}_dataset.tf").exists()
        assert (path_prefix / f"{pipeline_path.name}_pipeline.tf").exists()
        assert (path_prefix / "variables.tf").exists()
        # No backend.tf anywhere when remote state is not configured.
        assert not (path_prefix / "backend.tf").exists()
    # tfvars is still generated, but only in the env tree.
    assert not (
        generate_terraform.DATASETS_PATH
        / dataset_path.name
        / "infra"
        / "terraform.tfvars"
    ).exists()
    assert (
        ENV_DATASETS_PATH / dataset_path.name / "infra" / "terraform.tfvars"
    ).exists()
# Expose the same fixture factory under a second name so a single test can
# request two distinct pipeline directories (each request creates its own
# temporary directory). NOTE(review): relies on pytest registering the aliased
# fixture object under the new attribute name — confirm with the pinned pytest.
pipeline_path_2 = pipeline_path
def test_main_with_multiple_pipelines(
    dataset_path,
    pipeline_path,
    pipeline_path_2,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
    tf_state_bucket,
    tf_state_prefix,
):
    """A dataset with two pipeline dirs must get one *_pipeline.tf per pipeline."""
    # Sanity check: the two pipeline fixtures produced distinct directories.
    assert pipeline_path.name != pipeline_path_2.name
    shutil.copyfile(FILE_PATHS["dataset"], dataset_path / "pipelines" / "dataset.yaml")
    shutil.copyfile(FILE_PATHS["pipeline"], pipeline_path / "pipeline.yaml")
    shutil.copyfile(FILE_PATHS["pipeline"], pipeline_path_2 / "pipeline.yaml")
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        tf_state_bucket,
        tf_state_prefix,
    )
    for path_prefix in (
        ENV_DATASETS_PATH / dataset_path.name / "infra",
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra",
    ):
        assert (path_prefix / "provider.tf").exists()
        assert (path_prefix / f"{dataset_path.name}_dataset.tf").exists()
        assert (path_prefix / f"{pipeline_path.name}_pipeline.tf").exists()
        assert (path_prefix / f"{pipeline_path_2.name}_pipeline.tf").exists()
        assert (path_prefix / "variables.tf").exists()
    # Env-only files: tfvars and backend config stay out of the project tree.
    assert not (
        generate_terraform.DATASETS_PATH
        / dataset_path.name
        / "infra"
        / "terraform.tfvars"
    ).exists()
    assert (
        ENV_DATASETS_PATH / dataset_path.name / "infra" / "terraform.tfvars"
    ).exists()
    assert not (
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra" / "backend.tf"
    ).exists()
    assert (ENV_DATASETS_PATH / dataset_path.name / "infra" / "backend.tf").exists()
def test_main_with_multiple_bq_dataset_ids(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
):
    """Multiple bigquery_dataset resources must yield distinct TF dataset blocks."""
    set_dataset_ids_in_config_files(dataset_path, pipeline_path)
    # First, declare an additional custom BQ dataset in dataset.yaml
    another_dataset_id = "another_dataset"
    assert another_dataset_id != dataset_path.name
    dataset_config = yaml.load(dataset_path / "pipelines" / "dataset.yaml")
    dataset_config["resources"].append(
        {"type": "bigquery_dataset", "dataset_id": another_dataset_id}
    )
    yaml.dump(dataset_config, dataset_path / "pipelines" / "dataset.yaml")
    # Then, add a BQ table under the additional BQ dataset
    pipeline_config = yaml.load(pipeline_path / "pipeline.yaml")
    pipeline_config["resources"].append(
        {
            "type": "bigquery_table",
            "table_id": "another_table",
            "dataset_id": another_dataset_id,
        }
    )
    yaml.dump(pipeline_config, pipeline_path / "pipeline.yaml")
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        None,
        None,
    )
    for path_prefix in (
        ENV_DATASETS_PATH / dataset_path.name / "infra",
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra",
    ):
        assert (path_prefix / f"{dataset_path.name}_dataset.tf").exists()
        assert (path_prefix / f"{pipeline_path.name}_pipeline.tf").exists()
    # Match the "google_bigquery_dataset" properties, i.e. any lines between the
    # curly braces, in the *_dataset.tf file
    regexp = r"\"google_bigquery_dataset\" \"" + r"[A-Za-z0-9_]+" + r"\" \{(.*?)\}"
    bq_dataset_tf_string = re.compile(regexp, flags=re.MULTILINE | re.DOTALL)
    for path_prefix in (
        ENV_DATASETS_PATH / dataset_path.name / "infra",
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra",
    ):
        matches = bq_dataset_tf_string.findall(
            (path_prefix / f"{dataset_path.name}_dataset.tf").read_text()
        )
        dataset_ids = set()
        for match in matches:
            result = re.search(r"dataset_id\s+\=\s+\"([A-Za-z0-9_]+)\"", match)
            assert result.group(1)
            dataset_ids.add(result.group(1))
        # Assert that the dataset_ids are unique
        assert len(dataset_ids) == len(matches)
        assert another_dataset_id in dataset_ids
        assert dataset_path.name in dataset_ids
def test_dataset_without_any_pipelines(
    dataset_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
    tf_state_bucket,
    tf_state_prefix,
):
    """A dataset with only dataset.yaml (no pipeline dirs) still generates TF files."""
    # No pipeline fixture here, so the pipelines dir must be created by hand.
    (dataset_path / "pipelines").mkdir(parents=True)
    shutil.copyfile(FILE_PATHS["dataset"], dataset_path / "pipelines" / "dataset.yaml")
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        tf_state_bucket,
        tf_state_prefix,
    )
    for path_prefix in (
        ENV_DATASETS_PATH / dataset_path.name / "infra",
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra",
    ):
        assert (path_prefix / "provider.tf").exists()
        assert (path_prefix / f"{dataset_path.name}_dataset.tf").exists()
    # Env-only files: tfvars and backend config stay out of the project tree.
    assert not (
        generate_terraform.DATASETS_PATH
        / dataset_path.name
        / "infra"
        / "terraform.tfvars"
    ).exists()
    assert (
        ENV_DATASETS_PATH / dataset_path.name / "infra" / "terraform.tfvars"
    ).exists()
    assert not (
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra" / "backend.tf"
    ).exists()
    assert (ENV_DATASETS_PATH / dataset_path.name / "infra" / "backend.tf").exists()
def test_dataset_path_does_not_exist(
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
    tf_state_bucket,
    tf_state_prefix,
):
    """main() must raise FileNotFoundError for a dataset dir that doesn't exist."""
    with pytest.raises(FileNotFoundError):
        generate_terraform.main(
            "non_existing_dir",
            project_id,
            bucket_name_prefix,
            region,
            impersonating_acct,
            env,
            tf_state_bucket,
            tf_state_prefix,
        )
def test_generated_tf_files_contain_license_headers(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
    tf_state_bucket,
    tf_state_prefix,
):
    """Each generated TF file must contain the license header exactly once."""
    set_dataset_ids_in_config_files(dataset_path, pipeline_path)
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        tf_state_bucket,
        tf_state_prefix,
    )
    license_header = pathlib.Path(
        generate_terraform.TEMPLATE_PATHS["license"]
    ).read_text()
    for path_prefix in (
        ENV_DATASETS_PATH / dataset_path.name / "infra",
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra",
    ):
        # count(...) == 1 also guards against a duplicated header.
        assert (path_prefix / "provider.tf").read_text().count(license_header) == 1
        assert (path_prefix / f"{dataset_path.name}_dataset.tf").read_text().count(
            license_header
        ) == 1
        assert (path_prefix / f"{pipeline_path.name}_pipeline.tf").read_text().count(
            license_header
        ) == 1
        assert (path_prefix / "variables.tf").read_text().count(license_header) == 1
    # Env-only files get the header too.
    assert (
        ENV_DATASETS_PATH / dataset_path.name / "infra" / "terraform.tfvars"
    ).read_text().count(license_header) == 1
    assert (
        ENV_DATASETS_PATH / dataset_path.name / "infra" / "backend.tf"
    ).read_text().count(license_header) == 1
def test_dataset_tf_file_contains_description_when_specified(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
):
    """The generated *_dataset.tf must carry the description set in dataset.yaml."""
    set_dataset_ids_in_config_files(dataset_path, pipeline_path)
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        None,
        None,
    )
    # Pass the path so ruamel opens and closes the file itself; the original
    # handed it a bare open() handle that was never closed.
    config = yaml.load(dataset_path / "pipelines" / "dataset.yaml")
    bq_dataset = next(
        (r for r in config["resources"] if r["type"] == "bigquery_dataset"), None
    )
    assert bq_dataset
    assert bq_dataset["description"]
    # Match the "google_bigquery_dataset" properties, i.e. any lines between the
    # curly braces, in the *_dataset.tf file
    regexp = r"\"google_bigquery_dataset\" \"" + dataset_path.name + r"\" \{(.*?)\}"
    bq_dataset_tf_string = re.compile(regexp, flags=re.MULTILINE | re.DOTALL)
    for path_prefix in (
        ENV_DATASETS_PATH / dataset_path.name / "infra",
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra",
    ):
        result = bq_dataset_tf_string.search(
            (path_prefix / f"{dataset_path.name}_dataset.tf").read_text()
        )
        assert re.search(r"dataset_id\s+\=", result.group(1))
        assert re.search(r"description\s+\=", result.group(1))
def test_bq_dataset_can_have_a_description_with_newlines_and_quotes(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
):
    """A multiline, quoted description must produce TF that `terraform fmt` accepts."""
    shutil.copyfile(FILE_PATHS["dataset"], dataset_path / "pipelines" / "dataset.yaml")
    shutil.copyfile(FILE_PATHS["pipeline"], pipeline_path / "pipeline.yaml")
    # Pass the path so ruamel closes the file itself; the original leaked an
    # open() handle.
    config = yaml.load(dataset_path / "pipelines" / "dataset.yaml")
    # Get a bigquery_dataset resource and modify the `description` field
    bq_dataset = next(
        (r for r in config["resources"] if r["type"] == "bigquery_dataset"), None
    )
    test_description = 'Multiline\nstring with\n"quotes"'
    bq_dataset["description"] = test_description
    with open(dataset_path / "pipelines" / "dataset.yaml", "w") as file:
        yaml.dump(config, file)
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        None,
        None,
    )
    # `terraform fmt` exits non-zero on malformed HCL, failing the test.
    env_dataset_path = ENV_DATASETS_PATH / dataset_path.name
    subprocess.check_call(["terraform", "fmt"], cwd=env_dataset_path / "infra")
def test_dataset_tf_has_no_bq_dataset_description_when_unspecified(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
):
    """Removing `description` from dataset.yaml must drop it from the TF output."""
    set_dataset_ids_in_config_files(dataset_path, pipeline_path)
    # Pass the path so ruamel closes the file itself; the original leaked an
    # open() handle.
    config = yaml.load(dataset_path / "pipelines" / "dataset.yaml")
    # Get the first bigquery_dataset resource and delete the `description` field
    bq_dataset = next(
        (r for r in config["resources"] if r["type"] == "bigquery_dataset")
    )
    del bq_dataset["description"]
    with open(dataset_path / "pipelines" / "dataset.yaml", "w") as file:
        yaml.dump(config, file)
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        None,
        None,
    )
    # Match the "google_bigquery_dataset" properties, i.e. any lines between the
    # curly braces, in the *_dataset.tf file
    regexp = r"\"google_bigquery_dataset\" \"" + dataset_path.name + r"\" \{(.*?)\}"
    bq_dataset_tf_string = re.compile(regexp, flags=re.MULTILINE | re.DOTALL)
    for path_prefix in (
        ENV_DATASETS_PATH / dataset_path.name / "infra",
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra",
    ):
        result = bq_dataset_tf_string.search(
            (path_prefix / f"{dataset_path.name}_dataset.tf").read_text()
        )
        assert re.search(r"dataset_id\s+\=", result.group(1))
        assert not re.search(r"description\s+\=", result.group(1))
def test_pipeline_tf_contains_optional_properties_when_specified(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
):
    """Optional BQ table properties in pipeline.yaml must surface in *_pipeline.tf."""
    set_dataset_ids_in_config_files(dataset_path, pipeline_path)
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        None,
        None,
    )
    # Pass the path so ruamel closes the file itself; the original leaked an
    # open() handle.
    config = yaml.load(pipeline_path / "pipeline.yaml")
    bq_table = next(
        (r for r in config["resources"] if r["type"] == "bigquery_table"), None
    )
    assert bq_table
    assert bq_table["description"]
    assert bq_table["time_partitioning"]
    assert bq_table["clustering"]
    assert bq_table["deletion_protection"]
    # Match the "google_bigquery_table" properties, i.e. any lines between the
    # curly braces, in the *_pipeline.tf file
    regexp = (
        r"\"google_bigquery_table\" \""
        + bq_table["dataset_id"]
        + "_"
        + bq_table["table_id"]
        + r"\" \{(.*?)^\}"
    )
    bq_table_tf_string = re.compile(regexp, flags=re.MULTILINE | re.DOTALL)
    for path_prefix in (
        ENV_DATASETS_PATH / dataset_path.name / "infra",
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra",
    ):
        result = bq_table_tf_string.search(
            (path_prefix / f"{pipeline_path.name}_pipeline.tf").read_text()
        )
        assert re.search(r"table_id\s+\=", result.group(1))
        assert re.search(r"description\s+\=", result.group(1))
        assert re.search(r"time_partitioning\s+\{", result.group(1))
        assert re.search(r"clustering\s+\=", result.group(1))
        assert re.search(r"deletion_protection\s+\=", result.group(1))
def test_pipeline_tf_has_no_optional_properties_when_unspecified(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
):
    """Deleting the optional BQ table fields must drop them from *_pipeline.tf."""
    set_dataset_ids_in_config_files(dataset_path, pipeline_path)
    # Pass the path so ruamel closes the file itself; the original leaked an
    # open() handle.
    config = yaml.load(pipeline_path / "pipeline.yaml")
    # Get the first bigquery_table resource and delete the optional fields
    bq_table = next((r for r in config["resources"] if r["type"] == "bigquery_table"))
    del bq_table["description"]
    del bq_table["time_partitioning"]
    del bq_table["clustering"]
    del bq_table["deletion_protection"]
    with open(pipeline_path / "pipeline.yaml", "w") as file:
        yaml.dump(config, file)
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        None,
        None,
    )
    # Match the "google_bigquery_table" properties, i.e. any lines between the
    # curly braces, in the *_pipeline.tf file
    regexp = (
        r"\"google_bigquery_table\" \""
        + bq_table["dataset_id"]
        + "_"
        + bq_table["table_id"]
        + r"\" \{(.*?)^\}"
    )
    bq_table_tf_string = re.compile(regexp, flags=re.MULTILINE | re.DOTALL)
    for path_prefix in (
        ENV_DATASETS_PATH / dataset_path.name / "infra",
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra",
    ):
        result = bq_table_tf_string.search(
            (path_prefix / f"{pipeline_path.name}_pipeline.tf").read_text()
        )
        assert re.search(r"table_id\s+\=", result.group(1))
        assert not re.search(r"description\s+\=", result.group(1))
        assert not re.search(r"time_partitioning\s+\{", result.group(1))
        assert not re.search(r"clustering\s+\=", result.group(1))
        assert not re.search(r"deletion_protection\s+\=", result.group(1))
def test_bq_table_can_have_a_description_with_newlines_and_quotes(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
):
    """A multiline, quoted table description must produce TF that `terraform fmt` accepts."""
    set_dataset_ids_in_config_files(dataset_path, pipeline_path)
    # Pass the path so ruamel closes the file itself; the original leaked an
    # open() handle.
    config = yaml.load(pipeline_path / "pipeline.yaml")
    # Get a bigquery_table resource and modify the `description` field
    bq_table = next(
        (r for r in config["resources"] if r["type"] == "bigquery_table"), None
    )
    bq_table["description"] = 'Multiline\nstring with\n"quotes"'
    with open(pipeline_path / "pipeline.yaml", "w") as file:
        yaml.dump(config, file)
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        None,
        None,
    )
    # `terraform fmt` exits non-zero on malformed HCL, failing the test.
    env_dataset_path = ENV_DATASETS_PATH / dataset_path.name
    subprocess.check_call(["terraform", "fmt"], cwd=env_dataset_path / "infra")
def test_bq_table_name_starts_with_digits_but_tf_resource_name_does_not(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
):
    """Digit-leading table names must keep a non-digit-leading TF resource name."""
    set_dataset_ids_in_config_files(dataset_path, pipeline_path)
    # Pass the path so ruamel closes the file itself; the original leaked an
    # open() handle.
    config = yaml.load(pipeline_path / "pipeline.yaml")
    # str() was redundant inside the f-string; randint is formatted directly.
    table_name_starting_with_digit = f"{random.randint(0, 9)}_table"
    # In the YAML config, set the BigQuery table name to start with a digit
    bq_table = next(
        (r for r in config["resources"] if r["type"] == "bigquery_table"), None
    )
    bq_table["table_id"] = table_name_starting_with_digit
    with open(pipeline_path / "pipeline.yaml", "w") as file:
        yaml.dump(config, file)
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        None,
        None,
    )
    # Match the Terraform resource name and the table_id value in the BigQuery
    # table's resource definition. As a concrete example, substrings in
    # ALL_CAPS are matched below:
    #
    #   resource "google_bigquery_table" "RESOURCE_NAME_STARTING_WITH_NONDIGIT" {
    #     description = ""
    #     table_id    = "TABLE_NAME_STARTING_WITH_DIGIT"
    #   }
    tf_resource_regexp = r"\"google_bigquery_table\" \"([a-zA-Z0-9_-]+)\" .*?"
    table_id_regexp = r"table_id\s+\= \"(.*?)\"\n"
    matcher = re.compile(
        tf_resource_regexp + table_id_regexp,
        flags=re.MULTILINE | re.DOTALL,
    )
    for path_prefix in (
        ENV_DATASETS_PATH / dataset_path.name / "infra",
        generate_terraform.DATASETS_PATH / dataset_path.name / "infra",
    ):
        result = matcher.search(
            (path_prefix / f"{pipeline_path.name}_pipeline.tf").read_text()
        )
        tf_resource_name = result.group(1)
        table_id = result.group(2)
        assert table_id == table_name_starting_with_digit
        assert not tf_resource_name[0].isdigit()
        assert table_id[0].isdigit()
        assert table_id in tf_resource_name
def test_bucket_names_must_not_contain_dots_and_google():
    """validate_bucket_name rejects dots and 'google'-like substrings."""
    invalid_names = (
        "test.bucket.name",
        "google-bucket",
        "google.bucket.name",
        "g00gle",
        "googl3",
    )
    for invalid_name in invalid_names:
        with pytest.raises(ValueError):
            generate_terraform.validate_bucket_name(invalid_name)
def test_bucket_names_must_use_hyphens_instead_of_underscores():
    """validate_bucket_name rejects any underscore, even mixed with hyphens."""
    invalid_names = (
        "test_underscore",
        "test-bucket_with-underscore",
    )
    for invalid_name in invalid_names:
        with pytest.raises(ValueError):
            generate_terraform.validate_bucket_name(invalid_name)
def test_bucket_prefixes_must_use_hyphens_instead_of_underscores(
    dataset_path,
    project_id,
    region,
    impersonating_acct,
    env,
    tf_state_bucket,
    tf_state_prefix,
):
    """main() must reject bucket-name prefixes containing underscores."""
    for prefix in (
        "test_prefix",
        "test-hyphens_and_underscores",
    ):
        with pytest.raises(ValueError):
            generate_terraform.main(
                dataset_path.name,
                project_id,
                prefix,
                region,
                impersonating_acct,
                env,
                tf_state_bucket,
                tf_state_prefix,
            )
def test_validation_on_generated_tf_files_in_dot_env_dir(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
    tf_state_bucket=None,
):
    """Generated TF in the .test env tree must pass `terraform validate`."""
    set_dataset_ids_in_config_files(dataset_path, pipeline_path)
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        None,
        None,
    )
    env_dataset_path = ENV_DATASETS_PATH / dataset_path.name
    # check_call raises CalledProcessError (failing the test) on non-zero exit.
    subprocess.check_call(["terraform", "init"], cwd=env_dataset_path / "infra")
    subprocess.check_call(["terraform", "validate"], cwd=env_dataset_path / "infra")
def test_validation_on_generated_tf_files_in_project_dir(
    dataset_path,
    pipeline_path,
    project_id,
    bucket_name_prefix,
    region,
    impersonating_acct,
    env,
):
    """Generated TF in the source datasets tree must pass `terraform validate`."""
    set_dataset_ids_in_config_files(dataset_path, pipeline_path)
    generate_terraform.main(
        dataset_path.name,
        project_id,
        bucket_name_prefix,
        region,
        impersonating_acct,
        env,
        None,
        None,
    )
    project_dataset_path = generate_terraform.DATASETS_PATH / dataset_path.name
    # check_call raises CalledProcessError (failing the test) on non-zero exit.
    subprocess.check_call(["terraform", "init"], cwd=(project_dataset_path / "infra"))
    subprocess.check_call(
        ["terraform", "validate"], cwd=(project_dataset_path / "infra")
    )
| 28.948583
| 87
| 0.652095
| 3,318
| 27,588
| 5.098252
| 0.085895
| 0.086486
| 0.067392
| 0.059825
| 0.763183
| 0.737881
| 0.720501
| 0.703712
| 0.674923
| 0.654706
| 0
| 0.00276
| 0.238292
| 27,588
| 952
| 88
| 28.978992
| 0.802227
| 0.07003
| 0
| 0.722222
| 0
| 0
| 0.137687
| 0.032363
| 0
| 0
| 0
| 0
| 0.092593
| 1
| 0.042328
| false
| 0
| 0.011905
| 0.011905
| 0.066138
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fa493fb83e7f6dcd12211af6436f0f32593e85ad
| 192
|
py
|
Python
|
fabfile.py
|
neuronalmotion/foobot-exporter
|
627ccbd0fd373ed315d387a61b2d6cbc363dd3a3
|
[
"MIT"
] | null | null | null |
fabfile.py
|
neuronalmotion/foobot-exporter
|
627ccbd0fd373ed315d387a61b2d6cbc363dd3a3
|
[
"MIT"
] | null | null | null |
fabfile.py
|
neuronalmotion/foobot-exporter
|
627ccbd0fd373ed315d387a61b2d6cbc363dd3a3
|
[
"MIT"
] | null | null | null |
from fabric.api import *
def prepare_dev_environment():
    """Install the Ansible Galaxy roles listed in ansible/requirements.yml (via sudo)."""
    local('sudo ansible-galaxy install -r ansible/requirements.yml')
def deploy():
    """Run the main Ansible playbook (ansible/site.yml) on the local machine."""
    local('ansible-playbook ansible/site.yml')
| 19.2
| 72
| 0.729167
| 25
| 192
| 5.52
| 0.76
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151042
| 192
| 9
| 73
| 21.333333
| 0.846626
| 0
| 0
| 0
| 0
| 0
| 0.463158
| 0.126316
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
fa6c2ed0ceb82993ffe04a603f9db8dbd15e2598
| 59
|
py
|
Python
|
BNNs/__init__.py
|
kw-lee/Bayesian-Neural-Networks
|
3327fcf85e47c15d86c872211427bff133880c34
|
[
"MIT"
] | 1
|
2020-05-06T01:38:24.000Z
|
2020-05-06T01:38:24.000Z
|
BNNs/__init__.py
|
kw-lee/Bayesian-Neural-Networks
|
3327fcf85e47c15d86c872211427bff133880c34
|
[
"MIT"
] | null | null | null |
BNNs/__init__.py
|
kw-lee/Bayesian-Neural-Networks
|
3327fcf85e47c15d86c872211427bff133880c34
|
[
"MIT"
] | null | null | null |
from BNNs import priors, utils, base_net, Bayes_By_Backprop
| 59
| 59
| 0.847458
| 10
| 59
| 4.7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 59
| 1
| 59
| 59
| 0.886792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d736184871f97648487ea3548c8995d249707dbc
| 106
|
py
|
Python
|
apps/overtime/admin.py
|
LHerdy/People_Manager
|
e35ba2333a26e1cf35b7234af10f3c849eaa0270
|
[
"MIT"
] | null | null | null |
apps/overtime/admin.py
|
LHerdy/People_Manager
|
e35ba2333a26e1cf35b7234af10f3c849eaa0270
|
[
"MIT"
] | 1
|
2021-08-15T15:02:10.000Z
|
2021-08-15T15:02:25.000Z
|
apps/overtime/admin.py
|
LHerdy/People_Manager
|
e35ba2333a26e1cf35b7234af10f3c849eaa0270
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from apps.overtime.models import Overtime

# Expose the Overtime model in the Django admin with the default ModelAdmin.
admin.site.register(Overtime)
| 21.2
| 41
| 0.839623
| 15
| 106
| 5.933333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09434
| 106
| 5
| 42
| 21.2
| 0.927083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d7593e7bdb5076e36f4b2f72e1409f6973e32fd7
| 95
|
py
|
Python
|
hangman/validations/__init__.py
|
eduard727/proyecto_introduccion_a_la_programacion
|
f4dc6467ccf5c00ee05db61118b847a2413a3f6c
|
[
"MIT"
] | null | null | null |
hangman/validations/__init__.py
|
eduard727/proyecto_introduccion_a_la_programacion
|
f4dc6467ccf5c00ee05db61118b847a2413a3f6c
|
[
"MIT"
] | 1
|
2021-06-02T00:27:27.000Z
|
2021-06-02T00:27:27.000Z
|
hangman/validations/__init__.py
|
eduard727/proyecto_introduccion_a_la_programacion
|
f4dc6467ccf5c00ee05db61118b847a2413a3f6c
|
[
"MIT"
] | 11
|
2019-10-10T21:09:43.000Z
|
2020-02-16T04:05:00.000Z
|
def valid_letter(letter):
    """Return True if `letter` is a single alphabetic character.

    Implements the validation that the original stub (which unconditionally
    returned True, with a TODO-style comment) left unwritten.
    """
    return isinstance(letter, str) and len(letter) == 1 and letter.isalpha()
| 23.75
| 52
| 0.726316
| 15
| 95
| 4.533333
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.231579
| 95
| 3
| 53
| 31.666667
| 0.931507
| 0.484211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
d7636c415ed45c4bc717ec01f85fc76f3c5125d3
| 224
|
py
|
Python
|
src/clearskies/tests/simple_api/models/users.py
|
cmancone/clearskies
|
aaa33fef6d03205faf26f123183a46adc1dbef9c
|
[
"MIT"
] | 4
|
2021-04-23T18:13:06.000Z
|
2022-03-26T01:51:01.000Z
|
src/clearskies/tests/simple_api/models/users.py
|
cmancone/clearskies
|
aaa33fef6d03205faf26f123183a46adc1dbef9c
|
[
"MIT"
] | null | null | null |
src/clearskies/tests/simple_api/models/users.py
|
cmancone/clearskies
|
aaa33fef6d03205faf26f123183a46adc1dbef9c
|
[
"MIT"
] | null | null | null |
from clearskies import Models
from . import user
class Users(Models):
    """Collection class for user records, backed by a cursor backend."""

    def __init__(self, cursor_backend, columns):
        # Delegates directly to Models; kept to make the construction
        # signature explicit at this level.
        super().__init__(cursor_backend, columns)

    def model_class(self):
        # Tells the Models base which single-record class this collection uses.
        return user.User
| 20.363636
| 49
| 0.705357
| 28
| 224
| 5.25
| 0.571429
| 0.176871
| 0.272109
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.209821
| 224
| 10
| 50
| 22.4
| 0.830508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.285714
| 0.142857
| 0.857143
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
d76ea1d2fc73cd3907672ed9de6b10c28dfb32d9
| 25
|
py
|
Python
|
src/raman_fitting/delegating/__init__.py
|
MyPyDavid/raman_fitting
|
a827ab578ae801e185384159f145ae4dfad39549
|
[
"MIT"
] | 3
|
2021-03-03T21:02:11.000Z
|
2021-05-14T09:24:40.000Z
|
src/raman_fitting/delegating/__init__.py
|
MyPyDavid/raman_fitting
|
a827ab578ae801e185384159f145ae4dfad39549
|
[
"MIT"
] | 8
|
2021-06-25T22:54:53.000Z
|
2021-08-09T10:07:30.000Z
|
src/raman_fitting/delegating/__init__.py
|
MyPyDavid/raman_fitting
|
a827ab578ae801e185384159f145ae4dfad39549
|
[
"MIT"
] | 2
|
2021-07-08T09:49:49.000Z
|
2022-03-19T14:43:01.000Z
|
# import main_delegator#
| 12.5
| 24
| 0.8
| 3
| 25
| 6.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 25
| 1
| 25
| 25
| 0.863636
| 0.84
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d775e01a2750a1baa4b131411f8141d05335c5b6
| 124
|
py
|
Python
|
mbd_pay/__init__.py
|
shaoxyz/mbd_pay
|
4d3f5411fe376925e88f974530e1b8becb5733a4
|
[
"MIT"
] | null | null | null |
mbd_pay/__init__.py
|
shaoxyz/mbd_pay
|
4d3f5411fe376925e88f974530e1b8becb5733a4
|
[
"MIT"
] | null | null | null |
mbd_pay/__init__.py
|
shaoxyz/mbd_pay
|
4d3f5411fe376925e88f974530e1b8becb5733a4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
@Time : 2021/4/21 23:34
@Author : github.com/shaoxyz
"""
from .client import Client
| 15.5
| 30
| 0.556452
| 17
| 124
| 4.058824
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 0.241935
| 124
| 7
| 31
| 17.714286
| 0.606383
| 0.637097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d778e9e3ee58de161ac6da911e3fa89fc110e6cc
| 100
|
py
|
Python
|
answer/14.py
|
harurunrunrun/edu90_myanswer
|
3aeaf9cb68a3bafe550681c6d240c622678c6d03
|
[
"CC0-1.0"
] | null | null | null |
answer/14.py
|
harurunrunrun/edu90_myanswer
|
3aeaf9cb68a3bafe550681c6d240c622678c6d03
|
[
"CC0-1.0"
] | null | null | null |
answer/14.py
|
harurunrunrun/edu90_myanswer
|
3aeaf9cb68a3bafe550681c6d240c622678c6d03
|
[
"CC0-1.0"
] | 1
|
2021-06-04T06:08:15.000Z
|
2021-06-04T06:08:15.000Z
|
# Read n followed by 2n integers from stdin; print the sum of absolute
# differences between the sorted first half and the sorted second half.
n, *a = map(int, open(0).read().split())
first_half = sorted(a[:n])
second_half = sorted(a[n:])
print(sum(abs(i - j) for i, j in zip(first_half, second_half)))
| 100
| 100
| 0.61
| 24
| 100
| 2.541667
| 0.708333
| 0.065574
| 0.262295
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010309
| 0.03
| 100
| 1
| 100
| 100
| 0.618557
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
d77a8ef1ef72a8509eeb9c0a84f3de6519075cdf
| 305
|
py
|
Python
|
utils/utils.py
|
paddorch/CharCNN.paddle
|
2d9923e40fc841f10490fd0f133a8b2d27a7f2d8
|
[
"Apache-2.0"
] | 3
|
2021-08-20T03:38:01.000Z
|
2021-08-30T00:55:44.000Z
|
utils/utils.py
|
paddorch/CharCNN.paddle
|
2d9923e40fc841f10490fd0f133a8b2d27a7f2d8
|
[
"Apache-2.0"
] | null | null | null |
utils/utils.py
|
paddorch/CharCNN.paddle
|
2d9923e40fc841f10490fd0f133a8b2d27a7f2d8
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import random
import paddle
# import torch
def set_seed(seed):
    """Seed every RNG source this project uses, for reproducible runs."""
    random.seed(seed)
    np.random.seed(seed)
    paddle.seed(seed)
    # Torch equivalents from the codebase this was ported from, kept for reference:
    # torch.manual_seed(seed)
    # torch.cuda.manual_seed(seed)
    # torch.cuda.manual_seed_all(seed)
    # torch.backends.cudnn.deterministic = True
| 20.333333
| 47
| 0.708197
| 44
| 305
| 4.795455
| 0.386364
| 0.227488
| 0.184834
| 0.180095
| 0.265403
| 0.265403
| 0.265403
| 0
| 0
| 0
| 0
| 0
| 0.183607
| 305
| 14
| 48
| 21.785714
| 0.84739
| 0.459016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.428571
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ad540b83b2e46d7e15210d43128afa7f8b4cbe00
| 60
|
py
|
Python
|
main.py
|
florinsuciu3/session3
|
07f62aa43232fa0f6395b0735bdf6b0e7717d115
|
[
"MIT"
] | null | null | null |
main.py
|
florinsuciu3/session3
|
07f62aa43232fa0f6395b0735bdf6b0e7717d115
|
[
"MIT"
] | null | null | null |
main.py
|
florinsuciu3/session3
|
07f62aa43232fa0f6395b0735bdf6b0e7717d115
|
[
"MIT"
] | null | null | null |
# Minimal smoke-test script: prints two fixed lines.
print('Hello world! From GitHub.')
print('2nd print line.')
| 20
| 34
| 0.7
| 9
| 60
| 4.666667
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018868
| 0.116667
| 60
| 2
| 35
| 30
| 0.773585
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
ad83397a7a98cc0d0e1ba844821a4180dbf7a55e
| 3,727
|
py
|
Python
|
z2/part3/updated_part2_batch/jm/parser_errors_2/362820908.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 1
|
2020-04-16T12:13:47.000Z
|
2020-04-16T12:13:47.000Z
|
z2/part3/updated_part2_batch/jm/parser_errors_2/362820908.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:50:15.000Z
|
2020-05-19T14:58:30.000Z
|
z2/part3/updated_part2_batch/jm/parser_errors_2/362820908.py
|
kozakusek/ipp-2020-testy
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
[
"MIT"
] | 18
|
2020-03-06T17:45:13.000Z
|
2020-06-09T19:18:31.000Z
|
from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 362820908
"""
"""
random actions, total chaos
"""
board = gamma_new(4, 5, 2, 6)
assert board is not None
assert gamma_move(board, 1, 2, 3) == 1
assert gamma_move(board, 2, 2, 1) == 1
assert gamma_move(board, 1, 2, 1) == 0
assert gamma_move(board, 2, 2, 3) == 0
assert gamma_busy_fields(board, 2) == 1
assert gamma_move(board, 1, 4, 2) == 0
assert gamma_move(board, 1, 3, 4) == 1
assert gamma_busy_fields(board, 1) == 2
assert gamma_move(board, 2, 3, 0) == 1
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_move(board, 1, 0, 0) == 1
assert gamma_move(board, 2, 2, 0) == 1
assert gamma_busy_fields(board, 1) == 3
assert gamma_golden_move(board, 1, 0, 3) == 0
assert gamma_move(board, 2, 4, 2) == 0
assert gamma_move(board, 2, 1, 4) == 1
assert gamma_move(board, 1, 1, 1) == 1
assert gamma_move(board, 1, 2, 1) == 0
board358029183 = gamma_board(board)
assert board358029183 is not None
assert board358029183 == (".2.1\n" "..1.\n" "....\n" ".12.\n" "1.22\n")
del board358029183
board358029183 = None
assert gamma_move(board, 1, 2, 1) == 0
assert gamma_move(board, 2, 3, 0) == 0
assert gamma_move(board, 1, 0, 4) == 1
assert gamma_busy_fields(board, 1) == 5
assert gamma_move(board, 2, 0, 4) == 0
assert gamma_move(board, 1, 0, 4) == 0
assert gamma_golden_move(board, 1, 0, 2) == 0
assert gamma_move(board, 2, 1, 0) == 1
assert gamma_free_fields(board, 2) == 10
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_move(board, 2, 2, 3) == 0
assert gamma_move(board, 2, 0, 0) == 0
board792791024 = gamma_board(board)
assert board792791024 is not None
assert board792791024 == ("12.1\n" "..1.\n" "....\n" ".12.\n" "1222\n")
del board792791024
board792791024 = None
assert gamma_move(board, 1, 1, 4) == 0
assert gamma_move(board, 1, 3, 4) == 0
assert gamma_move(board, 2, 1, 3) == 1
assert gamma_move(board, 2, 0, 3) == 1
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_move(board, 1, 2, 4) == 1
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_golden_possible(board, 1) == 1
assert gamma_golden_move(board, 1, 0, 3) == 1
board224927122 = gamma_board(board)
assert board224927122 is not None
assert board224927122 == ("1211\n" "121.\n" "....\n" ".12.\n" "1222\n")
del board224927122
board224927122 = None
assert gamma_move(board, 2, 2, 4) == 0
assert gamma_busy_fields(board, 2) == 6
assert gamma_free_fields(board, 2) == 7
assert gamma_move(board, 1, 2, 1) == 0
assert gamma_move(board, 2, 1, 0) == 0
assert gamma_golden_possible(board, 2) == 1
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_move(board, 1, 3, 4) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_move(board, 1, 2, 1) == 0
assert gamma_move(board, 1, 2, 4) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_golden_move(board, 2, 4, 3) == 0
assert gamma_move(board, 1, 0, 0) == 0
assert gamma_golden_possible(board, 1) == 0
assert gamma_golden_move(board, 1, 3, 1) == 0
assert gamma_move(board, 2, 1, 2) == 1
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_move(board, 2, 2, 2) == 1
assert gamma_move(board, 2, 0, 3) == 0
assert gamma_move(board, 1, 2, 4) == 0
assert gamma_move(board, 1, 1, 1) == 0
assert gamma_move(board, 2, 1, 0) == 0
assert gamma_busy_fields(board, 2) == 8
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_move(board, 1, 3, 0) == 0
board208558001 = gamma_board(board)
assert board208558001 is not None
assert board208558001 == ("1211\n" "121.\n" ".22.\n" ".12.\n" "1222\n")
del board208558001
board208558001 = None
assert gamma_move(board, 2, 3, 4) == 0
gamma_delete(board)
| 31.058333
| 71
| 0.676684
| 670
| 3,727
| 3.61194
| 0.068657
| 0.3
| 0.309917
| 0.413223
| 0.710331
| 0.709917
| 0.634298
| 0.521901
| 0.292562
| 0.292562
| 0
| 0.154758
| 0.162597
| 3,727
| 119
| 72
| 31.319328
| 0.620634
| 0
| 0
| 0.212121
| 0
| 0
| 0.032976
| 0
| 0
| 0
| 0
| 0
| 0.757576
| 1
| 0
| false
| 0
| 0.010101
| 0
| 0.010101
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ad9db310f434a5b9cae1970b4ab1e2ec2da49c3f
| 252
|
py
|
Python
|
test/test_address_contact.py
|
L-A-V-S/python_training
|
f34794709931ba2b1c32fa58c9eea6cd6098e3ac
|
[
"Apache-2.0"
] | 1
|
2020-11-03T20:45:32.000Z
|
2020-11-03T20:45:32.000Z
|
test/test_address_contact.py
|
L-A-V-S/python_training
|
f34794709931ba2b1c32fa58c9eea6cd6098e3ac
|
[
"Apache-2.0"
] | null | null | null |
test/test_address_contact.py
|
L-A-V-S/python_training
|
f34794709931ba2b1c32fa58c9eea6cd6098e3ac
|
[
"Apache-2.0"
] | null | null | null |
def test_address_on_home_page(app):
address_from_home_page = app.contact.get_contact_list()[0]
address_from_edit_page = app.contact.get_contact_info_from_edit_page(0)
assert address_from_home_page.address == address_from_edit_page.address
| 42
| 75
| 0.825397
| 41
| 252
| 4.512195
| 0.365854
| 0.237838
| 0.194595
| 0.205405
| 0.259459
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008811
| 0.099206
| 252
| 5
| 76
| 50.4
| 0.806167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a8ee27f54dd41ccb2d139a3c9f4f2aa1264ecbac
| 141
|
py
|
Python
|
venv/Lib/site-packages/nipype/utils/__init__.py
|
richung99/digitizePlots
|
6b408c820660a415a289726e3223e8f558d3e18b
|
[
"MIT"
] | 585
|
2015-01-12T16:06:47.000Z
|
2022-03-26T14:51:08.000Z
|
nipype/utils/__init__.py
|
tamires-consulting/nipype
|
b7879d75a63b6500b2e7d2c3eba5aa7670339274
|
[
"Apache-2.0"
] | 2,329
|
2015-01-01T09:56:41.000Z
|
2022-03-30T14:24:49.000Z
|
nipype/utils/__init__.py
|
tamires-consulting/nipype
|
b7879d75a63b6500b2e7d2c3eba5aa7670339274
|
[
"Apache-2.0"
] | 487
|
2015-01-20T01:04:52.000Z
|
2022-03-21T21:22:47.000Z
|
# -*- coding: utf-8 -*-
from .onetime import OneTimeProperty, setattr_on_read
from .tmpdirs import TemporaryDirectory, InTemporaryDirectory
| 28.2
| 61
| 0.794326
| 15
| 141
| 7.333333
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008
| 0.113475
| 141
| 4
| 62
| 35.25
| 0.872
| 0.148936
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d10982e503bc2864f173310051072f1854202121
| 54
|
py
|
Python
|
01_Language/01_Functions/python/deg2rad.py
|
cliff363825/TwentyFour
|
09df59bd5d275e66463e343647f46027397d1233
|
[
"MIT"
] | 3
|
2020-06-28T07:42:51.000Z
|
2021-01-15T10:32:11.000Z
|
01_Language/01_Functions/python/deg2rad.py
|
cliff363825/TwentyFour
|
09df59bd5d275e66463e343647f46027397d1233
|
[
"MIT"
] | 9
|
2021-03-10T22:45:40.000Z
|
2022-02-27T06:53:20.000Z
|
01_Language/01_Functions/python/deg2rad.py
|
cliff363825/TwentyFour
|
09df59bd5d275e66463e343647f46027397d1233
|
[
"MIT"
] | 1
|
2021-01-15T10:51:24.000Z
|
2021-01-15T10:51:24.000Z
|
# coding: utf-8
import math
print(math.radians(45))
| 9
| 23
| 0.703704
| 9
| 54
| 4.222222
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 0.148148
| 54
| 5
| 24
| 10.8
| 0.76087
| 0.240741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
d111034a690a2dc3fcbbd0dfbe534655f582181a
| 185
|
py
|
Python
|
tbonlineproject/RegistrationRecaptcha/forms.py
|
nathangeffen/tbonline3
|
1b8a3af8d2dc1ee8083ca6638d025e94bd98f253
|
[
"MIT"
] | null | null | null |
tbonlineproject/RegistrationRecaptcha/forms.py
|
nathangeffen/tbonline3
|
1b8a3af8d2dc1ee8083ca6638d025e94bd98f253
|
[
"MIT"
] | 3
|
2021-06-08T23:57:13.000Z
|
2022-01-13T03:42:01.000Z
|
tbonlineproject/RegistrationRecaptcha/forms.py
|
nathangeffen/tbonline-2
|
0d5869197e66a0057fa07cb99f21dde7f5b47c30
|
[
"MIT"
] | null | null | null |
from registration.forms import RegistrationForm
from CommentRecaptcha.fields import ReCaptchaField
class RegistrationFormRecaptcha(RegistrationForm):
recaptcha = ReCaptchaField()
| 26.428571
| 50
| 0.854054
| 15
| 185
| 10.533333
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102703
| 185
| 6
| 51
| 30.833333
| 0.951807
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d113d09ed9172e83b61ce84499121c56b6dcac97
| 155
|
py
|
Python
|
previous_code_2/level2/hey-i-already-did-that/test.py
|
kevinpz/google-foobar
|
f6b12b5ff1ad8282faf98f6ad079dbaba104dc38
|
[
"Apache-2.0"
] | null | null | null |
previous_code_2/level2/hey-i-already-did-that/test.py
|
kevinpz/google-foobar
|
f6b12b5ff1ad8282faf98f6ad079dbaba104dc38
|
[
"Apache-2.0"
] | null | null | null |
previous_code_2/level2/hey-i-already-did-that/test.py
|
kevinpz/google-foobar
|
f6b12b5ff1ad8282faf98f6ad079dbaba104dc38
|
[
"Apache-2.0"
] | null | null | null |
from solution import solution
def test_solution_1():
assert solution('1211', 10) == 1
def test_solution_2():
assert solution('210022', 3) == 3
| 15.5
| 37
| 0.677419
| 22
| 155
| 4.590909
| 0.545455
| 0.138614
| 0.29703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136
| 0.193548
| 155
| 9
| 38
| 17.222222
| 0.672
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d16bab94358384a1bbdf62c05ac64b68037aad36
| 307
|
py
|
Python
|
core/base/end_points.py
|
SinanKanidagli/cgn_realip
|
7e25ffe173e24f1206950c4765dfcf8dd2247e8e
|
[
"MIT"
] | null | null | null |
core/base/end_points.py
|
SinanKanidagli/cgn_realip
|
7e25ffe173e24f1206950c4765dfcf8dd2247e8e
|
[
"MIT"
] | null | null | null |
core/base/end_points.py
|
SinanKanidagli/cgn_realip
|
7e25ffe173e24f1206950c4765dfcf8dd2247e8e
|
[
"MIT"
] | null | null | null |
from abc import ABC,abstractmethod
class EndPoints(ABC):
@property
@abstractmethod
def LOGIN(self) -> str:
pass
@property
@abstractmethod
def LOGOUT(self) -> str:
pass
@property
@abstractmethod
def IP_INFORMATION(self) -> str:
pass
| 17.055556
| 36
| 0.586319
| 30
| 307
| 5.966667
| 0.5
| 0.368715
| 0.418994
| 0.212291
| 0.402235
| 0.402235
| 0
| 0
| 0
| 0
| 0
| 0
| 0.332248
| 307
| 18
| 37
| 17.055556
| 0.873171
| 0
| 0
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0.214286
| 0.071429
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
0f0519b117610164e2f2b48bbdd3ebf8e333b78a
| 260
|
py
|
Python
|
tests/classes/super_filter.py
|
Wiosoft-Crafts/jsonclasses
|
c01ec27886da64be16a115147aab331c93a72e8b
|
[
"MIT"
] | 50
|
2021-08-18T08:08:04.000Z
|
2022-03-20T07:23:26.000Z
|
tests/classes/super_filter.py
|
zhichao-github/jsonclasses
|
6ee8545683b446939c9c68414fc351cbc917ad41
|
[
"MIT"
] | 1
|
2021-11-23T02:12:29.000Z
|
2021-11-23T13:35:26.000Z
|
tests/classes/super_filter.py
|
zhichao-github/jsonclasses
|
6ee8545683b446939c9c68414fc351cbc917ad41
|
[
"MIT"
] | 8
|
2021-07-01T02:39:15.000Z
|
2021-12-10T02:20:18.000Z
|
from __future__ import annotations
from jsonclasses import jsonclass, types
@jsonclass
class SuperFilter:
list1: list[int] | None = types.listof(int).filter(lambda i: i % 2 == 0)
list2: list[int] | None = types.listof(int).filter(types.mod(2).eq(0))
| 28.888889
| 76
| 0.711538
| 38
| 260
| 4.763158
| 0.578947
| 0.077348
| 0.121547
| 0.176796
| 0.342541
| 0.342541
| 0.342541
| 0
| 0
| 0
| 0
| 0.027273
| 0.153846
| 260
| 8
| 77
| 32.5
| 0.795455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0f20af429eb72e64b4deaaa25b760899118c3a87
| 31,753
|
py
|
Python
|
ClientFiles_Python/Serializer_Struct.py
|
westpoint-robotics/boson_usma_sdk
|
b4ef56cf4250ebd860821a41ae62a2ca571f146b
|
[
"MIT"
] | null | null | null |
ClientFiles_Python/Serializer_Struct.py
|
westpoint-robotics/boson_usma_sdk
|
b4ef56cf4250ebd860821a41ae62a2ca571f146b
|
[
"MIT"
] | null | null | null |
ClientFiles_Python/Serializer_Struct.py
|
westpoint-robotics/boson_usma_sdk
|
b4ef56cf4250ebd860821a41ae62a2ca571f146b
|
[
"MIT"
] | null | null | null |
# /////////////////////////////////////////////////////
# // DO NOT EDIT. This is a machine generated file. //
# /////////////////////////////////////////////////////
from struct import unpack,pack_into
from .ReturnCodes import FLR_RESULT
# Garbage Variable to avoid ever having blank code
class FLR_ROI_T():
def __init__(self):
self.rowStart = None
self.rowStop = None
self.colStart = None
self.colStop = None
# end of __init__()
def __eq__(self, other):
return (isinstance(other, self.__class__) and self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
# end of FLR_ROI_T()
def byteToFLR_ROI_T(inBuff,inPtr):
returnStruct = FLR_ROI_T()
returnStruct.rowStart, returnStruct.rowStop, returnStruct.colStart, returnStruct.colStop = unpack(">HHHH",inBuff[inPtr:inPtr+8])
return returnStruct
# end of byteToFLR_ROI_T()
def FLR_ROI_TToByte(inVal, outBuff, outPtr):
pack_into(">HHHH",outBuff,outPtr,inVal.rowStart, inVal.rowStop, inVal.colStart, inVal.colStop)
# end of FLR_ROI_TToByte()
class FLR_ROIC_FPATEMP_TABLE_T():
def __init__(self):
self.value = [None]*32
# end of __init__()
def __eq__(self, other):
return (isinstance(other, self.__class__) and self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
# end of FLR_ROIC_FPATEMP_TABLE_T()
def byteToFLR_ROIC_FPATEMP_TABLE_T(inBuff,inPtr):
returnStruct = FLR_ROIC_FPATEMP_TABLE_T()
returnStruct.value[0], returnStruct.value[1], returnStruct.value[2], returnStruct.value[3], returnStruct.value[4], returnStruct.value[5], returnStruct.value[6], returnStruct.value[7], returnStruct.value[8], returnStruct.value[9], returnStruct.value[10], returnStruct.value[11], returnStruct.value[12], returnStruct.value[13], returnStruct.value[14], returnStruct.value[15], returnStruct.value[16], returnStruct.value[17], returnStruct.value[18], returnStruct.value[19], returnStruct.value[20], returnStruct.value[21], returnStruct.value[22], returnStruct.value[23], returnStruct.value[24], returnStruct.value[25], returnStruct.value[26], returnStruct.value[27], returnStruct.value[28], returnStruct.value[29], returnStruct.value[30], returnStruct.value[31] = unpack(">hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh",inBuff[inPtr:inPtr+64])
return returnStruct
# end of byteToFLR_ROIC_FPATEMP_TABLE_T()
def FLR_ROIC_FPATEMP_TABLE_TToByte(inVal, outBuff, outPtr):
pack_into(">hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh",outBuff,outPtr,inVal.value[0], inVal.value[1], inVal.value[2], inVal.value[3], inVal.value[4], inVal.value[5], inVal.value[6], inVal.value[7], inVal.value[8], inVal.value[9], inVal.value[10], inVal.value[11], inVal.value[12], inVal.value[13], inVal.value[14], inVal.value[15], inVal.value[16], inVal.value[17], inVal.value[18], inVal.value[19], inVal.value[20], inVal.value[21], inVal.value[22], inVal.value[23], inVal.value[24], inVal.value[25], inVal.value[26], inVal.value[27], inVal.value[28], inVal.value[29], inVal.value[30], inVal.value[31])
# end of FLR_ROIC_FPATEMP_TABLE_TToByte()
class FLR_BOSON_PARTNUMBER_T():
def __init__(self):
self.value = [None]*20
# end of __init__()
def __eq__(self, other):
return (isinstance(other, self.__class__) and self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
# end of FLR_BOSON_PARTNUMBER_T()
def byteToFLR_BOSON_PARTNUMBER_T(inBuff,inPtr):
returnStruct = FLR_BOSON_PARTNUMBER_T()
returnStruct.value[0], returnStruct.value[1], returnStruct.value[2], returnStruct.value[3], returnStruct.value[4], returnStruct.value[5], returnStruct.value[6], returnStruct.value[7], returnStruct.value[8], returnStruct.value[9], returnStruct.value[10], returnStruct.value[11], returnStruct.value[12], returnStruct.value[13], returnStruct.value[14], returnStruct.value[15], returnStruct.value[16], returnStruct.value[17], returnStruct.value[18], returnStruct.value[19] = unpack(">BBBBBBBBBBBBBBBBBBBB",inBuff[inPtr:inPtr+20])
return returnStruct
# end of byteToFLR_BOSON_PARTNUMBER_T()
def FLR_BOSON_PARTNUMBER_TToByte(inVal, outBuff, outPtr):
pack_into(">BBBBBBBBBBBBBBBBBBBB",outBuff,outPtr,inVal.value[0], inVal.value[1], inVal.value[2], inVal.value[3], inVal.value[4], inVal.value[5], inVal.value[6], inVal.value[7], inVal.value[8], inVal.value[9], inVal.value[10], inVal.value[11], inVal.value[12], inVal.value[13], inVal.value[14], inVal.value[15], inVal.value[16], inVal.value[17], inVal.value[18], inVal.value[19])
# end of FLR_BOSON_PARTNUMBER_TToByte()
class FLR_BOSON_SENSOR_PARTNUMBER_T():
def __init__(self):
self.value = [None]*32
# end of __init__()
def __eq__(self, other):
return (isinstance(other, self.__class__) and self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
# end of FLR_BOSON_SENSOR_PARTNUMBER_T()
def byteToFLR_BOSON_SENSOR_PARTNUMBER_T(inBuff,inPtr):
returnStruct = FLR_BOSON_SENSOR_PARTNUMBER_T()
returnStruct.value[0], returnStruct.value[1], returnStruct.value[2], returnStruct.value[3], returnStruct.value[4], returnStruct.value[5], returnStruct.value[6], returnStruct.value[7], returnStruct.value[8], returnStruct.value[9], returnStruct.value[10], returnStruct.value[11], returnStruct.value[12], returnStruct.value[13], returnStruct.value[14], returnStruct.value[15], returnStruct.value[16], returnStruct.value[17], returnStruct.value[18], returnStruct.value[19], returnStruct.value[20], returnStruct.value[21], returnStruct.value[22], returnStruct.value[23], returnStruct.value[24], returnStruct.value[25], returnStruct.value[26], returnStruct.value[27], returnStruct.value[28], returnStruct.value[29], returnStruct.value[30], returnStruct.value[31] = unpack(">BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB",inBuff[inPtr:inPtr+32])
return returnStruct
# end of byteToFLR_BOSON_SENSOR_PARTNUMBER_T()
def FLR_BOSON_SENSOR_PARTNUMBER_TToByte(inVal, outBuff, outPtr):
pack_into(">BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB",outBuff,outPtr,inVal.value[0], inVal.value[1], inVal.value[2], inVal.value[3], inVal.value[4], inVal.value[5], inVal.value[6], inVal.value[7], inVal.value[8], inVal.value[9], inVal.value[10], inVal.value[11], inVal.value[12], inVal.value[13], inVal.value[14], inVal.value[15], inVal.value[16], inVal.value[17], inVal.value[18], inVal.value[19], inVal.value[20], inVal.value[21], inVal.value[22], inVal.value[23], inVal.value[24], inVal.value[25], inVal.value[26], inVal.value[27], inVal.value[28], inVal.value[29], inVal.value[30], inVal.value[31])
# end of FLR_BOSON_SENSOR_PARTNUMBER_TToByte()
class FLR_BOSON_GAIN_SWITCH_PARAMS_T():
def __init__(self):
self.pHighToLowPercent = None
self.cHighToLowPercent = None
self.pLowToHighPercent = None
self.hysteresisPercent = None
# end of __init__()
def __eq__(self, other):
return (isinstance(other, self.__class__) and self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
# end of FLR_BOSON_GAIN_SWITCH_PARAMS_T()
def byteToFLR_BOSON_GAIN_SWITCH_PARAMS_T(inBuff,inPtr):
returnStruct = FLR_BOSON_GAIN_SWITCH_PARAMS_T()
returnStruct.pHighToLowPercent, returnStruct.cHighToLowPercent, returnStruct.pLowToHighPercent, returnStruct.hysteresisPercent = unpack(">IIII",inBuff[inPtr:inPtr+16])
return returnStruct
# end of byteToFLR_BOSON_GAIN_SWITCH_PARAMS_T()
def FLR_BOSON_GAIN_SWITCH_PARAMS_TToByte(inVal, outBuff, outPtr):
pack_into(">IIII",outBuff,outPtr,inVal.pHighToLowPercent, inVal.cHighToLowPercent, inVal.pLowToHighPercent, inVal.hysteresisPercent)
# end of FLR_BOSON_GAIN_SWITCH_PARAMS_TToByte()
class FLR_DVO_YCBCR_SETTINGS_T():
def __init__(self):
self.ycbcrFormat = None
self.cbcrOrder = None
self.yOrder = None
# end of __init__()
def __eq__(self, other):
return (isinstance(other, self.__class__) and self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
# end of FLR_DVO_YCBCR_SETTINGS_T()
def byteToFLR_DVO_YCBCR_SETTINGS_T(inBuff,inPtr):
returnStruct = FLR_DVO_YCBCR_SETTINGS_T()
returnStruct.ycbcrFormat, returnStruct.cbcrOrder, returnStruct.yOrder = unpack(">iii",inBuff[inPtr:inPtr+12])
return returnStruct
# end of byteToFLR_DVO_YCBCR_SETTINGS_T()
def FLR_DVO_YCBCR_SETTINGS_TToByte(inVal, outBuff, outPtr):
pack_into(">iii",outBuff,outPtr,inVal.ycbcrFormat, inVal.cbcrOrder, inVal.yOrder)
# end of FLR_DVO_YCBCR_SETTINGS_TToByte()
class FLR_DVO_RGB_SETTINGS_T():
def __init__(self):
self.rgbFormat = None
self.rgbOrder = None
# end of __init__()
def __eq__(self, other):
return (isinstance(other, self.__class__) and self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
# end of FLR_DVO_RGB_SETTINGS_T()
def byteToFLR_DVO_RGB_SETTINGS_T(inBuff,inPtr):
returnStruct = FLR_DVO_RGB_SETTINGS_T()
returnStruct.rgbFormat, returnStruct.rgbOrder = unpack(">ii",inBuff[inPtr:inPtr+8])
return returnStruct
# end of byteToFLR_DVO_RGB_SETTINGS_T()
def FLR_DVO_RGB_SETTINGS_TToByte(inVal, outBuff, outPtr):
pack_into(">ii",outBuff,outPtr,inVal.rgbFormat, inVal.rgbOrder)
# end of FLR_DVO_RGB_SETTINGS_TToByte()
class FLR_DVO_LCD_CONFIG_T():
def __init__(self):
self.width = None
self.hPulseWidth = None
self.hBackP = None
self.hFrontP = None
self.height = None
self.vPulseWidth = None
self.vBackP = None
self.vFrontP = None
self.outputFormat = None
self.control = None
self.rotation = None
self.pixelClockkHz = None
# end of __init__()
def __eq__(self, other):
return (isinstance(other, self.__class__) and self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
# end of FLR_DVO_LCD_CONFIG_T()
def byteToFLR_DVO_LCD_CONFIG_T(inBuff,inPtr):
returnStruct = FLR_DVO_LCD_CONFIG_T()
returnStruct.width, returnStruct.hPulseWidth, returnStruct.hBackP, returnStruct.hFrontP, returnStruct.height, returnStruct.vPulseWidth, returnStruct.vBackP, returnStruct.vFrontP, returnStruct.outputFormat, returnStruct.control, returnStruct.rotation, returnStruct.pixelClockkHz = unpack(">IIIIIIIIIIII",inBuff[inPtr:inPtr+48])
return returnStruct
# end of byteToFLR_DVO_LCD_CONFIG_T()
def FLR_DVO_LCD_CONFIG_TToByte(inVal, outBuff, outPtr):
pack_into(">IIIIIIIIIIII",outBuff,outPtr,inVal.width, inVal.hPulseWidth, inVal.hBackP, inVal.hFrontP, inVal.height, inVal.vPulseWidth, inVal.vBackP, inVal.vFrontP, inVal.outputFormat, inVal.control, inVal.rotation, inVal.pixelClockkHz)
# end of FLR_DVO_LCD_CONFIG_TToByte()
class FLR_CAPTURE_SETTINGS_T():
def __init__(self):
self.dataSrc = None
self.numFrames = None
self.bufferIndex = None
# end of __init__()
def __eq__(self, other):
return (isinstance(other, self.__class__) and self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
# end of FLR_CAPTURE_SETTINGS_T()
def byteToFLR_CAPTURE_SETTINGS_T(inBuff,inPtr):
returnStruct = FLR_CAPTURE_SETTINGS_T()
returnStruct.dataSrc, returnStruct.numFrames, returnStruct.bufferIndex = unpack(">iIH",inBuff[inPtr:inPtr+10])
return returnStruct
# end of byteToFLR_CAPTURE_SETTINGS_T()
def FLR_CAPTURE_SETTINGS_TToByte(inVal, outBuff, outPtr):
pack_into(">iIH",outBuff,outPtr,inVal.dataSrc, inVal.numFrames, inVal.bufferIndex)
# end of FLR_CAPTURE_SETTINGS_TToByte()
class FLR_CAPTURE_FILE_SETTINGS_T():
def __init__(self):
self.captureFileType = None
self.filePath = [None]*128
# end of __init__()
def __eq__(self, other):
return (isinstance(other, self.__class__) and self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
# end of FLR_CAPTURE_FILE_SETTINGS_T()
def byteToFLR_CAPTURE_FILE_SETTINGS_T(inBuff,inPtr):
returnStruct = FLR_CAPTURE_FILE_SETTINGS_T()
returnStruct.captureFileType, returnStruct.filePath[0], returnStruct.filePath[1], returnStruct.filePath[2], returnStruct.filePath[3], returnStruct.filePath[4], returnStruct.filePath[5], returnStruct.filePath[6], returnStruct.filePath[7], returnStruct.filePath[8], returnStruct.filePath[9], returnStruct.filePath[10], returnStruct.filePath[11], returnStruct.filePath[12], returnStruct.filePath[13], returnStruct.filePath[14], returnStruct.filePath[15], returnStruct.filePath[16], returnStruct.filePath[17], returnStruct.filePath[18], returnStruct.filePath[19], returnStruct.filePath[20], returnStruct.filePath[21], returnStruct.filePath[22], returnStruct.filePath[23], returnStruct.filePath[24], returnStruct.filePath[25], returnStruct.filePath[26], returnStruct.filePath[27], returnStruct.filePath[28], returnStruct.filePath[29], returnStruct.filePath[30], returnStruct.filePath[31], returnStruct.filePath[32], returnStruct.filePath[33], returnStruct.filePath[34], returnStruct.filePath[35], returnStruct.filePath[36], returnStruct.filePath[37], returnStruct.filePath[38], returnStruct.filePath[39], returnStruct.filePath[40], returnStruct.filePath[41], returnStruct.filePath[42], returnStruct.filePath[43], returnStruct.filePath[44], returnStruct.filePath[45], returnStruct.filePath[46], returnStruct.filePath[47], returnStruct.filePath[48], returnStruct.filePath[49], returnStruct.filePath[50], returnStruct.filePath[51], returnStruct.filePath[52], returnStruct.filePath[53], returnStruct.filePath[54], returnStruct.filePath[55], returnStruct.filePath[56], returnStruct.filePath[57], returnStruct.filePath[58], returnStruct.filePath[59], returnStruct.filePath[60], returnStruct.filePath[61], returnStruct.filePath[62], returnStruct.filePath[63], returnStruct.filePath[64], returnStruct.filePath[65], returnStruct.filePath[66], returnStruct.filePath[67], returnStruct.filePath[68], returnStruct.filePath[69], returnStruct.filePath[70], returnStruct.filePath[71], returnStruct.filePath[72], 
returnStruct.filePath[73], returnStruct.filePath[74], returnStruct.filePath[75], returnStruct.filePath[76], returnStruct.filePath[77], returnStruct.filePath[78], returnStruct.filePath[79], returnStruct.filePath[80], returnStruct.filePath[81], returnStruct.filePath[82], returnStruct.filePath[83], returnStruct.filePath[84], returnStruct.filePath[85], returnStruct.filePath[86], returnStruct.filePath[87], returnStruct.filePath[88], returnStruct.filePath[89], returnStruct.filePath[90], returnStruct.filePath[91], returnStruct.filePath[92], returnStruct.filePath[93], returnStruct.filePath[94], returnStruct.filePath[95], returnStruct.filePath[96], returnStruct.filePath[97], returnStruct.filePath[98], returnStruct.filePath[99], returnStruct.filePath[100], returnStruct.filePath[101], returnStruct.filePath[102], returnStruct.filePath[103], returnStruct.filePath[104], returnStruct.filePath[105], returnStruct.filePath[106], returnStruct.filePath[107], returnStruct.filePath[108], returnStruct.filePath[109], returnStruct.filePath[110], returnStruct.filePath[111], returnStruct.filePath[112], returnStruct.filePath[113], returnStruct.filePath[114], returnStruct.filePath[115], returnStruct.filePath[116], returnStruct.filePath[117], returnStruct.filePath[118], returnStruct.filePath[119], returnStruct.filePath[120], returnStruct.filePath[121], returnStruct.filePath[122], returnStruct.filePath[123], returnStruct.filePath[124], returnStruct.filePath[125], returnStruct.filePath[126], returnStruct.filePath[127] = unpack(">iBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB",inBuff[inPtr:inPtr+132])
return returnStruct
# end of byteToFLR_CAPTURE_FILE_SETTINGS_T()
def FLR_CAPTURE_FILE_SETTINGS_TToByte(inVal, outBuff, outPtr):
pack_into(">iBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB",outBuff,outPtr,inVal.captureFileType, inVal.filePath[0], inVal.filePath[1], inVal.filePath[2], inVal.filePath[3], inVal.filePath[4], inVal.filePath[5], inVal.filePath[6], inVal.filePath[7], inVal.filePath[8], inVal.filePath[9], inVal.filePath[10], inVal.filePath[11], inVal.filePath[12], inVal.filePath[13], inVal.filePath[14], inVal.filePath[15], inVal.filePath[16], inVal.filePath[17], inVal.filePath[18], inVal.filePath[19], inVal.filePath[20], inVal.filePath[21], inVal.filePath[22], inVal.filePath[23], inVal.filePath[24], inVal.filePath[25], inVal.filePath[26], inVal.filePath[27], inVal.filePath[28], inVal.filePath[29], inVal.filePath[30], inVal.filePath[31], inVal.filePath[32], inVal.filePath[33], inVal.filePath[34], inVal.filePath[35], inVal.filePath[36], inVal.filePath[37], inVal.filePath[38], inVal.filePath[39], inVal.filePath[40], inVal.filePath[41], inVal.filePath[42], inVal.filePath[43], inVal.filePath[44], inVal.filePath[45], inVal.filePath[46], inVal.filePath[47], inVal.filePath[48], inVal.filePath[49], inVal.filePath[50], inVal.filePath[51], inVal.filePath[52], inVal.filePath[53], inVal.filePath[54], inVal.filePath[55], inVal.filePath[56], inVal.filePath[57], inVal.filePath[58], inVal.filePath[59], inVal.filePath[60], inVal.filePath[61], inVal.filePath[62], inVal.filePath[63], inVal.filePath[64], inVal.filePath[65], inVal.filePath[66], inVal.filePath[67], inVal.filePath[68], inVal.filePath[69], inVal.filePath[70], inVal.filePath[71], inVal.filePath[72], inVal.filePath[73], inVal.filePath[74], inVal.filePath[75], inVal.filePath[76], inVal.filePath[77], inVal.filePath[78], inVal.filePath[79], inVal.filePath[80], inVal.filePath[81], inVal.filePath[82], inVal.filePath[83], inVal.filePath[84], inVal.filePath[85], inVal.filePath[86], inVal.filePath[87], inVal.filePath[88], inVal.filePath[89], inVal.filePath[90], 
inVal.filePath[91], inVal.filePath[92], inVal.filePath[93], inVal.filePath[94], inVal.filePath[95], inVal.filePath[96], inVal.filePath[97], inVal.filePath[98], inVal.filePath[99], inVal.filePath[100], inVal.filePath[101], inVal.filePath[102], inVal.filePath[103], inVal.filePath[104], inVal.filePath[105], inVal.filePath[106], inVal.filePath[107], inVal.filePath[108], inVal.filePath[109], inVal.filePath[110], inVal.filePath[111], inVal.filePath[112], inVal.filePath[113], inVal.filePath[114], inVal.filePath[115], inVal.filePath[116], inVal.filePath[117], inVal.filePath[118], inVal.filePath[119], inVal.filePath[120], inVal.filePath[121], inVal.filePath[122], inVal.filePath[123], inVal.filePath[124], inVal.filePath[125], inVal.filePath[126], inVal.filePath[127])
# end of FLR_CAPTURE_FILE_SETTINGS_TToByte()
class FLR_TF_WLUT_T():
def __init__(self):
self.value = [None]*32
# end of __init__()
def __eq__(self, other):
return (isinstance(other, self.__class__) and self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
# end of FLR_TF_WLUT_T()
def byteToFLR_TF_WLUT_T(inBuff,inPtr):
returnStruct = FLR_TF_WLUT_T()
returnStruct.value[0], returnStruct.value[1], returnStruct.value[2], returnStruct.value[3], returnStruct.value[4], returnStruct.value[5], returnStruct.value[6], returnStruct.value[7], returnStruct.value[8], returnStruct.value[9], returnStruct.value[10], returnStruct.value[11], returnStruct.value[12], returnStruct.value[13], returnStruct.value[14], returnStruct.value[15], returnStruct.value[16], returnStruct.value[17], returnStruct.value[18], returnStruct.value[19], returnStruct.value[20], returnStruct.value[21], returnStruct.value[22], returnStruct.value[23], returnStruct.value[24], returnStruct.value[25], returnStruct.value[26], returnStruct.value[27], returnStruct.value[28], returnStruct.value[29], returnStruct.value[30], returnStruct.value[31] = unpack(">BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB",inBuff[inPtr:inPtr+32])
return returnStruct
# end of byteToFLR_TF_WLUT_T()
def FLR_TF_WLUT_TToByte(inVal, outBuff, outPtr):
    """Serialize the first 32 LUT entries of inVal into outBuff at outPtr."""
    pack_into(">32B", outBuff, outPtr, *inVal.value[:32])
# end of FLR_TF_WLUT_TToByte()
class FLR_TF_NF_LUT_T():
    """Noise-filter LUT with 17 entries (serialized as uint16 each)."""
    def __init__(self):
        # Pre-sized so deserializers/callers can fill entries by index.
        self.value = [None] * 17
    # end of __init__()
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self.__eq__(other)
# end of FLR_TF_NF_LUT_T()
def byteToFLR_TF_NF_LUT_T(inBuff, inPtr):
    """Deserialize 34 bytes (17 big-endian uint16) at inPtr into a FLR_TF_NF_LUT_T."""
    out = FLR_TF_NF_LUT_T()
    out.value = list(unpack(">17H", inBuff[inPtr:inPtr + 34]))
    return out
# end of byteToFLR_TF_NF_LUT_T()
def FLR_TF_NF_LUT_TToByte(inVal, outBuff, outPtr):
    """Serialize the 17 uint16 LUT entries of inVal into outBuff at outPtr (big-endian)."""
    pack_into(">17H", outBuff, outPtr, *inVal.value[:17])
# end of FLR_TF_NF_LUT_TToByte()
class FLR_TF_TEMP_SIGNAL_COMP_FACTOR_LUT_T():
    """Temperature signal compensation-factor LUT, 17 entries (uint16 on the wire)."""
    def __init__(self):
        # Pre-sized so deserializers/callers can fill entries by index.
        self.value = [None] * 17
    # end of __init__()
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self.__eq__(other)
# end of FLR_TF_TEMP_SIGNAL_COMP_FACTOR_LUT_T()
def byteToFLR_TF_TEMP_SIGNAL_COMP_FACTOR_LUT_T(inBuff, inPtr):
    """Deserialize 34 bytes (17 big-endian uint16) at inPtr into a FLR_TF_TEMP_SIGNAL_COMP_FACTOR_LUT_T."""
    out = FLR_TF_TEMP_SIGNAL_COMP_FACTOR_LUT_T()
    out.value = list(unpack(">17H", inBuff[inPtr:inPtr + 34]))
    return out
# end of byteToFLR_TF_TEMP_SIGNAL_COMP_FACTOR_LUT_T()
def FLR_TF_TEMP_SIGNAL_COMP_FACTOR_LUT_TToByte(inVal, outBuff, outPtr):
    """Serialize the 17 uint16 LUT entries of inVal into outBuff at outPtr (big-endian)."""
    pack_into(">17H", outBuff, outPtr, *inVal.value[:17])
# end of FLR_TF_TEMP_SIGNAL_COMP_FACTOR_LUT_TToByte()
class FLR_SPNR_PSD_KERNEL_T():
    """SPNR power-spectral-density kernel: 64 entries (float32 on the wire)."""
    def __init__(self):
        # Pre-sized so deserializers/callers can fill entries by index.
        self.fvalue = [None] * 64
    # end of __init__()
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self.__eq__(other)
# end of FLR_SPNR_PSD_KERNEL_T()
def byteToFLR_SPNR_PSD_KERNEL_T(inBuff, inPtr):
    """Deserialize 256 bytes (64 big-endian float32) at inPtr into a FLR_SPNR_PSD_KERNEL_T."""
    out = FLR_SPNR_PSD_KERNEL_T()
    out.fvalue = list(unpack(">64f", inBuff[inPtr:inPtr + 256]))
    return out
# end of byteToFLR_SPNR_PSD_KERNEL_T()
def FLR_SPNR_PSD_KERNEL_TToByte(inVal, outBuff, outPtr):
    """Serialize the 64 float32 kernel entries of inVal into outBuff at outPtr (big-endian)."""
    pack_into(">64f", outBuff, outPtr, *inVal.fvalue[:64])
# end of FLR_SPNR_PSD_KERNEL_TToByte()
class FLR_SCALER_ZOOM_PARAMS_T():
    """Scaler zoom parameters: zoom factor plus zoom-window center coordinates."""
    def __init__(self):
        self.zoom = None     # zoom factor (uint32 on the wire)
        self.xCenter = None  # zoom-center x coordinate (uint32 on the wire)
        self.yCenter = None  # zoom-center y coordinate (uint32 on the wire)
    # end of __init__()
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self.__eq__(other)
# end of FLR_SCALER_ZOOM_PARAMS_T()
def byteToFLR_SCALER_ZOOM_PARAMS_T(inBuff, inPtr):
    """Deserialize 12 bytes (three big-endian uint32) at inPtr into a FLR_SCALER_ZOOM_PARAMS_T."""
    fields = unpack(">III", inBuff[inPtr:inPtr + 12])
    out = FLR_SCALER_ZOOM_PARAMS_T()
    out.zoom, out.xCenter, out.yCenter = fields
    return out
# end of byteToFLR_SCALER_ZOOM_PARAMS_T()
def FLR_SCALER_ZOOM_PARAMS_TToByte(inVal, outBuff, outPtr):
    """Serialize zoom, xCenter and yCenter of inVal as three big-endian uint32 at outPtr."""
    pack_into(">III", outBuff, outPtr, inVal.zoom, inVal.xCenter, inVal.yCenter)
# end of FLR_SCALER_ZOOM_PARAMS_TToByte()
class FLR_TESTRAMP_SETTINGS_T():
    """Test-ramp pattern settings: start value, end value, and step increment."""
    def __init__(self):
        self.start = None      # ramp start value (uint16 on the wire)
        self.end = None        # ramp end value (uint16 on the wire)
        self.increment = None  # ramp step size (uint16 on the wire)
    # end of __init__()
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self.__eq__(other)
# end of FLR_TESTRAMP_SETTINGS_T()
def byteToFLR_TESTRAMP_SETTINGS_T(inBuff, inPtr):
    """Deserialize 6 bytes (three big-endian uint16) at inPtr into a FLR_TESTRAMP_SETTINGS_T."""
    fields = unpack(">HHH", inBuff[inPtr:inPtr + 6])
    out = FLR_TESTRAMP_SETTINGS_T()
    out.start, out.end, out.increment = fields
    return out
# end of byteToFLR_TESTRAMP_SETTINGS_T()
def FLR_TESTRAMP_SETTINGS_TToByte(inVal, outBuff, outPtr):
    """Serialize start, end and increment of inVal as three big-endian uint16 at outPtr."""
    pack_into(">HHH", outBuff, outPtr, inVal.start, inVal.end, inVal.increment)
# end of FLR_TESTRAMP_SETTINGS_TToByte()
class FLR_SYSINFO_MONITOR_BUILD_VARIANT_T():
    """Monitor build-variant identifier: 50 byte-sized entries."""
    def __init__(self):
        # Pre-sized so deserializers/callers can fill entries by index.
        self.value = [None] * 50
    # end of __init__()
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self.__eq__(other)
# end of FLR_SYSINFO_MONITOR_BUILD_VARIANT_T()
def byteToFLR_SYSINFO_MONITOR_BUILD_VARIANT_T(inBuff, inPtr):
    """Deserialize 50 bytes at inPtr into a FLR_SYSINFO_MONITOR_BUILD_VARIANT_T."""
    out = FLR_SYSINFO_MONITOR_BUILD_VARIANT_T()
    out.value = list(unpack(">50B", inBuff[inPtr:inPtr + 50]))
    return out
# end of byteToFLR_SYSINFO_MONITOR_BUILD_VARIANT_T()
def FLR_SYSINFO_MONITOR_BUILD_VARIANT_TToByte(inVal, outBuff, outPtr):
    """Serialize the first 50 byte entries of inVal into outBuff at outPtr."""
    pack_into(">50B", outBuff, outPtr, *inVal.value[:50])
# end of FLR_SYSINFO_MONITOR_BUILD_VARIANT_TToByte()
| 77.070388
| 3,673
| 0.750764
| 4,218
| 31,753
| 5.407065
| 0.062826
| 0.149077
| 0.022362
| 0.008945
| 0.536502
| 0.486693
| 0.42693
| 0.393432
| 0.386241
| 0.378656
| 0
| 0.052484
| 0.102321
| 31,753
| 411
| 3,674
| 77.257908
| 0.747649
| 0.076213
| 0
| 0.46
| 1
| 0
| 0.030351
| 0.025019
| 0
| 0
| 0
| 0
| 0
| 1
| 0.34
| false
| 0
| 0.008
| 0.136
| 0.62
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
0f5527442f177b7652280badd3d5ff91d9d19298
| 87
|
py
|
Python
|
ontology/logistic_regression/sherlock/listify_circuits_k09_reverse.py
|
ehbeam/neuro-knowledge-engine
|
9dc56ade0bbbd8d14f0660774f787c3f46d7e632
|
[
"MIT"
] | 15
|
2020-07-17T07:10:26.000Z
|
2022-02-18T05:51:45.000Z
|
ontology/neural_network/sherlock/listify_circuits_k09_reverse.py
|
YifeiCAO/neuro-knowledge-engine
|
9dc56ade0bbbd8d14f0660774f787c3f46d7e632
|
[
"MIT"
] | 2
|
2022-01-14T09:10:12.000Z
|
2022-01-28T17:32:42.000Z
|
ontology/neural_network/sherlock/listify_circuits_k09_reverse.py
|
YifeiCAO/neuro-knowledge-engine
|
9dc56ade0bbbd8d14f0660774f787c3f46d7e632
|
[
"MIT"
] | 4
|
2021-12-22T13:27:32.000Z
|
2022-02-18T05:51:47.000Z
|
#!/bin/python
# Thin entry-point script: delegates to the project's listify_circuits module,
# running circuit-list optimization with k=9 and the 'reverse' direction.
# NOTE(review): listify_circuits is a sibling project module, not a PyPI package.
import listify_circuits
listify_circuits.optimize_circuits(9, 'reverse')
| 21.75
| 48
| 0.827586
| 11
| 87
| 6.272727
| 0.727273
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012195
| 0.057471
| 87
| 4
| 48
| 21.75
| 0.829268
| 0.137931
| 0
| 0
| 0
| 0
| 0.093333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0f6750f2f5a7c5cacab325a7cf6d00771c9b55c5
| 128
|
py
|
Python
|
calvin/actorstore/systemactors/json/__init__.py
|
gabrielcercel/calvin-base
|
c0315f100643230d65aed1745e1c22df3e7a7c2c
|
[
"Apache-2.0"
] | 334
|
2015-06-04T15:14:28.000Z
|
2022-02-09T11:14:17.000Z
|
calvin/actorstore/systemactors/json/__init__.py
|
gabrielcercel/calvin-base
|
c0315f100643230d65aed1745e1c22df3e7a7c2c
|
[
"Apache-2.0"
] | 89
|
2015-06-13T19:15:35.000Z
|
2019-12-03T19:23:20.000Z
|
calvin/actorstore/systemactors/json/__init__.py
|
gabrielcercel/calvin-base
|
c0315f100643230d65aed1745e1c22df3e7a7c2c
|
[
"Apache-2.0"
] | 112
|
2015-06-06T19:16:54.000Z
|
2020-10-19T01:27:55.000Z
|
"""This module provides container types List and Dict, accessors, and methods for reading and writing JSON formatted strings."""
| 128
| 128
| 0.796875
| 18
| 128
| 5.666667
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132813
| 128
| 1
| 128
| 128
| 0.918919
| 0.953125
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0f7f7a03c58bbfbe0f9877bb8dbff871aa71c66c
| 7,240
|
py
|
Python
|
pyFM/optimize/base_functions.py
|
Yang-L1/pyFM
|
bfc9cf58da81441c13dbfe0645872e82b6038521
|
[
"MIT"
] | 35
|
2020-09-10T14:27:37.000Z
|
2022-03-30T02:39:18.000Z
|
pyFM/optimize/base_functions.py
|
Yang-L1/pyFM
|
bfc9cf58da81441c13dbfe0645872e82b6038521
|
[
"MIT"
] | 2
|
2020-12-01T07:30:24.000Z
|
2020-12-03T08:19:57.000Z
|
pyFM/optimize/base_functions.py
|
Yang-L1/pyFM
|
bfc9cf58da81441c13dbfe0645872e82b6038521
|
[
"MIT"
] | 3
|
2021-02-15T10:56:23.000Z
|
2021-12-27T07:31:15.000Z
|
import numpy as np
def descr_preservation(C, descr1_red, descr2_red):
    """
    Compute the descriptor preservation constraint.

    Parameters
    ---------------------
    C          : (K2,K1) functional map
    descr1_red : (K1,p) descriptors on first basis
    descr2_red : (K2,p) descriptors on second basis

    Output
    ---------------------
    energy : 0.5 * squared Frobenius norm of (C @ descr1_red - descr2_red)
    """
    residual = C @ descr1_red - descr2_red
    return 0.5 * np.square(residual).sum()
def descr_preservation_grad(C, descr1_red, descr2_red):
    """
    Compute the gradient of the descriptor preservation constraint.

    Parameters
    ---------------------
    C          : (K2,K1) functional map
    descr1_red : (K1,p) descriptors on first basis
    descr2_red : (K2,p) descriptors on second basis

    Output
    ---------------------
    gradient : (K2,K1) gradient of the descriptor preservation squared norm
    """
    residual = C @ descr1_red - descr2_red
    return residual @ descr1_red.T
def LB_commutation(C, ev_sqdiff):
    """
    Compute the Laplace-Beltrami commutativity constraint.

    Parameters
    ---------------------
    C         : (K2,K1) functional map
    ev_sqdiff : (K2,K1) [normalized] matrix of squared eigenvalue differences

    Output
    ---------------------
    energy : (float) LB commutativity squared norm
    """
    # Elementwise weighting of the squared map entries by the eigenvalue gaps.
    return 0.5 * np.sum(np.square(C) * ev_sqdiff)
def LB_commutation_grad(C, ev_sqdiff):
    """
    Compute the gradient of the Laplace-Beltrami commutativity constraint.

    Parameters
    ---------------------
    C         : (K2,K1) functional map
    ev_sqdiff : (K2,K1) [normalized] matrix of squared eigenvalue differences

    Output
    ---------------------
    gradient : (K2,K1) gradient of the LB commutativity squared norm
    """
    return np.multiply(C, ev_sqdiff)
def op_commutation(C, op1, op2):
    """
    Compute the operator commutativity constraint.
    Can be used with descriptor multiplication operators.

    Parameters
    ---------------------
    C   : (K2,K1) functional map
    op1 : (K1,K1) operator on first basis
    op2 : (K2,K2) operator on second basis

    Output
    ---------------------
    energy : (float) operator commutativity squared norm
    """
    commutator = C @ op1 - op2 @ C
    return 0.5 * np.square(commutator).sum()
def op_commutation_grad(C, op1, op2):
    """
    Compute the gradient of the operator commutativity constraint.
    Can be used with descriptor multiplication operators.

    Parameters
    ---------------------
    C   : (K2,K1) functional map
    op1 : (K1,K1) operator on first basis
    op2 : (K2,K2) operator on second basis

    Output
    ---------------------
    gradient : (K2,K1) gradient of the operator commutativity squared norm
    """
    # Shared commutator term, computed once instead of twice.
    commutator = op2 @ C - C @ op1
    return op2.T @ commutator - commutator @ op1.T
def oplist_commutation(C, op_list):
    """
    Compute the operator commutativity constraint for a list of operator pairs.
    Can be used with a list of descriptor multiplication operators.

    Parameters
    ---------------------
    C       : (K2,K1) functional map
    op_list : list of tuple( (K1,K1), (K2,K2) ) operators on first and second basis

    Output
    ---------------------
    energy : (float) sum of operator commutativity squared norms
    """
    # Generator keeps the accumulation identical to a running += loop.
    return sum(op_commutation(C, op1, op2) for (op1, op2) in op_list)
def oplist_commutation_grad(C, op_list):
    """
    Compute the gradient of the operator commutativity constraint for a list
    of operator pairs. Can be used with a list of descriptor multiplication
    operators.

    Parameters
    ---------------------
    C       : (K2,K1) functional map
    op_list : list of tuple( (K1,K1), (K2,K2) ) operators on first and second basis

    Output
    ---------------------
    gradient : (K2,K1) gradient of the summed commutativity squared norms
    """
    # sum() starts from int 0, matching the original accumulator initialization.
    return sum(op_commutation_grad(C, op1, op2) for (op1, op2) in op_list)
def energy_func_std(C, descr_mu, lap_mu, descr_comm_mu, orient_mu, descr1_red, descr2_red, list_descr, orient_op, ev_sqdiff):
    """
    Evaluate the energy for standard functional-map computation.

    Parameters:
    ----------------------
    C             : (K2*K1) or (K2,K1) functional map
    descr_mu      : scaling of the descriptor preservation term
    lap_mu        : scaling of the Laplacian commutativity term
    descr_comm_mu : scaling of the descriptor commutativity term
    orient_mu     : scaling of the orientation preservation term
    descr1_red    : (K1,p) descriptors on first basis
    descr2_red    : (K2,p) descriptors on second basis
    list_descr    : p-uple( (K1,K1), (K2,K2) ) operators on first and second
                    basis related to descriptors
    orient_op     : p-uple( (K1,K1), (K2,K2) ) operators on first and second
                    basis related to orientation preservation
    ev_sqdiff     : (K2,K1) [normalized] matrix of squared eigenvalue differences

    Output
    ------------------------
    energy : float - value of the energy
    """
    # The optimizer passes C flattened; restore the (K2,K1) matrix shape.
    k2, k1 = descr2_red.shape[0], descr1_red.shape[0]
    C = C.reshape((k2, k1))

    # Terms accumulate in the same order as the original implementation;
    # each is skipped entirely when its weight is not strictly positive.
    total = 0
    if descr_mu > 0:
        total += descr_mu * descr_preservation(C, descr1_red, descr2_red)
    if lap_mu > 0:
        total += lap_mu * LB_commutation(C, ev_sqdiff)
    if descr_comm_mu > 0:
        total += descr_comm_mu * oplist_commutation(C, list_descr)
    if orient_mu > 0:
        total += orient_mu * oplist_commutation(C, orient_op)
    return total
def grad_energy_std(C, descr_mu, lap_mu, descr_comm_mu, orient_mu, descr1_red, descr2_red, list_descr, orient_op, ev_sqdiff):
    """
    Evaluate the gradient of the energy for standard functional-map computation.

    Parameters:
    ----------------------
    C             : (K2*K1) or (K2,K1) functional map
    descr_mu      : scaling of the descriptor preservation term
    lap_mu        : scaling of the Laplacian commutativity term
    descr_comm_mu : scaling of the descriptor commutativity term
    orient_mu     : scaling of the orientation preservation term
    descr1_red    : (K1,p) descriptors on first basis
    descr2_red    : (K2,p) descriptors on second basis
    list_descr    : p-uple( (K1,K1), (K2,K2) ) operators on first and second
                    basis related to descriptors
    orient_op     : p-uple( (K1,K1), (K2,K2) ) operators on first and second
                    basis related to orientation preservation
    ev_sqdiff     : (K2,K1) [normalized] matrix of squared eigenvalue differences

    Output
    ------------------------
    gradient : (K2*K1) flattened gradient of the energy
    """
    # The optimizer passes C flattened; restore the (K2,K1) matrix shape.
    k2, k1 = descr2_red.shape[0], descr1_red.shape[0]
    C = C.reshape((k2, k1))

    grad = np.zeros_like(C)
    if descr_mu > 0:
        grad += descr_mu * descr_preservation_grad(C, descr1_red, descr2_red)
    if lap_mu > 0:
        grad += lap_mu * LB_commutation_grad(C, ev_sqdiff)
    if descr_comm_mu > 0:
        grad += descr_comm_mu * oplist_commutation_grad(C, list_descr)
    if orient_mu > 0:
        grad += orient_mu * oplist_commutation_grad(C, orient_op)

    # First column is pinned (the constant function is never modified).
    grad[:, 0] = 0
    return grad.reshape(-1)
| 31.072961
| 125
| 0.60953
| 916
| 7,240
| 4.683406
| 0.10262
| 0.020513
| 0.030303
| 0.034965
| 0.891142
| 0.786713
| 0.744988
| 0.701632
| 0.660839
| 0.629138
| 0
| 0.030698
| 0.257597
| 7,240
| 233
| 126
| 31.072961
| 0.767442
| 0.615193
| 0
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.192308
| false
| 0
| 0.019231
| 0
| 0.403846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0f7ff8baca6cec478989e8cd54068ed2f03951d7
| 62,249
|
py
|
Python
|
boto3_type_annotations_with_docs/boto3_type_annotations/managedblockchain/client.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 119
|
2018-12-01T18:20:57.000Z
|
2022-02-02T10:31:29.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/managedblockchain/client.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 15
|
2018-11-16T00:16:44.000Z
|
2021-11-13T03:44:18.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/managedblockchain/client.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 11
|
2019-05-06T05:26:51.000Z
|
2021-09-28T15:27:59.000Z
|
from typing import Optional
from botocore.client import BaseClient
from typing import Dict
from botocore.paginate import Paginator
from botocore.waiter import Waiter
from typing import Union
class Client(BaseClient):
def can_paginate(self, operation_name: str = None):
    """
    Check whether a client operation supports pagination.

    :type operation_name: string
    :param operation_name: The operation name. This is the same name as the
        method name on the client: for the ``create_foo`` method (invoked as
        ``client.create_foo(**kwargs)``), pagination support means
        ``client.get_paginator("create_foo")`` is available.
    :return: ``True`` if the operation can be paginated, ``False`` otherwise.
    """
    pass
def create_member(self, ClientRequestToken: str, InvitationId: str, NetworkId: str, MemberConfiguration: Dict) -> Dict:
    """
    Creates a member within a Managed Blockchain network.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/CreateMember>`_

    :type ClientRequestToken: string
    :param ClientRequestToken: **[REQUIRED]** A unique, case-sensitive
        idempotency token. Required only when calling the HTTP API directly;
        autopopulated by the AWS SDKs and CLI if not provided.
    :type InvitationId: string
    :param InvitationId: **[REQUIRED]** The unique identifier of the
        invitation that is sent to the member to join the network.
    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network in
        which the member is created.
    :type MemberConfiguration: dict
    :param MemberConfiguration: **[REQUIRED]** Member configuration
        parameters: ``Name`` (required), ``Description`` (optional), and
        ``FrameworkConfiguration`` (required) holding Hyperledger Fabric
        attributes ``AdminUsername`` and ``AdminPassword`` (both required).
        The ``AdminPassword`` must be 8-32 characters with at least one
        uppercase letter, one lowercase letter, and one digit, and must not
        contain a single quote, double quote, forward slash, backslash, the
        at sign, or a space.
    :rtype: dict
    :returns: ``{'MemberId': 'string'}`` - the unique identifier of the
        newly created member.
    """
    pass
def create_network(self, ClientRequestToken: str, Name: str, Framework: str, FrameworkVersion: str, VotingPolicy: Dict, MemberConfiguration: Dict, Description: str = None, FrameworkConfiguration: Dict = None) -> Dict:
    """
    Creates a new blockchain network using Amazon Managed Blockchain.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/CreateNetwork>`_

    :type ClientRequestToken: string
    :param ClientRequestToken: **[REQUIRED]** A unique, case-sensitive
        idempotency token. Required only when calling the HTTP API directly;
        autopopulated by the AWS SDKs and CLI if not provided.
    :type Name: string
    :param Name: **[REQUIRED]** The name of the network.
    :type Description: string
    :param Description: An optional description for the network.
    :type Framework: string
    :param Framework: **[REQUIRED]** The blockchain framework that the
        network uses (e.g. ``'HYPERLEDGER_FABRIC'``).
    :type FrameworkVersion: string
    :param FrameworkVersion: **[REQUIRED]** The version of the blockchain
        framework that the network uses.
    :type FrameworkConfiguration: dict
    :param FrameworkConfiguration: Framework-specific network configuration.
        For Hyperledger Fabric, ``Fabric.Edition`` (required) is
        ``'STARTER'`` or ``'STANDARD'``; see `Amazon Managed Blockchain
        Pricing <https://aws.amazon.com/managed-blockchain/pricing/>`__.
    :type VotingPolicy: dict
    :param VotingPolicy: **[REQUIRED]** The voting rules used to decide
        whether a proposal is approved. ``ApprovalThresholdPolicy`` holds
        ``ThresholdPercentage`` (percentage of ``YES`` votes required),
        ``ProposalDurationInHours`` (after which an undecided proposal is
        ``EXPIRED``), and ``ThresholdComparator`` (``'GREATER_THAN'`` or
        ``'GREATER_THAN_OR_EQUAL_TO'``, applied to the percentage).
    :type MemberConfiguration: dict
    :param MemberConfiguration: **[REQUIRED]** Configuration of the first
        member in the network: ``Name`` (required), ``Description``
        (optional), and ``FrameworkConfiguration`` (required) with
        Hyperledger Fabric ``AdminUsername``/``AdminPassword`` (password:
        8-32 characters, at least one uppercase letter, one lowercase
        letter, and one digit; no quote characters, slashes, at sign, or
        space).
    :rtype: dict
    :returns: ``{'NetworkId': 'string', 'MemberId': 'string'}`` - the
        identifiers of the network and of its first member.
    """
    pass
def create_node(self, ClientRequestToken: str, NetworkId: str, MemberId: str, NodeConfiguration: Dict) -> Dict:
    """
    Creates a peer node in a member.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/CreateNode>`_

    :type ClientRequestToken: string
    :param ClientRequestToken: **[REQUIRED]** A unique, case-sensitive
        idempotency token. Required only when calling the HTTP API directly;
        autopopulated by the AWS SDKs and CLI if not provided.
    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network in
        which this node runs.
    :type MemberId: string
    :param MemberId: **[REQUIRED]** The unique identifier of the member that
        owns this node.
    :type NodeConfiguration: dict
    :param NodeConfiguration: **[REQUIRED]** The node configuration:
        ``InstanceType`` (required, the Managed Blockchain instance type)
        and ``AvailabilityZone`` (required, the Availability Zone in which
        the node exists).
    :rtype: dict
    :returns: ``{'NodeId': 'string'}`` - the unique identifier of the node.
    """
    pass
def create_proposal(self, ClientRequestToken: str, NetworkId: str, MemberId: str, Actions: Dict, Description: str = None) -> Dict:
    """Creates a proposal for a network change that other members can vote on.

    For example, a proposal to add a new member to the network. Any member
    can create a proposal.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/CreateProposal>`_

    :type ClientRequestToken: string
    :param ClientRequestToken: **[REQUIRED]** A unique, case-sensitive
        identifier you provide to ensure idempotency of the operation.
        Required only for direct HTTP requests; the AWS SDKs and CLI
        generate it automatically. This field is autopopulated if not
        provided.
    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network
        for which the proposal is made.
    :type MemberId: string
    :param MemberId: **[REQUIRED]** The unique identifier of the member
        creating the proposal. Useful for identifying the proposer when
        multiple members exist in a single AWS account.
    :type Actions: dict
    :param Actions: **[REQUIRED]** The proposed actions. The action types
        are mutually exclusive -- a proposal with ``Invitations`` actions
        cannot also contain ``Removals`` actions.

        - ``Invitations`` (list of ``{'Principal': <AWS account ID>}``):
          invite AWS accounts to create a member and join the network when
          the proposal is ``APPROVED``.
        - ``Removals`` (list of ``{'MemberId': <member ID>}``): remove
          members (and all associated member resources) from the network
          when the proposal is ``APPROVED``.
    :type Description: string
    :param Description: A description visible to voting members, for
        example, \"Proposal to add Example Corp. as member.\"
    :rtype: dict
    :returns: ``{'ProposalId': 'string'}`` -- the unique identifier of
        the proposal.
    """
    pass
def delete_member(self, NetworkId: str, MemberId: str) -> Dict:
    """Deletes a member, removing it and all associated resources from the network.

    ``DeleteMember`` can only be called for a specified ``MemberId`` if the
    principal performing the action is associated with the AWS account that
    owns the member. In all other cases, the action is carried out as the
    result of an approved removal proposal. If ``MemberId`` is the last
    member in a network specified by the last AWS account, the network is
    deleted as well.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/DeleteMember>`_

    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network
        from which the member is removed.
    :type MemberId: string
    :param MemberId: **[REQUIRED]** The unique identifier of the member to
        remove.
    :rtype: dict
    :returns: An empty dict (``{}``).
    """
    pass
def delete_node(self, NetworkId: str, MemberId: str, NodeId: str) -> Dict:
    """Deletes a peer node from a member that your AWS account owns.

    All data on the node is lost and cannot be recovered.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/DeleteNode>`_

    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network
        that the node belongs to.
    :type MemberId: string
    :param MemberId: **[REQUIRED]** The unique identifier of the member
        that owns this node.
    :type NodeId: string
    :param NodeId: **[REQUIRED]** The unique identifier of the node.
    :rtype: dict
    :returns: An empty dict (``{}``).
    """
    pass
def generate_presigned_url(self, ClientMethod: str = None, Params: Dict = None, ExpiresIn: int = None, HttpMethod: str = None):
    """Generate a presigned url given a client, its method, and arguments.

    :type ClientMethod: string
    :param ClientMethod: The client method to presign for.
    :type Params: dict
    :param Params: The parameters normally passed to ``ClientMethod``.
    :type ExpiresIn: int
    :param ExpiresIn: The number of seconds the presigned url is valid
        for. By default it expires in an hour (3600 seconds).
    :type HttpMethod: string
    :param HttpMethod: The http method to use on the generated url. By
        default, the http method is whatever is used in the method\'s model.
    :returns: The presigned url.
    """
    pass
def get_member(self, NetworkId: str, MemberId: str) -> Dict:
    """Returns detailed information about a member.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/GetMember>`_

    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network
        to which the member belongs.
    :type MemberId: string
    :param MemberId: **[REQUIRED]** The unique identifier of the member.
    :rtype: dict
    :returns: A dict with a single ``Member`` key describing the member:

        - ``NetworkId``, ``Id``, ``Name``, ``Description`` (strings).
        - ``FrameworkAttributes.Fabric`` -- Hyperledger Fabric attributes:
          ``AdminUsername`` (initial administrator user name) and
          ``CaEndpoint`` (certificate authority endpoint).
        - ``Status`` -- one of ``CREATING`` | ``AVAILABLE`` |
          ``CREATE_FAILED`` | ``DELETING`` | ``DELETED``. ``DELETING`` and
          ``DELETED`` occur when the owning account deletes the member or
          it is removed by an ``APPROVED`` ``PROPOSAL``.
        - ``CreationDate`` -- datetime the member was created.
    """
    pass
def get_network(self, NetworkId: str) -> Dict:
    """Returns detailed information about a network.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/GetNetwork>`_

    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network
        to get information about.
    :rtype: dict
    :returns: A dict with a single ``Network`` key describing the network:

        - ``Id``, ``Name``, ``Description`` (strings).
        - ``Framework`` -- the blockchain framework
          (``HYPERLEDGER_FABRIC``) and ``FrameworkVersion``.
        - ``FrameworkAttributes.Fabric`` -- ``OrderingServiceEndpoint``
          and ``Edition`` (``STARTER`` | ``STANDARD``; see `Amazon Managed
          Blockchain Pricing
          <https://aws.amazon.com/managed-blockchain/pricing/>`__).
        - ``VpcEndpointServiceName`` -- members use this VPC endpoint
          service name to create a VPC endpoint to access network
          resources.
        - ``VotingPolicy.ApprovalThresholdPolicy`` -- the voting rules for
          proposals: ``ThresholdPercentage`` (percentage of ``YES`` votes
          required), ``ProposalDurationInHours`` (time until a proposal is
          ``EXPIRED`` and its actions are not carried out), and
          ``ThresholdComparator`` (``GREATER_THAN`` |
          ``GREATER_THAN_OR_EQUAL_TO``, the precise comparison against
          ``ThresholdPercentage``).
        - ``Status`` -- ``CREATING`` | ``AVAILABLE`` | ``CREATE_FAILED`` |
          ``DELETING`` | ``DELETED``.
        - ``CreationDate`` -- datetime the network was created.
    """
    pass
def get_node(self, NetworkId: str, MemberId: str, NodeId: str) -> Dict:
    """Returns detailed information about a peer node.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/GetNode>`_

    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network
        to which the node belongs.
    :type MemberId: string
    :param MemberId: **[REQUIRED]** The unique identifier of the member
        that owns the node.
    :type NodeId: string
    :param NodeId: **[REQUIRED]** The unique identifier of the node.
    :rtype: dict
    :returns: A dict with a single ``Node`` key describing the node:

        - ``NetworkId``, ``MemberId``, ``Id`` (strings).
        - ``InstanceType`` and ``AvailabilityZone`` of the node.
        - ``FrameworkAttributes.Fabric`` -- ``PeerEndpoint`` (endpoint for
          all services except peer channel-based event services) and
          ``PeerEventEndpoint`` (endpoint for peer channel-based event
          services).
        - ``Status`` -- ``CREATING`` | ``AVAILABLE`` | ``CREATE_FAILED`` |
          ``DELETING`` | ``DELETED`` | ``FAILED``.
        - ``CreationDate`` -- datetime the node was created.
    """
    pass
def get_paginator(self, operation_name: str = None) -> Paginator:
    """Create a paginator for an operation.

    :type operation_name: string
    :param operation_name: The operation name. This is the same name
        as the method name on the client. For example, if the method name
        is ``create_foo``, and you\'d normally invoke the operation as
        ``client.create_foo(**kwargs)``, if the ``create_foo`` operation
        can be paginated, you can use the call
        ``client.get_paginator(\"create_foo\")``.
    :raise OperationNotPageableError: Raised if the operation is not
        pageable. You can use the ``client.can_paginate`` method to check
        if an operation is pageable.
    :rtype: L{botocore.paginate.Paginator}
    :return: A paginator object.
    """
    pass
def get_proposal(self, NetworkId: str, ProposalId: str) -> Dict:
    """Returns detailed information about a proposal.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/GetProposal>`_

    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network
        for which the proposal is made.
    :type ProposalId: string
    :param ProposalId: **[REQUIRED]** The unique identifier of the
        proposal.
    :rtype: dict
    :returns: A dict with a single ``Proposal`` key describing the
        proposal:

        - ``ProposalId``, ``NetworkId``, ``Description`` (strings).
        - ``Actions`` -- the actions performed if the proposal is
          ``APPROVED``: ``Invitations`` (list of
          ``{'Principal': <AWS account ID>}`` invite actions) and
          ``Removals`` (list of ``{'MemberId': <member ID>}`` removal
          actions; removal deletes the member and all associated
          resources from the network).
        - ``ProposedByMemberId`` / ``ProposedByMemberName`` -- the member
          that created the proposal.
        - ``Status`` -- one of: ``IN_PROGRESS`` (active and open for
          member voting); ``APPROVED`` (sufficient ``YES`` votes per the
          network ``VotingPolicy``; actions are carried out);
          ``REJECTED`` (insufficient ``YES`` votes; actions are not
          carried out); ``EXPIRED`` (not enough votes cast before
          expiration; actions are not carried out); ``ACTION_FAILED``
          (one or more approved actions could not be completed because of
          an error).
        - ``CreationDate`` and ``ExpirationDate`` -- the latter is
          ``CreationDate`` plus the ``ProposalDurationInHours`` of the
          threshold policy; after it passes without a decided outcome the
          proposal is ``EXPIRED``.
        - ``YesVoteCount`` / ``NoVoteCount`` / ``OutstandingVoteCount`` --
          current vote totals; outstanding is the number of members minus
          the votes already cast.
    """
    pass
def get_waiter(self, waiter_name: str = None) -> Waiter:
    """Returns an object that can wait for some condition.

    :type waiter_name: str
    :param waiter_name: The name of the waiter to get. See the waiters
        section of the service docs for a list of available waiters.
    :returns: The specified waiter object.
    :rtype: botocore.waiter.Waiter
    """
    pass
def list_invitations(self, MaxResults: int = None, NextToken: str = None) -> Dict:
    """Returns a listing of all invitations made on the specified network.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/ListInvitations>`_

    :type MaxResults: integer
    :param MaxResults: The maximum number of invitations to return.
    :type NextToken: string
    :param NextToken: The pagination token that indicates the next set of
        results to retrieve.
    :rtype: dict
    :returns: A dict with ``Invitations`` (list) and ``NextToken``
        (pagination token for the next set of results). Each invitation
        contains:

        - ``InvitationId`` -- the unique identifier for the invitation.
        - ``CreationDate`` and ``ExpirationDate`` -- the latter is
          ``CreationDate`` plus the ``ProposalDurationInHours`` of the
          threshold policy; after it passes the invitee can no longer use
          this ``InvitationId`` to create a member.
        - ``Status`` -- one of: ``PENDING`` (no member created yet, not
          expired); ``ACCEPTING`` (member creation in progress);
          ``ACCEPTED`` (member created and joined); ``REJECTED`` (invitee
          rejected the invitation); ``EXPIRED`` (neither accepted nor
          rejected before ``ExpirationDate``).
        - ``NetworkSummary`` -- ``Id``, ``Name``, ``Description``,
          ``Framework`` (``HYPERLEDGER_FABRIC``), ``FrameworkVersion``,
          ``Status`` (``CREATING`` | ``AVAILABLE`` | ``CREATE_FAILED`` |
          ``DELETING`` | ``DELETED``) and ``CreationDate``.
    """
    pass
def list_members(self, NetworkId: str, Name: str = None, Status: str = None, IsOwned: bool = None, MaxResults: int = None, NextToken: str = None) -> Dict:
    """Returns a listing of the members in a network and their configuration properties.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/ListMembers>`_

    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network
        for which to list members.
    :type Name: string
    :param Name: The optional name of the member to list.
    :type Status: string
    :param Status: An optional status specifier (``CREATING`` |
        ``AVAILABLE`` | ``CREATE_FAILED`` | ``DELETING`` | ``DELETED``).
        If provided, only members currently in this status are listed.
    :type IsOwned: boolean
    :param IsOwned: An optional Boolean value. If provided, the request is
        limited either to members that the current AWS account owns
        (``true`` ) or that other AWS accounts own (``false`` ). If
        omitted, all members are listed.
    :type MaxResults: integer
    :param MaxResults: The maximum number of members to return in the
        request.
    :type NextToken: string
    :param NextToken: The pagination token that indicates the next set of
        results to retrieve.
    :rtype: dict
    :returns: A dict with ``Members`` (list of ``MemberSummary`` objects)
        and ``NextToken``. Each summary contains ``Id``, ``Name``,
        ``Description``, ``Status`` (``DELETING``/``DELETED`` occur when
        the owning account deletes the member or it is removed by an
        ``APPROVED`` ``PROPOSAL``), ``CreationDate`` and ``IsOwned``
        (whether the member is owned by your AWS account or a different
        one).
    """
    pass
def list_networks(self, Name: str = None, Framework: str = None, Status: str = None, MaxResults: int = None, NextToken: str = None) -> Dict:
    """Returns information about the networks in which the current AWS account has members.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/ListNetworks>`_

    :type Name: string
    :param Name: The name of the network.
    :type Framework: string
    :param Framework: An optional framework specifier
        (``HYPERLEDGER_FABRIC``). If provided, only networks of this
        framework type are listed.
    :type Status: string
    :param Status: An optional status specifier (``CREATING`` |
        ``AVAILABLE`` | ``CREATE_FAILED`` | ``DELETING`` | ``DELETED``).
        If provided, only networks currently in this status are listed.
    :type MaxResults: integer
    :param MaxResults: The maximum number of networks to list.
    :type NextToken: string
    :param NextToken: The pagination token that indicates the next set of
        results to retrieve.
    :rtype: dict
    :returns: A dict with ``Networks`` (list of ``NetworkSummary``
        objects) and ``NextToken``. Each summary contains ``Id``,
        ``Name``, ``Description``, ``Framework``, ``FrameworkVersion``,
        ``Status`` and ``CreationDate``.
    """
    pass
def list_nodes(self, NetworkId: str, MemberId: str, Status: str = None, MaxResults: int = None, NextToken: str = None) -> Dict:
    """Returns information about the nodes within a network.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/ListNodes>`_

    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network
        for which to list nodes.
    :type MemberId: string
    :param MemberId: **[REQUIRED]** The unique identifier of the member
        who owns the nodes to list.
    :type Status: string
    :param Status: An optional status specifier (``CREATING`` |
        ``AVAILABLE`` | ``CREATE_FAILED`` | ``DELETING`` | ``DELETED`` |
        ``FAILED``). If provided, only nodes currently in this status are
        listed.
    :type MaxResults: integer
    :param MaxResults: The maximum number of nodes to list.
    :type NextToken: string
    :param NextToken: The pagination token that indicates the next set of
        results to retrieve.
    :rtype: dict
    :returns: A dict with ``Nodes`` (list of ``NodeSummary`` objects) and
        ``NextToken``. Each summary contains ``Id``, ``Status``,
        ``CreationDate``, ``AvailabilityZone`` and ``InstanceType`` (the
        EC2 instance type for the node).
    """
    pass
def list_proposal_votes(self, NetworkId: str, ProposalId: str, MaxResults: int = None, NextToken: str = None) -> Dict:
    """Returns the listing of votes for a specified proposal.

    Includes the value of each vote and the unique identifier of the
    member that cast the vote.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/ListProposalVotes>`_

    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network.
    :type ProposalId: string
    :param ProposalId: **[REQUIRED]** The unique identifier of the
        proposal.
    :type MaxResults: integer
    :param MaxResults: The maximum number of votes to return.
    :type NextToken: string
    :param NextToken: The pagination token that indicates the next set of
        results to retrieve.
    :rtype: dict
    :returns: A dict with ``ProposalVotes`` (list) and ``NextToken``. Each
        vote contains ``Vote`` (``YES`` | ``NO``), ``MemberName`` and
        ``MemberId`` of the member that cast it.
    """
    pass
def list_proposals(self, NetworkId: str, MaxResults: int = None, NextToken: str = None) -> Dict:
    """
    Return a listing of proposals for the network.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/ListProposals>`_

    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network.
    :type MaxResults: integer
    :param MaxResults: The maximum number of proposals to return.
    :type NextToken: string
    :param NextToken: The pagination token that indicates the next set of
        results to retrieve.
    :rtype: dict
    :returns: A dict with ``'Proposals'`` — a list of proposal summaries, each
        carrying ``'ProposalId'``, ``'Description'``, ``'ProposedByMemberId'``,
        ``'ProposedByMemberName'``, ``'Status'`` (one of ``IN_PROGRESS``,
        ``APPROVED``, ``REJECTED``, ``EXPIRED``, ``ACTION_FAILED``),
        ``'CreationDate'`` and ``'ExpirationDate'`` — plus a ``'NextToken'``
        string for pagination.
    """
    pass
def reject_invitation(self, InvitationId: str) -> Dict:
    """
    Reject an invitation to join a network. This action can be called by a
    principal in an AWS account that has received an invitation to create a
    member and join a network.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/RejectInvitation>`_

    :type InvitationId: string
    :param InvitationId: **[REQUIRED]** The unique identifier of the
        invitation to reject.
    :rtype: dict
    :returns: An empty dict on success.
    """
    pass
def vote_on_proposal(self, NetworkId: str, ProposalId: str, VoterMemberId: str, Vote: str) -> Dict:
    """
    Cast a vote for a specified ``ProposalId`` on behalf of a member. The
    member to vote as, specified by ``VoterMemberId``, must be in the same AWS
    account as the principal that calls the action.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/VoteOnProposal>`_

    :type NetworkId: string
    :param NetworkId: **[REQUIRED]** The unique identifier of the network.
    :type ProposalId: string
    :param ProposalId: **[REQUIRED]** The unique identifier of the proposal.
    :type VoterMemberId: string
    :param VoterMemberId: **[REQUIRED]** The unique identifier of the member
        casting the vote.
    :type Vote: string
    :param Vote: **[REQUIRED]** The value of the vote, ``'YES'`` or ``'NO'``.
    :rtype: dict
    :returns: An empty dict on success.
    """
    pass
| 50.160355
| 503
| 0.550708
| 6,137
| 62,249
| 5.565749
| 0.077073
| 0.015809
| 0.030038
| 0.031355
| 0.765348
| 0.723161
| 0.69245
| 0.651609
| 0.627221
| 0.597359
| 0
| 0.007155
| 0.353387
| 62,249
| 1,240
| 504
| 50.200806
| 0.841445
| 0.802744
| 0
| 0.431373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.431373
| false
| 0.431373
| 0.117647
| 0
| 0.568627
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
7e16d50f2b8bb27d042531c3a9ba86dda1fe35e1
| 26
|
py
|
Python
|
src/utoolbox/analysis/__init__.py
|
liuyenting/utoolbox-legacy
|
dfcb24701ca25a37a223cc3c14b4433e6c296bfd
|
[
"Apache-2.0"
] | 2
|
2020-09-03T06:22:14.000Z
|
2020-10-04T10:14:56.000Z
|
src/utoolbox/analysis/__init__.py
|
liuyenting/utoolbox-legacy
|
dfcb24701ca25a37a223cc3c14b4433e6c296bfd
|
[
"Apache-2.0"
] | null | null | null |
src/utoolbox/analysis/__init__.py
|
liuyenting/utoolbox-legacy
|
dfcb24701ca25a37a223cc3c14b4433e6c296bfd
|
[
"Apache-2.0"
] | null | null | null |
from .psf_average import *
| 26
| 26
| 0.807692
| 4
| 26
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 26
| 1
| 26
| 26
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7e1d3d541d85e58571981ae94c9e6475d6e12119
| 36
|
py
|
Python
|
gpenkf/experiments/data_provider.py
|
danilkuzin/GP-EnKF
|
215623e0f322ddae9757854e7278b60e11e570bf
|
[
"MIT"
] | 12
|
2018-11-09T10:08:36.000Z
|
2021-07-11T05:04:52.000Z
|
gpenkf/experiments/data_provider.py
|
danilkuzin/GP-EnKF
|
215623e0f322ddae9757854e7278b60e11e570bf
|
[
"MIT"
] | null | null | null |
gpenkf/experiments/data_provider.py
|
danilkuzin/GP-EnKF
|
215623e0f322ddae9757854e7278b60e11e570bf
|
[
"MIT"
] | 1
|
2019-10-29T05:57:47.000Z
|
2019-10-29T05:57:47.000Z
|
class DataProvider(object):
    """Empty base class for experiment data providers.

    Defines no behavior of its own; presumably subclasses supply the actual
    data-access interface — TODO confirm against callers.
    """
    pass
| 18
| 27
| 0.75
| 4
| 36
| 6.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 36
| 2
| 28
| 18
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
7e6747322870e1638525eb73b9c3794ae77c083c
| 39
|
py
|
Python
|
src/controllers/__init__.py
|
aurelmegn/boilerplate_flask
|
8438fe2917155a3f53b10e6b833e04f935972c1d
|
[
"MIT"
] | null | null | null |
src/controllers/__init__.py
|
aurelmegn/boilerplate_flask
|
8438fe2917155a3f53b10e6b833e04f935972c1d
|
[
"MIT"
] | null | null | null |
src/controllers/__init__.py
|
aurelmegn/boilerplate_flask
|
8438fe2917155a3f53b10e6b833e04f935972c1d
|
[
"MIT"
] | null | null | null |
from src import app
from . import index
| 19.5
| 19
| 0.794872
| 7
| 39
| 4.428571
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179487
| 39
| 2
| 20
| 19.5
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0e1b4b76e7d74de2cc0e5a75026a83cff3331792
| 77
|
py
|
Python
|
prometheus_toolbox/expose/django/__init__.py
|
vbilyi/prometheus_toolbox
|
6b21fa39148cf685fc16117716b0374bf9962f44
|
[
"MIT"
] | null | null | null |
prometheus_toolbox/expose/django/__init__.py
|
vbilyi/prometheus_toolbox
|
6b21fa39148cf685fc16117716b0374bf9962f44
|
[
"MIT"
] | null | null | null |
prometheus_toolbox/expose/django/__init__.py
|
vbilyi/prometheus_toolbox
|
6b21fa39148cf685fc16117716b0374bf9962f44
|
[
"MIT"
] | null | null | null |
from .urls import urlpatterns
from .middleware import AfterRequestMiddleware
| 25.666667
| 46
| 0.87013
| 8
| 77
| 8.375
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 77
| 2
| 47
| 38.5
| 0.971014
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0e1b9399fdef9346a23fcf62a65bc68cc2621f63
| 211
|
py
|
Python
|
django/researchdata/views/visualise.py
|
linguindic/linguindic-website
|
545c64bc55b9502ef3db8ac00172d6240495b526
|
[
"MIT"
] | 1
|
2020-09-22T10:07:52.000Z
|
2020-09-22T10:07:52.000Z
|
django/researchdata/views/visualise.py
|
linguindic/linguindic-website
|
545c64bc55b9502ef3db8ac00172d6240495b526
|
[
"MIT"
] | null | null | null |
django/researchdata/views/visualise.py
|
linguindic/linguindic-website
|
545c64bc55b9502ef3db8ac00172d6240495b526
|
[
"MIT"
] | null | null | null |
from django.views.generic import (TemplateView)
class VisualiseTemplateView(TemplateView):
    """
    Class-based view to show the visualise template.

    Relies entirely on TemplateView's default GET handling; only the
    template path is configured here.
    """
    # Template rendered by the inherited TemplateView machinery.
    template_name = 'researchdata/visualise.html'
| 23.444444
| 51
| 0.739336
| 22
| 211
| 7.045455
| 0.818182
| 0.219355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170616
| 211
| 8
| 52
| 26.375
| 0.885714
| 0.222749
| 0
| 0
| 0
| 0
| 0.182432
| 0.182432
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0e28084c64f1646579ccd82aae8b32facac3bfb0
| 38
|
py
|
Python
|
wrench/semisupervisedlearning/__init__.py
|
rpryzant/wrench
|
3668c359aeff18724e927a207a85da17f2ead823
|
[
"Apache-2.0"
] | 1
|
2021-11-24T04:01:08.000Z
|
2021-11-24T04:01:08.000Z
|
wrench/semisupervisedlearning/__init__.py
|
yinkaiw/wrench
|
f20135eb9b1d51b5bad92b3a910efd92235df356
|
[
"Apache-2.0"
] | null | null | null |
wrench/semisupervisedlearning/__init__.py
|
yinkaiw/wrench
|
f20135eb9b1d51b5bad92b3a910efd92235df356
|
[
"Apache-2.0"
] | null | null | null |
from .meanteacher import MeanTeacher
| 19
| 37
| 0.842105
| 4
| 38
| 8
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 1
| 38
| 38
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0e5991b3f7f2e32575d6b56e6a60a86992d85967
| 128,928
|
py
|
Python
|
raet/road/transacting.py
|
Mattlk13/raet
|
e7bd8bba5ea21e98614c50b31d2334b2a95ce1f2
|
[
"Apache-2.0"
] | 36
|
2015-02-10T20:39:59.000Z
|
2022-01-09T18:36:47.000Z
|
raet/road/transacting.py
|
Mattlk13/raet
|
e7bd8bba5ea21e98614c50b31d2334b2a95ce1f2
|
[
"Apache-2.0"
] | 11
|
2016-10-10T17:19:47.000Z
|
2020-04-23T16:35:47.000Z
|
raet/road/transacting.py
|
Mattlk13/raet
|
e7bd8bba5ea21e98614c50b31d2334b2a95ce1f2
|
[
"Apache-2.0"
] | 5
|
2015-06-01T09:00:00.000Z
|
2020-04-23T17:06:20.000Z
|
# -*- coding: utf-8 -*-
'''
stacking.py raet protocol stacking classes
'''
# pylint: skip-file
# pylint: disable=W0611
# Import python libs
import socket
import binascii
import struct
try:
import simplejson as json
except ImportError:
import json
# Import ioflo libs
from ioflo.aid.odicting import odict
from ioflo.aid.osetting import oset
from ioflo.aid.timing import StoreTimer
from ioflo.aid.aiding import packByte, unpackByte
# Import raet libs
from ..abiding import * # import globals
from .. import raeting
from ..raeting import Acceptance, PcktKind, TrnsKind, CoatKind, FootKind
from .. import nacling
from . import packeting
from . import estating
from ioflo.base.consoling import getConsole
console = getConsole()
class Transaction(object):
    '''
    RAET protocol transaction class.

    Base class for one side of a protocol exchange. Tracks the packets sent
    and received, the session/transaction ids, and a timeout timer. Subclasses
    drive the actual protocol logic via .process()/.receive().
    '''
    Timeout = 5.0  # default timeout in seconds

    def __init__(self, stack=None, remote=None, kind=None, timeout=None,
                 rmt=False, bcst=False, sid=None, tid=None,
                 txData=None, txPacket=None, rxPacket=None):
        '''
        Setup Transaction instance.
        timeout of 0.0 means no timeout go forever
        '''
        self.stack = stack
        self.remote = remote
        self.kind = kind or raeting.PACKET_DEFAULTS['tk']  # transaction kind
        if timeout is None:
            timeout = self.Timeout  # class default only when not given at all
        self.timeout = timeout
        # Timer is created on the stack's store clock; requires a valid stack.
        self.timer = StoreTimer(self.stack.store, duration=self.timeout)
        self.rmt = rmt  # remote initiator
        self.bcst = bcst  # bf flag
        self.sid = sid  # session id
        self.tid = tid  # transaction id
        self.txData = txData or odict()  # data used to prepare last txPacket
        self.txPacket = txPacket  # last tx packet needed for retries
        self.rxPacket = rxPacket  # last rx packet needed for index

    @property
    def index(self):
        '''
        Property is transaction tuple (rf, le, re, si, ti, bf,)
        Not to be used in join (Joiner and Joinent) since bootstrapping
        Use the txPacket (Joiner) or rxPacket (Joinent) .data instead
        '''
        le = self.remote.nuid  # near side uid
        re = self.remote.fuid  # far side uid
        return ((self.rmt, le, re, self.sid, self.tid, self.bcst,))

    def process(self):
        '''
        Process time based handling of transaction like timeout or retries.
        Base class does nothing; subclasses override.
        '''
        pass

    def receive(self, packet):
        '''
        Process received packet. Subclasses should super call this so the
        last received packet is always recorded.
        '''
        self.rxPacket = packet

    def transmit(self, packet):
        '''
        Queue tx duple on stack transmit queue.
        On a StackError the transaction is removed and the failure counted
        instead of being raised to the caller.
        '''
        try:
            self.stack.tx(packet.packed, self.remote.uid)
        except raeting.StackError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat(self.statKey())
            self.remove(remote=self.remote, index=packet.index)
            return
        self.txPacket = packet  # remember last sent packet for retries

    def add(self, remote=None, index=None):
        '''
        Add self to remote transactions.
        Defaults index/remote to this transaction's own when not supplied.
        '''
        if not index:
            index = self.index
        if not remote:
            remote = self.remote
        remote.addTransaction(index, self)

    def remove(self, remote=None, index=None):
        '''
        Remove self from remote transactions.
        Safe to call when remote is None (then a no-op).
        '''
        if not index:
            index = self.index
        if not remote:
            remote = self.remote
        if remote:
            remote.removeTransaction(index, transaction=self)

    def statKey(self):
        '''
        Return the stat name key from class name, e.g.
        'joiner_transaction_failure' for a Joiner instance.
        '''
        return ("{0}_transaction_failure".format(self.__class__.__name__.lower()))

    def nack(self, **kwa):
        '''
        Placeholder override in sub class.
        nack to terminate transaction with other side of transaction.
        '''
        pass
class Initiator(Transaction):
    '''
    RAET protocol initiator transaction class.

    The local side that opens a transaction; the remote-initiator flag is
    therefore always forced off regardless of what the caller passes.
    '''
    def __init__(self, **kwargs):
        '''
        Setup Transaction instance with local-initiator semantics.
        '''
        # Local initiator: never honor a caller-supplied rmt value.
        kwargs['rmt'] = False
        super(Initiator, self).__init__(**kwargs)

    def process(self):
        '''
        Time based processing: drop the transaction once its timer expires.
        A timeout of 0.0 disables expiry entirely.
        '''
        timed_out = self.timeout > 0.0 and self.timer.expired
        if timed_out:
            self.remove()
class Correspondent(Transaction):
    '''
    RAET protocol correspondent transaction class.

    The responding side of a transaction; created only in reaction to a
    received packet, so session id, transaction id and that packet are
    mandatory keyword arguments.
    '''
    Requireds = ['sid', 'tid', 'rxPacket']  # must be supplied by caller

    def __init__(self, **kwargs):
        '''
        Setup Transaction instance, validating required keyword arguments.

        Raises TypeError when any name in .Requireds is absent.
        '''
        # Correspondent means the far side initiated: force the flag on.
        kwargs['rmt'] = True
        missing = [name for name in self.Requireds if name not in kwargs]
        if missing:
            emsg = "Missing required keyword arguments: '{0}'".format(missing)
            raise TypeError(emsg)
        super(Correspondent, self).__init__(**kwargs)
class Staler(Initiator):
    '''
    RAET protocol Staler initiator transaction class.

    Dummy initiator created when a correspondent packet arrives for which no
    matching initiator transaction exists; its only job is to nack the stale
    packet. It is never added to the transaction table, so it never needs
    removal.
    '''
    def __init__(self, **kwa):
        '''
        Setup Transaction instance.

        :raises TypeError: when any of 'kind', 'sid', 'tid', 'rxPacket' is
            missing — .prep() and .nack() need all of them.
        '''
        for key in ['kind', 'sid', 'tid', 'rxPacket']:
            if key not in kwa:
                emsg = "Missing required keyword arguments: '{0}'".format(key)
                raise TypeError(emsg)
        super(Staler, self).__init__(**kwa)
        self.prep()

    def prep(self):
        '''
        Prepare .txData for nack to stale: mirror the received packet's
        addressing so the nack goes back to the sender.
        '''
        self.txData.update(
            dh=self.rxPacket.data['sh'],  # may need for index
            dp=self.rxPacket.data['sp'],  # may need for index
            se=self.remote.nuid,
            de=self.rxPacket.data['se'],
            tk=self.kind,
            cf=self.rmt,
            bf=self.bcst,
            si=self.sid,
            ti=self.tid,
            ck=self.rxPacket.data['ck'],  # CoatKind.nada.value,
            fk=self.rxPacket.data['fk'],  # FootKind.nada.value
        )

    def nack(self):
        '''
        Send nack to stale packet from correspondent.
        This is used when a correspondent packet is received but no matching
        Initiator transaction is found. So create a dummy initiator and send
        a nack packet back. Do not add transaction so don't need to remove it.
        '''
        ha = (self.rxPacket.data['sh'], self.rxPacket.data['sp'])
        try:
            tkname = TrnsKind(self.rxPacket.data['tk'])
        except ValueError as ex:
            tkname = None
        try:
            # FIX: 'pk' is a packet kind, so decode it with PcktKind.
            # It was previously decoded with TrnsKind, yielding a wrong or
            # None name in the log message below.
            pkname = PcktKind(self.rxPacket.data['pk'])
        except ValueError as ex:
            pkname = None
        emsg = ("Staler '{0}'. Stale transaction '{1}' packet '{2}' from '{3}' in {4} "
                "nacking...\n".format(self.stack.name, tkname, pkname, ha, self.tid))
        console.terse(emsg)
        self.stack.incStat('stale_correspondent_attempt')
        if self.rxPacket.data['se'] not in self.stack.remotes:
            emsg = "Staler '{0}'. Unknown correspondent estate id '{1}'\n".format(
                self.stack.name, self.rxPacket.data['se'])
            console.terse(emsg)
            self.stack.incStat('unknown_correspondent_uid')
            #return #maybe we should return and not respond at all in this case
        body = odict()
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.nack.value,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            return
        # Bypass self.transmit(): this dummy transaction has no remote uid
        # registered, so queue the raw (packed, ha) duple directly.
        self.stack.txes.append((packet.packed, ha))
        console.terse("Staler '{0}'. Do Nack of stale correspondent {1} in {2} at {3}\n".format(
            self.stack.name, ha, self.tid, self.stack.store.stamp))
        self.stack.incStat('stale_correspondent_nack')
class Stalent(Correspondent):
    '''
    RAET protocol Stalent correspondent transaction class.

    Dummy correspondent created when an initiator packet arrives with a stale
    session id; its only job is to nack (or renew/refuse/reject) back to the
    initiator. Never added to the transaction table.
    '''
    Requireds = ['kind', 'sid', 'tid', 'rxPacket']  # narrows Correspondent's list

    def __init__(self, **kwa):
        '''
        Setup Transaction instance (required kwargs enforced by Correspondent
        via .Requireds) and prepare the reply addressing.
        '''
        super(Stalent, self).__init__(**kwa)
        self.prep()

    def prep(self):
        '''
        Prepare .txData for nack to stale: swap the received packet's
        source/destination estate ids so the reply goes back to the sender.
        '''
        self.txData.update(
            dh=self.rxPacket.data['sh'],  # may need for index
            dp=self.rxPacket.data['sp'],  # may need for index
            se=self.rxPacket.data['de'],
            de=self.rxPacket.data['se'],
            tk=self.kind,
            cf=self.rmt,
            bf=self.bcst,
            si=self.sid,
            ti=self.tid,
            ck=self.rxPacket.data['ck'],  # CoatKind.nada.value
            fk=self.rxPacket.data['fk'],  # FootKind.nada.value
        )

    def nack(self, kind=PcktKind.nack.value):
        '''
        Send nack to stale packet from initiator.
        This is used when a initiator packet is received but with a stale session id
        So create a dummy correspondent and send a nack packet back.
        Do not add transaction so don't need to remove it.
        '''
        ha = (self.rxPacket.data['sh'], self.rxPacket.data['sp'])
        try:
            tkname = TrnsKind(self.rxPacket.data['tk'])
        except ValueError as ex:
            tkname = None
        try:
            # FIX: 'pk' is a packet kind, so decode it with PcktKind
            # (was decoded with TrnsKind, yielding wrong/None log names).
            pkname = PcktKind(self.rxPacket.data['pk'])
        except ValueError as ex:
            pkname = None
        emsg = ("Stalent '{0}'. Stale transaction '{1}' packet '{2}' from '{3}' in {4} "
                "nacking ...\n".format(self.stack.name, tkname, pkname, ha, self.tid))
        console.terse(emsg)
        self.stack.incStat('stale_initiator_attempt')
        if self.rxPacket.data['se'] not in self.stack.remotes:
            emsg = "Stalent '{0}'. Unknown initiator estate id '{1}'\n".format(
                self.stack.name,
                self.rxPacket.data['se'])
            console.terse(emsg)
            self.stack.incStat('unknown_initiator_uid')
            #return #maybe we should return and not respond at all in this case
        body = odict()
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=kind,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            return
        if kind == PcktKind.renew:
            console.terse("Stalent '{0}'. Do Renew of {1} in {2} at {3}\n".format(
                self.stack.name, ha, self.tid, self.stack.store.stamp))
        elif kind == PcktKind.refuse:
            console.terse("Stalent '{0}'. Do Refuse of {1} in {2} at {3}\n".format(
                self.stack.name, ha, self.tid, self.stack.store.stamp))
        elif kind == PcktKind.reject:
            console.terse("Stalent '{0}'. Do Reject of {1} in {2} at {3}\n".format(
                self.stack.name, ha, self.tid, self.stack.store.stamp))
        elif kind == PcktKind.nack:
            console.terse("Stalent '{0}'. Do Nack of {1} in {2} at {3}\n".format(
                self.stack.name, ha, self.tid, self.stack.store.stamp))
        else:
            # FIX: format string previously contained "{3)" (unbalanced brace)
            # which made .format() raise ValueError whenever this branch ran.
            console.terse("Stalent '{0}'. Invalid nack kind {1}. Do Nack of {2} anyway "
                          " to {3} at {4}\n".format(self.stack.name,
                                                    kind,
                                                    ha,
                                                    self.tid,
                                                    self.stack.store.stamp))
            # FIX: was `kind == PcktKind.nack` — a no-op comparison where an
            # assignment (fall back to plain nack) was clearly intended.
            kind = PcktKind.nack.value
        self.stack.txes.append((packet.packed, ha))
        self.stack.incStat('stale_initiator_nack')
class Joiner(Initiator):
'''
RAET protocol Joiner Initiator class Dual of Joinent
Joiner must always add new remote since always must anticipate response to
request.
'''
RedoTimeoutMin = 1.0 # initial timeout
RedoTimeoutMax = 4.0 # max timeout
PendRedoTimeout = 60.0 # Redo timeout when pended
def __init__(self,
             redoTimeoutMin=None,
             redoTimeoutMax=None,
             pendRedoTimeout=None,
             cascade=False,
             renewal=False,
             **kwa):
    '''
    Setup Transaction instance.

    redoTimeoutMin/redoTimeoutMax bound the exponential redo timer;
    pendRedoTimeout is the slower redo period used once the far side has
    pended acceptance; cascade triggers an allow after a completed join;
    renewal marks this join as a vacuous rejoin.
    '''
    kwa['kind'] = TrnsKind.join.value  # a Joiner is always a join transaction
    super(Joiner, self).__init__(**kwa)
    self.cascade = cascade
    self.redoTimeoutMax = redoTimeoutMax or self.RedoTimeoutMax
    self.redoTimeoutMin = redoTimeoutMin or self.RedoTimeoutMin
    self.redoTimer = StoreTimer(self.stack.store,
                                duration=self.redoTimeoutMin)
    self.pendRedoTimeout = pendRedoTimeout or self.PendRedoTimeout
    self.sid = 0  # always 0 for join
    self.tid = self.remote.nextTid()
    # fuid is assigned during join but want to preserve vacuousness for remove
    self.vacuous = (self.remote.fuid == 0)
    self.renewal = renewal  # is current join a renew, vacuous rejoin
    self.pended = False  # Farside Correspondent has pended remote acceptance
    self.prep()
    # don't dump remote yet since its ephemeral until we join and get valid uid
def transmit(self, packet):
    '''
    Augment transmit with restart of redo timer, so every (re)send
    reschedules the next retry.
    '''
    super(Joiner, self).transmit(packet)
    self.redoTimer.restart()
def add(self, remote=None, index=None):
    '''
    Augment with add self.remote to stack.joinees if vacuous, so a
    bootstrapping (fuid == 0) remote can be found by host address.
    '''
    super(Joiner, self).add(remote=remote, index=index)
    # self.remote is now assigned
    if self.vacuous:  # vacuous
        self.stack.joinees[self.remote.ha] = self.remote
def remove(self, remote=None, index=None):
    '''
    Remove self from stack transactions.
    For a vacuous join also drop the remote from stack.joinees, but only
    once it has no other in-flight transactions.
    '''
    super(Joiner, self).remove(remote=remote, index=index)
    # self.remote is now assigned
    if self.vacuous:  # vacuous
        if self.remote.ha in self.stack.joinees and not self.remote.transactions:
            del self.stack.joinees[self.remote.ha]
def receive(self, packet):
    """
    Process received packet belonging to this transaction.

    Dispatches on the packet kind ('pk') of join-transaction packets to the
    matching handler; any other transaction kind is ignored here.
    """
    super(Joiner, self).receive(packet)  # self.rxPacket = packet
    if packet.data['tk'] == TrnsKind.join:
        if packet.data['pk'] == PcktKind.pend:  # pending
            self.stack.incStat('joiner_rx_pend')
            self.pend()
        elif packet.data['pk'] == PcktKind.response:  # accepted
            self.stack.incStat('joiner_rx_response')
            self.accept()
        elif packet.data['pk'] == PcktKind.nack:  # stale
            self.stack.incStat('joiner_rx_nack')
            self.refuse()
        elif packet.data['pk'] == PcktKind.refuse:  # refused
            self.stack.incStat('joiner_rx_refuse')
            self.refuse()
        elif packet.data['pk'] == PcktKind.renew:  # renew
            self.stack.incStat('joiner_rx_renew')
            self.renew()
        elif packet.data['pk'] == PcktKind.reject:  # rejected
            self.stack.incStat('joiner_rx_reject')
            self.reject()
def process(self):
    '''
    Perform time based processing of transaction.

    On overall timeout the transaction is removed. Otherwise, on redo-timer
    expiry either the join request is retransmitted, or — if no request is
    outstanding — the remote's acceptance status is re-checked in case it
    changed out of band.
    '''
    if self.timeout > 0.0 and self.timer.expired:
        if self.txPacket and self.txPacket.data['pk'] == PcktKind.request:
            self.remove(index=self.txPacket.index)
        else:
            self.remove(index=self.index)  # in case never sent txPacket
        console.concise("Joiner {0}. Timed out with {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        return
    # need keep sending join until accepted or timed out
    if self.redoTimer.expired:
        if not self.pended:
            # exponential backoff bounded by [redoTimeoutMin, redoTimeoutMax]
            duration = min(
                max(self.redoTimeoutMin,
                    self.redoTimer.duration * 2.0),
                self.redoTimeoutMax)
        else:
            duration = self.pendRedoTimeout  # slower cadence once pended
        self.redoTimer.restart(duration=duration)
        if (self.txPacket and
                self.txPacket.data['pk'] == PcktKind.request):
            self.transmit(self.txPacket)  # redo
            console.concise("Joiner {0}. Redo Join with {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
            self.stack.incStat('joiner_tx_join_redo')
        else:  # check to see if status has changed to accept after other kind
            if self.remote:
                status = self.stack.keep.statusRemote(self.remote, dump=True)
                if status == Acceptance.accepted:
                    self.completify()
                elif status == Acceptance.rejected:
                    # FIX: this message was previously built as a bare
                    # expression and discarded — never logged.
                    emsg = ("Joiner {0}: Estate '{1}' uid '{2}' keys rejected\n".format(
                        self.stack.name, self.remote.name, self.remote.uid))
                    console.terse(emsg)
                    self.stack.removeRemote(self.remote, clear=True)
                    # removeRemote also nacks
def prep(self):
    '''
    Prepare .txData: addressing and header fields reused by every packet
    this transaction sends.
    '''
    self.txData.update(
        dh=self.remote.ha[0],  # may need for index
        dp=self.remote.ha[1],  # may need for index
        se=self.remote.nuid,
        de=self.remote.fuid,
        tk=self.kind,
        cf=self.rmt,
        bf=self.bcst,
        si=self.sid,
        ti=self.tid,
        ck=CoatKind.nada.value,
        fk=FootKind.nada.value
    )
def join(self):
    '''
    Send join request.

    Aborts when a join is already in process with this remote or when the
    stack's application kind is out of the one-byte range. Otherwise packs
    the local estate's identity (name, mode flags, kind, keys, role) into a
    request packet, transmits it, and registers the transaction.
    '''
    joins = self.remote.joinInProcess()
    if joins:
        emsg = ("Joiner {0}. Join with {1} already in process. "
                "Aborting...\n".format(
                    self.stack.name,
                    self.remote.name))
        console.concise(emsg)
        return
    self.remote.joined = None  # unknown until the transaction completes
    if self.stack.kind is None:
        self.stack.kind = 0
    else:
        if self.stack.kind < 0 or self.stack.kind > 255:  # must fit in one byte
            emsg = ("Joiner {0}. Invalid application kind field value {1} for {2}. "
                    "Aborting...\n".format(
                        self.stack.name,
                        self.stack.kind,
                        self.remote.name))
            console.concise(emsg)
            return
    flags = [0, 0, 0, 0, 0, 0, 0, self.stack.main]  # stack operation mode flags
    operation = packByte(fmt=b'11111111', fields=flags)
    # keys are sent as ISO-8859-1 text or None when the local estate has none
    body = odict([('name', self.stack.local.name),
                  ('mode', operation),
                  ('kind', self.stack.kind),
                  ('verhex', str(self.stack.local.signer.verhex.decode('ISO-8859-1'))
                      if self.stack.local.signer.verhex else None),
                  ('pubhex', str(self.stack.local.priver.pubhex.decode('ISO-8859-1'))
                      if self.stack.local.priver.pubhex else None),
                  ('role', self.stack.local.role)])
    packet = packeting.TxPacket(stack=self.stack,
                                kind=PcktKind.request.value,
                                embody=body,
                                data=self.txData)
    try:
        packet.pack()
    except raeting.PacketError as ex:
        console.terse(str(ex) + '\n')
        self.stack.incStat("packing_error")
        self.remove()
        return
    console.concise("Joiner {0}. Do Join with {1} in {2} at {3}\n".format(
        self.stack.name,
        self.remote.name,
        self.tid,
        self.stack.store.stamp))
    self.transmit(packet)
    self.add(index=self.txPacket.index)
def renew(self):
    '''
    Perform renew in response to nack renew.
    Reset to vacuous Road data and try joining again if not main.
    Otherwise act as if rejected.
    '''
    # renew not allowed on immutable road
    if not self.stack.mutable:
        self.stack.incStat('join_renew_unallowed')
        emsg = ("Joiner {0}. Renew from '{1}' not allowed on immutable"
                " road\n".format(self.stack.name, self.remote.name))
        console.terse(emsg)
        self.refuse()
        return
    console.terse("Joiner {0}. Renew from {1} in {2} at {3}\n".format(
        self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
    self.stack.incStat('join_renew_attempt')
    self.remove(index=self.txPacket.index)
    if self.remote:
        self.remote.fuid = 0  # forces vacuous join
        self.stack.dumpRemote(self.remote)  # since change fuid
    # start a fresh vacuous join transaction on the same remote
    self.stack.join(uid=self.remote.uid, timeout=self.timeout, renewal=True)
def pend(self):
    '''
    Process ack pend to join packet: mark the far side as having pended
    acceptance, which switches .process() to the slower redo cadence.
    '''
    if not self.stack.parseInner(self.rxPacket):
        return
    self.pended = True
def accept(self):
    '''
    Perform acceptance in response to join response packet.

    Validates every required field of the response body, then reconciles the
    remote estate's attributes with the received values, enforcing road
    immutability and the keep's role/key acceptance status. Ends by either
    completing the join (.completify) or pending it (.pendify).
    '''
    if not self.stack.parseInner(self.rxPacket):
        return
    data = self.rxPacket.data
    body = self.rxPacket.body.data
    # --- field validation: any missing/invalid field aborts the transaction
    name = body.get('name')
    if not name:
        emsg = "Missing remote name in accept packet\n"
        console.terse(emsg)
        self.stack.incStat('invalid_accept')
        self.remove(index=self.txPacket.index)
        return
    mode = body.get('mode')
    if mode is None or not isinstance(mode, int) or mode < 0 or mode > 255:
        emsg = "Missing or invalid remote stack operation mode in accept packet\n"
        console.terse(emsg)
        self.stack.incStat('invalid_accept')
        self.remove(index=self.txPacket.index)
        return
    flags = unpackByte(fmt=b'11111111', byte=mode, boolean=True)
    main = flags[7]  # main-stack flag is the last mode bit
    kind = body.get('kind')
    if kind is None:
        emsg = "Missing or invalid remote application kind in accept packet\n"
        console.terse(emsg)
        self.stack.incStat('invalid_accept')
        self.remove(index=self.txPacket.index)
        return
    fuid = body.get('uid')
    if not fuid:  # None or zero
        emsg = "Missing or invalid remote farside uid in accept packet\n"
        console.terse(emsg)
        self.stack.incStat('invalid_accept')
        self.remove(index=self.txPacket.index)
        return
    verhex = body.get('verhex', '')
    if not verhex:
        emsg = "Missing remote verifier key in accept packet\n"
        console.terse(emsg)
        self.stack.incStat('invalid_accept')
        self.remove(index=self.txPacket.index)
        return
    pubhex = body.get('pubhex', '')
    if not pubhex:
        emsg = "Missing remote crypt key in accept packet\n"
        console.terse(emsg)
        self.stack.incStat('invalid_accept')
        self.remove(index=self.txPacket.index)
        return
    role = body.get('role')
    if not role:
        emsg = "Missing remote role in accept packet\n"
        console.terse(emsg)
        self.stack.incStat('invalid_accept')
        self.remove(index=self.txPacket.index)
        return
    rha = (data['sh'], data['sp'])
    reid = data['se']
    leid = data['de']
    # NOTE(review): nesting below reconstructed — the ephemeral adoption of
    # remote attributes appears to apply only to vacuous, non-renewal joins;
    # confirm against upstream raet source.
    if self.vacuous:
        self.remote.fuid = fuid
        if not self.renewal:  # ephemeral like
            if name != self.remote.name:
                if name in self.stack.nameRemotes:
                    emsg = ("Joiner {0}. New name '{1}' unavailable for "
                            "remote {2}\n".format(self.stack.name,
                                                  name,
                                                  self.remote.name))
                    console.terse(emsg)
                    self.nack(kind=PcktKind.reject.value)
                    return
                try:
                    self.stack.renameRemote(self.remote, new=name)
                except raeting.StackError as ex:
                    console.terse(str(ex) + '\n')
                    self.stack.incStat(self.statKey())
                    self.remove(index=self.txPacket.index)
                    return
            self.remote.main = main
            self.remote.kind = kind
            self.remote.fuid = fuid
            self.remote.role = role
            self.remote.verfer = nacling.Verifier(verhex)  # verify key manager
            self.remote.pubber = nacling.Publican(pubhex)  # long term crypt key manager
    sameRoleKeys = (role == self.remote.role and
                    ns2b(verhex) == self.remote.verfer.keyhex and
                    ns2b(pubhex) == self.remote.pubber.keyhex)
    sameAll = (sameRoleKeys and
               name == self.remote.name and
               rha == self.remote.ha and
               fuid == self.remote.fuid and
               main == self.remote.main and
               kind == self.remote.kind)
    if not sameAll and not self.stack.mutable:
        emsg = ("Joiner {0}. Attempt to change immutable road by "
                "'{1}'\n".format(self.stack.name,
                                 self.remote.name))
        console.terse(emsg)
        self.nack(kind=PcktKind.reject.value)  # reject not mutable road
        self.remove(index=self.txPacket.index)
        return
    status = self.stack.keep.statusRole(role=role,
                                        verhex=verhex,
                                        pubhex=pubhex,
                                        dump=True)
    if status == Acceptance.rejected:
        if sameRoleKeys:
            self.stack.removeRemote(self.remote, clear=True)
            # remove also nacks so will also reject
        else:
            self.nack(kind=PcktKind.reject.value)  # reject
        return
    # accepted or pending
    self.remote.acceptance = status  # change acceptance of remote
    if not sameAll:  # (and mutable)
        if (name in self.stack.nameRemotes and
                self.stack.nameRemotes[name] is not self.remote):  # non unique name
            emsg = "Joiner {0}. Name '{1}' unavailable for remote {2}\n".format(
                self.stack.name, name, self.remote.name)
            console.terse(emsg)
            self.nack(kind=PcktKind.reject.value)
            return
        if name != self.remote.name:
            try:
                self.stack.renameRemote(self.remote, new=name)
            except raeting.StackError as ex:
                console.terse(str(ex) + '\n')
                self.stack.incStat(self.statKey())
                self.remove(index=self.txPacket.index)
                return
        if rha != self.remote.ha:
            self.remote.ha = rha
        if fuid != self.remote.fuid:
            self.remote.fuid = fuid
        if main != self.remote.main:
            self.remote.main = main
        if kind != self.remote.kind:
            self.remote.kind = kind
        if self.remote.role != role:
            self.remote.role = role  # rerole
        if ns2b(verhex) != self.remote.verfer.keyhex:
            self.remote.verfer = nacling.Verifier(verhex)  # verify key manager
        if ns2b(pubhex) != self.remote.pubber.keyhex:
            self.remote.pubber = nacling.Publican(pubhex)  # long term crypt key manager
    # don't dump until complete
    if status == Acceptance.accepted:  # accepted
        self.completify()
        return
    # else status == raeting.acceptance.pending or None
    self.pendify()
    def pendify(self):
        '''
        Handle pending acceptance of this join on the initiator side.
        Persists the remote's current state and acks the accept response
        as pending (acceptance not yet granted by the keep).
        '''
        self.stack.dumpRemote(self.remote)
        self.ackPend()
def ackPend(self):
'''
Send ack pending to accept response
'''
body = odict()
packet = packeting.TxPacket(stack=self.stack,
kind=PcktKind.pend.value,
embody=body,
data=self.txData)
try:
packet.pack()
except raeting.PacketError as ex:
console.terse(str(ex) + '\n')
self.stack.incStat("packing_error")
self.remove(index=self.txPacket.index)
return
console.concise("Joiner {0}. Do Ack Pend of {1} in {2} at {3}\n".format(
self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
self.transmit(packet)
    def completify(self):
        '''
        Finalize full acceptance of the join on the initiator side.
        Rolls the session id if still zero, resets stale initiators,
        marks the remote joined, persists remote and local state, then
        acks acceptance. Statement order matters: state is persisted
        before the final ack is transmitted.
        '''
        if self.remote.sid == 0: # session id must be non-zero after join
            self.remote.nextSid() # start new session
            self.remote.replaceStaleInitiators() # this join not stale since sid == 0
        if self.vacuous:
            self.remote.rsid = 0 # reset .rsid on vacuous join so allow will work
        self.remote.joined = True #accepted
        self.stack.dumpRemote(self.remote)
        self.stack.dumpLocal() #persist puid
        self.ackAccept()
def ackAccept(self):
'''
Send ack accepted to accept response
'''
body = odict()
packet = packeting.TxPacket(stack=self.stack,
kind=PcktKind.ack.value,
embody=body,
data=self.txData)
try:
packet.pack()
except raeting.PacketError as ex:
console.terse(str(ex) + '\n')
self.stack.incStat("packing_error")
self.remove(index=self.txPacket.index)
return
console.concise("Joiner {0}. Do Ack Accept, Done with {1} in {2} at {3}\n".format(
self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
self.stack.incStat("join_initiate_complete")
self.transmit(packet)
self.remove(index=self.txPacket.index) # self.rxPacket.index
if self.cascade:
self.stack.allow(uid=self.remote.uid, cascade=self.cascade, timeout=self.timeout)
    def refuse(self):
        '''
        Process nack to join packet refused as join already in progress or some
        other problem that does not change the joined attribute.
        The remote is kept; only this transaction is removed.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        console.terse("Joiner {0}. Refused by {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())
        self.remove(index=self.txPacket.index)
    def reject(self):
        '''
        Process nack to join packet, join rejected.
        Unlike refuse, rejection removes the remote (and clears its
        persisted data) from the stack after removing this transaction.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        console.terse("Joiner {0}. Rejected by {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())
        self.remove(index=self.txPacket.index)
        self.stack.removeRemote(self.remote, clear=True)
def nack(self, kind=PcktKind.nack.value):
'''
Send nack to accept response
'''
body = odict()
packet = packeting.TxPacket(stack=self.stack,
kind=kind,
embody=body,
data=self.txData)
try:
packet.pack()
except raeting.PacketError as ex:
console.terse(str(ex) + '\n')
self.stack.incStat("packing_error")
self.remove(index=self.txPacket.index)
return
if kind == PcktKind.refuse:
console.terse("Joiner {0}. Do Nack Refuse of {1} in {2} at {3}\n".format(
self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
elif kind == PcktKind.reject:
console.terse("Joiner {0}. Do Nack Reject of {1} in {2} at {3}\n".format(
self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
elif kind == PcktKind.nack:
console.terse("Joiner {0}. Do Nack of {1} in {2} at {3}\n".format(
self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
else:
console.terse("Joiner {0}. Invalid nack kind {1}. Do Nack of {2} anyway "
"in {3} at {4}\n".format(self.stack.name,
kind,
self.remote.name,
self.tid,
self.stack.store.stamp))
kind == PcktKind.nack
self.stack.incStat(self.statKey())
self.transmit(packet)
self.remove(index=self.txPacket.index)
class Joinent(Correspondent):
    '''
    RAET protocol Joinent transaction class, dual of Joiner
    Joinent does not add new remote to .remotes if rejected
    '''
    RedoTimeoutMin = 0.1 # initial timeout
    RedoTimeoutMax = 2.0 # max timeout
    PendRedoTimeout = 60.0 # redo timeout when pended
    def __init__(self,
                 redoTimeoutMin=None,
                 redoTimeoutMax=None,
                 pendRedoTimeout=None,
                 **kwa):
        '''
        Setup Transaction instance.

        Parameters:
            redoTimeoutMin: initial redo timeout, defaults to RedoTimeoutMin
            redoTimeoutMax: cap on the doubling redo timeout, defaults to RedoTimeoutMax
            pendRedoTimeout: redo timeout while acceptance is pended,
                defaults to PendRedoTimeout
            kwa: passed through to Correspondent; 'kind' is forced to join
        '''
        kwa['kind'] = TrnsKind.join.value
        super(Joinent, self).__init__(**kwa)
        self.redoTimeoutMax = redoTimeoutMax or self.RedoTimeoutMax
        self.redoTimeoutMin = redoTimeoutMin or self.RedoTimeoutMin
        self.redoTimer = StoreTimer(self.stack.store, duration=0.0)
        self.pendRedoTimeout = pendRedoTimeout or self.PendRedoTimeout
        self.vacuous = None # gets set in join method
        self.pended = False # Farside initiator has pended remote acceptance
        self.prep()
    def transmit(self, packet):
        '''
        Augment transmit with restart of redo timer so every send
        pushes back the next scheduled redo.
        '''
        super(Joinent, self).transmit(packet)
        self.redoTimer.restart()
    def add(self, remote=None, index=None):
        '''
        Augment with add self.remote to stack.joinees if vacuous.
        Vacuous joiners are tracked by host address until the join resolves.
        '''
        super(Joinent, self).add(remote=remote, index=index)
        # self.remote is now assigned
        if self.vacuous: # vacuous happens when both sides vacuous
            self.stack.joinees[self.remote.ha] = self.remote
    def remove(self, remote=None, index=None):
        '''
        Remove self from stack transactions.
        Also drops the joinees entry for a vacuous join once the remote
        has no other transactions in flight.
        '''
        super(Joinent, self).remove(remote=remote, index=index)
        # self.remote is now assigned
        if self.vacuous: # vacuous
            if self.remote.ha in self.stack.joinees and not self.remote.transactions:
                del self.stack.joinees[self.remote.ha]
def receive(self, packet):
"""
Process received packet belonging to this transaction
"""
super(Joinent, self).receive(packet) # self.rxPacket = packet
if packet.data['tk'] == TrnsKind.join:
if packet.data['pk'] == PcktKind.request:
self.stack.incStat('joinent_rx_request')
self.join()
elif packet.data['pk'] == PcktKind.pend: # maybe pending
self.stack.incStat('joinent_rx_pend')
self.pend()
elif packet.data['pk'] == PcktKind.ack: #accepted by joiner
self.stack.incStat('joinent_rx_ack')
self.complete()
elif packet.data['pk'] == PcktKind.nack: #stale
self.stack.incStat('joinent_rx_nack')
self.refuse()
elif packet.data['pk'] == PcktKind.refuse: #refused
self.stack.incStat('joinent_rx_refuse')
self.refuse()
elif packet.data['pk'] == PcktKind.reject: #rejected
self.stack.incStat('joinent_rx_reject')
self.reject()
def process(self):
'''
Perform time based processing of transaction
'''
if self.timeout > 0.0 and self.timer.expired:
self.nack() # stale
console.concise("Joinent {0}. Timed out with {1} in {2} at {3}\n".format(
self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
return
# need to perform the check for accepted status and then send accept
if self.redoTimer.expired:
if not self.pended:
duration = min(
max(self.redoTimeoutMin,
self.redoTimer.duration * 2.0),
self.redoTimeoutMax)
else:
duration = self.pendRedoTimeout
self.redoTimer.restart(duration=duration)
if (self.txPacket and
self.txPacket.data['pk'] == PcktKind.response):
self.transmit(self.txPacket) #redo
console.concise("Joinent {0}. Redo Accept with {1} in {2} at {3}\n".format(
self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
self.stack.incStat('joinent_tx_accept_redo')
else: #check to see if status has changed to accept
if self.remote:
status = self.stack.keep.statusRemote(self.remote, dump=True)
if status == Acceptance.accepted:
self.ackAccept()
elif status == Acceptance.rejected:
"Stack {0}: Estate '{1}' uid '{2}' keys rejected\n".format(
self.stack.name, self.remote.name, self.remote.uid)
self.stack.removeRemote(self.remote,clear=True)
# removeRemote also nacks
    def prep(self):
        '''
        Prepare .txData with header fields for outgoing packets.
        '''
        #since bootstrap transaction use the reversed seid and deid from packet
        self.txData.update(
            dh=self.rxPacket.data['sh'], # may need for index
            dp=self.rxPacket.data['sp'], # may need for index
            se=self.rxPacket.data['de'], # swap: our source is their destination
            de=self.rxPacket.data['se'], # swap: our destination is their source
            tk=self.kind,
            cf=self.rmt,
            bf=self.bcst,
            si=self.sid,
            ti=self.tid,
            ck=CoatKind.nada.value,
            fk=FootKind.nada.value,
        )
def join(self):
'''
Process join packet
Each estate must have a set of unique credentials on the road
The credentials are.
uid (estate id), name, ha (host address, port)
Each of the three credentials must be separably unique on the Road, that is
the uid must be unique, the name must be unique, the ha must be unique.
The other credentials are the role and keys. Multiple estates may share
the same role and associated keys. The keys are the signing key and the
encryption key.
Once an estate has joined the first time it will be assigned an uid.
Changing any of the credentials after this requires that the Road be mutable.
'''
if not self.stack.parseInner(self.rxPacket):
return
# Don't add transaction yet wait till later until transaction is permitted
# as not a duplicate and role keys are not rejected
data = self.rxPacket.data
body = self.rxPacket.body.data
name = body.get('name')
if not name:
emsg = "Missing remote name in join packet\n"
console.terse(emsg)
self.stack.incStat('invalid_join')
self.remove(index=self.rxPacket.index)
return
mode = body.get('mode')
if mode is None or not isinstance(mode, int) or mode < 0 or mode > 255:
emsg = "Missing or invalid remote stack operation mode in join packet\n"
console.terse(emsg)
self.stack.incStat('invalid_join')
self.remove(index=self.rxPacket.index)
return
flags = unpackByte(fmt=b'11111111', byte=mode, boolean=True)
main = flags[7]
kind = body.get('kind')
if kind is None:
emsg = "Missing or invalid remote application kind in join packet\n"
console.terse(emsg)
self.stack.incStat('invalid_join')
self.remove(index=self.rxPacket.index)
return
verhex = body.get('verhex', '')
if not verhex:
emsg = "Missing remote verifier key in join packet\n"
console.terse(emsg)
self.stack.incStat('invalid_join')
self.remove(index=self.rxPacket.index)
return
pubhex = body.get('pubhex', '')
if not pubhex:
emsg = "Missing remote crypt key in join packet\n"
console.terse(emsg)
self.stack.incStat('invalid_join')
self.remove(index=self.rxPacket.index)
return
role = body.get('role')
if not role:
emsg = "Missing remote role in join packet\n"
console.terse(emsg)
self.stack.incStat('invalid_join')
self.remove(index=self.rxPacket.index)
return
rha = (data['sh'], data['sp'])
reid = data['se']
leid = data['de']
self.vacuous = (leid == 0)
joins = self.remote.joinInProcess()
for join in joins: # only one join at a time is permitted
if join is self: # duplicate join packet so drop
emsg = ("Joinent {0}. Duplicate join from {1}. "
"Dropping...\n".format(self.stack.name, self.remote.name))
console.concise(emsg)
self.stack.incStat('duplicate_join_attempt')
return
if join.rmt: # is already a correspondent to a join
emsg = ("Joinent {0}. Another joinent already in process with {1}. "
"Aborting...\n".format(self.stack.name, self.remote.name))
console.concise(emsg)
self.stack.incStat('redundant_join_attempt')
self.nack(kind=PcktKind.refuse.value)
return
else: # already initiator join in process, resolve race condition
if self.vacuous and not join.vacuous: # non-vacuous beats vacuous
emsg = ("Joinent {0}. Already initiated non-vacuous join with {1}. "
"Aborting because vacuous...\n".format(
self.stack.name, self.remote.name))
console.concise(emsg)
self.stack.incStat('redundant_join_attempt')
self.nack(kind=PcktKind.refuse.value)
return
if not self.vacuous and join.vacuous: # non-vacuous beats vacuous
emsg = ("Joinent {0}. Removing vacuous initiator join with"
" {1}. Proceeding because not vacuous...\n".format(
self.stack.name, self.remote.name))
console.concise(emsg)
join.nack(kind=PcktKind.refuse.value)
else: # both vacuous or non-vacuous, so use name to resolve
if self.stack.local.name < name: # abort local correspondent and remote initiator
emsg = ("Joinent {0}. Already initiated join with {1}. "
"Aborting because lesser local name...\n".format(
self.stack.name, self.remote.name))
console.concise(emsg)
self.stack.incStat('redundant_join_attempt')
self.nack(kind=PcktKind.refuse.value)
return
else: # nack to abort local initiator and remote correspondent
emsg = ("Joinent {0}. Removing initiator join with {1}. "
"Proceeding because lesser local name...\n".format(
self.stack.name, self.remote.name))
console.concise(emsg)
join.nack(kind=PcktKind.refuse.value)
if self.vacuous: # vacuous join
if not self.stack.main:
emsg = "Joinent {0}. Invalid vacuous join not main\n".format(self.stack.name)
console.terse(emsg)
self.nack(kind=PcktKind.reject.value)
return
if name in self.stack.nameRemotes: # non ephemeral name match
self.remote = self.stack.nameRemotes[name] # replace so not ephemeral
else: # ephemeral and unique name
self.remote.name = name
self.remote.main = main
self.remote.kind = kind
self.remote.rha = rha
self.remote.role = role
self.remote.verfer = nacling.Verifier(verhex) # verify key manager
self.remote.pubber = nacling.Publican(pubhex) # long term crypt key manager
if self.remote.fuid != reid:
if self.remote.fuid == 0: # vacuous join created remote in stack
self.remote.fuid = reid
else:
emsg = ("Joinent {0}. Mishandled join reid='{1}' != fuid='{2}' for "
"remote {2}\n".format(self.stack.name, reid, self.remote.fuid, name))
console.terse(emsg)
self.nack(kind=PcktKind.reject.value)
return
else: # non vacuous join
if self.remote is not self.stack.remotes[leid]: # something is wrong
emsg = "Joinent {0}. Mishandled join leid '{1}' for remote {2}\n".format(
self.stack.name, leid, name)
console.terse(emsg)
self.nack(kind=PcktKind.reject.value)
return
sameRoleKeys = (role == self.remote.role and
ns2b(verhex) == self.remote.verfer.keyhex and
ns2b(pubhex) == self.remote.pubber.keyhex)
sameAll = (sameRoleKeys and
name == self.remote.name and
rha == self.remote.ha and
reid == self.remote.fuid and
main == self.remote.main and
kind == self.remote.kind)
if not sameAll and not self.stack.mutable:
emsg = ("Joinent {0}. Attempt to change immutable road by "
"'{1}'\n".format(self.stack.name,
self.remote.name))
console.terse(emsg)
# reject not mutable road
self.nack(kind=PcktKind.reject.value)
return
status = self.stack.keep.statusRole(role=role,
verhex=verhex,
pubhex=pubhex,
dump=True)
if status == Acceptance.rejected:
emsg = ("Joinent {0}. Keys of role='{1}' rejected for remote name='{2}'"
" nuid='{3}' fuid='{4}' ha='{5}'\n".format(self.stack.name,
self.remote.role,
self.remote.name,
self.remote.nuid,
self.remote.fuid,
self.remote.ha))
console.concise(emsg)
if sameRoleKeys and self.remote.uid in self.stack.remotes:
self.stack.removeRemote(self.remote, clear=True) #clear remote
# removeRemote also nacks which is a reject
else: # reject as keys rejected
self.nack(kind=PcktKind.reject.value)
return
#accepted or pended
self.remote.acceptance = status
if sameAll: #ephemeral will always be sameAll because assigned above
if self.remote.uid not in self.stack.remotes: # ephemeral
try:
self.stack.addRemote(self.remote)
except raeting.StackError as ex:
console.terse(str(ex) + '\n')
self.stack.incStat(self.statKey())
return
emsg = ("Joinent {0}. Added new remote name='{1}' nuid='{2}' fuid='{3}' "
"ha='{4}' role='{5}'\n".format(self.stack.name,
self.remote.name,
self.remote.nuid,
self.remote.fuid,
self.remote.ha,
self.remote.role))
console.concise(emsg)
# do dump until complete
else: # not sameAll (and mutable)
# do both unique name check first so only change road if new unique
if (name in self.stack.nameRemotes and
self.stack.nameRemotes[name] is not self.remote): # non unique name
emsg = "Joinent {0}. Name '{1}' unavailable for remote {2}\n".format(
self.stack.name, name, self.remote.name)
console.terse(emsg)
self.nack(kind=PcktKind.reject.value)
return
if name != self.remote.name:
try:
self.stack.renameRemote(self.remote, new=name)
except raeting.StackError as ex:
console.terse(str(ex) + '\n')
self.stack.incStat(self.statKey())
return
if rha != self.remote.ha:
self.remote.ha = rha
if reid != self.remote.fuid:
self.remote.fuid = reid
if main != self.remote.main:
self.remote.main = main
if kind != self.remote.kind:
self.remote.kind = kind
if role != self.remote.role: # rerole
self.remote.role = role
if ns2b(verhex) != self.remote.verfer.keyhex:
self.remote.verfer = nacling.Verifier(verhex) # verify key manager
if ns2b(pubhex) != self.remote.pubber.keyhex:
self.remote.pubber = nacling.Publican(pubhex) # long term crypt key manager
# add transaction
self.add(remote=self.remote, index=self.rxPacket.index)
self.remote.joined = None
if status == Acceptance.accepted:
duration = min(
max(self.redoTimeoutMin,
self.redoTimer.duration * 2.0),
self.redoTimeoutMax)
self.redoTimer.restart(duration=duration)
self.ackAccept()
return
# status == raeting.acceptance.pending or status == None:
self.pendify() # change to ackPend
    def pendify(self):
        '''
        Handle pending acceptance of this join on the correspondent side.
        Persists the remote's current state, then acks the join request
        as pending.
        '''
        self.stack.dumpRemote(self.remote)
        self.ackPend()
    def ackPend(self):
        '''
        Send ack to join request indicating acceptance is still pending.
        On packing failure the transaction is removed instead.
        '''
        body = odict()
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.pend.value,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove(index=self.rxPacket.index)
            return
        console.concise("Joinent {0}. Do Ack Pending accept of {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.transmit(packet)
def ackAccept(self):
'''
Send accept response to join request
'''
if self.stack.kind is None:
self.stack.kind = 0
else:
if self.stack.kind < 0 or self.stack.kind > 255:
emsg = ("Joinent {0}. Invalid application kind field value {1} for {2}. "
"Aborting...\n".format(
self.stack.name,
self.stack.kind,
self.remote.name))
console.concise(emsg)
return
flags = [0, 0, 0, 0, 0, 0, 0, self.stack.main] # stack operation mode flags
operation = packByte(fmt=b'11111111', fields=flags)
body = odict([ ('name', self.stack.local.name),
('mode', operation),
('kind', self.stack.kind),
('uid', self.remote.uid),
('verhex', str(self.stack.local.signer.verhex.decode('ISO-8859-1'))
if self.stack.local.signer.verhex else None ),
('pubhex', str(self.stack.local.priver.pubhex.decode('ISO-8859-1'))
if self.stack.local.priver.pubhex else None),
('role', self.stack.local.role)])
packet = packeting.TxPacket(stack=self.stack,
kind=PcktKind.response.value,
embody=body,
data=self.txData)
try:
packet.pack()
except raeting.PacketError as ex:
console.terse(str(ex) + '\n')
self.stack.incStat("packing_error")
self.remove(index=self.rxPacket.index)
return
console.concise("Joinent {0}. Do Accept of {1} in {2} at {3}\n".format(
self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
self.transmit(packet)
    def pend(self):
        '''
        Process ack pend to join packet.
        Marks the transaction pended, which switches process() to the
        slower pendRedoTimeout redo schedule.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.pended = True
    def complete(self):
        '''
        Process ack to accept response.
        Finalizes the join on the correspondent side: rolls session id if
        unset, resets stale initiators, marks remote joined, persists
        state, and removes the transaction.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        console.concise("Joinent {0}. Done with {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat("join_correspond_complete")
        if self.remote.sid == 0: # session id must be non-zero after join
            self.remote.nextSid() # start new session
            self.remote.replaceStaleInitiators()
        if self.vacuous:
            self.remote.rsid = 0 # reset .rsid on vacuous join so allow will work
        self.remote.joined = True # accepted
        self.stack.dumpRemote(self.remote)
        self.stack.dumpLocal() # persist puid
        self.remove(index=self.rxPacket.index)
    def reject(self):
        '''
        Process reject nack because keys rejected.
        Removes both the transaction and the remote (clearing its
        persisted data).
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        console.terse("Joinent {0}. Rejected by {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())
        self.remove(index=self.rxPacket.index)
        self.stack.removeRemote(self.remote, clear=True)
    def refuse(self):
        '''
        Process refuse nack because join already in progress or stale.
        The remote is kept; only this transaction is removed.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        console.terse("Joinent {0}. Refused by {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())
        self.remove(index=self.rxPacket.index)
def nack(self, kind=PcktKind.nack.value):
'''
Send nack to join request.
Sometimes nack occurs without remote being added so have to nack using
rxPacket source ha.
'''
#if not self.remote or self.remote.uid not in self.stack.remotes:
#self.txData.update( dh=self.rxPacket.data['sh'], dp=self.rxPacket.data['sp'],)
#ha = (self.rxPacket.data['sh'], self.rxPacket.data['sp'])
#else:
#ha = self.remote.ha
ha = (self.rxPacket.data['sh'], self.rxPacket.data['sp'])
body = odict()
packet = packeting.TxPacket(stack=self.stack,
kind=kind,
embody=body,
data=self.txData)
try:
packet.pack()
except raeting.PacketError as ex:
console.terse(str(ex) + '\n')
self.stack.incStat("packing_error")
self.remove(index=self.rxPacket.index)
return
if kind == PcktKind.renew:
console.terse("Joinent {0}. Do Nack Renew of {1} in {2} at {3}\n".format(
self.stack.name, ha, self.tid, self.stack.store.stamp))
elif kind == PcktKind.refuse:
console.terse("Joinent {0}. Do Nack Refuse of {1} in {2} at {3}\n".format(
self.stack.name, ha, self.tid, self.stack.store.stamp))
elif kind == PcktKind.reject:
console.terse("Joinent {0}. Do Nack Reject of {1} in {2} at {3}\n".format(
self.stack.name, ha, self.tid, self.stack.store.stamp))
elif kind == PcktKind.nack:
console.terse("Joinent {0}. Do Nack of {1} in {2} at {3}\n".format(
self.stack.name, ha, self.tid, self.stack.store.stamp))
else:
console.terse("Joinent {0}. Invalid nack kind {1}. Do Nack of {2} anyway "
" in {3} at {4}\n".format(self.stack.name,
kind,
ha,
self.tid,
self.stack.store.stamp))
kind == PcktKind.nack
self.stack.incStat(self.statKey())
self.stack.txes.append((packet.packed, ha))
self.remove(index=self.rxPacket.index)
class Allower(Initiator):
    '''
    RAET protocol Allower Initiator class Dual of Allowent
    CurveCP handshake
    '''
    Timeout = 4.0
    RedoTimeoutMin = 0.25 # initial timeout
    RedoTimeoutMax = 1.0 # max timeout
    def __init__(self, redoTimeoutMin=None, redoTimeoutMax=None,
                 cascade=False, **kwa):
        '''
        Setup instance.

        Parameters:
            redoTimeoutMin: initial redo timeout, defaults to RedoTimeoutMin
            redoTimeoutMax: cap on the doubling redo timeout, defaults to RedoTimeoutMax
            cascade: when True, chain an alive transaction after a successful allow
            kwa: passed through to Initiator; 'kind' is forced to allow
        '''
        kwa['kind'] = TrnsKind.allow.value
        super(Allower, self).__init__(**kwa)
        self.cascade = cascade
        self.redoTimeoutMax = redoTimeoutMax or self.RedoTimeoutMax
        self.redoTimeoutMin = redoTimeoutMin or self.RedoTimeoutMin
        self.redoTimer = StoreTimer(self.stack.store,
                                    duration=self.redoTimeoutMin)
        self.sid = self.remote.sid
        self.tid = self.remote.nextTid()
        self.oreo = None # cookie from correspondent needed until handshake completed
        self.prep() # prepare .txData
    def transmit(self, packet):
        '''
        Augment transmit with restart of redo timer so every send
        pushes back the next scheduled redo.
        '''
        super(Allower, self).transmit(packet)
        self.redoTimer.restart()
def receive(self, packet):
"""
Process received packet belonging to this transaction
"""
super(Allower, self).receive(packet) # self.rxPacket = packet
if packet.data['tk'] == TrnsKind.allow:
if packet.data['pk'] == PcktKind.cookie:
self.cookie()
elif packet.data['pk'] == PcktKind.ack:
self.allow()
elif packet.data['pk'] == PcktKind.nack: # rejected
self.refuse()
elif packet.data['pk'] == PcktKind.refuse: # refused
self.refuse()
elif packet.data['pk'] == PcktKind.reject: #rejected
self.reject()
elif packet.data['pk'] == PcktKind.unjoined: # unjoined
self.unjoin()
    def process(self):
        '''
        Perform time based processing of transaction.
        Removes the transaction on timeout; on redo timer expiry,
        re-transmits whichever handshake packet was last sent
        (hello, initiate, or final ack) with exponential backoff.
        '''
        if self.timeout > 0.0 and self.timer.expired:
            self.remove()
            console.concise("Allower {0}. Timed out with {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
            return
        # need keep sending join until accepted or timed out
        if self.redoTimer.expired:
            # exponential backoff bounded by [redoTimeoutMin, redoTimeoutMax]
            duration = min(
                max(self.redoTimeoutMin,
                    self.redoTimer.duration * 2.0),
                self.redoTimeoutMax)
            self.redoTimer.restart(duration=duration)
            if self.txPacket:
                if self.txPacket.data['pk'] == PcktKind.hello:
                    self.transmit(self.txPacket) # redo
                    console.concise("Allower {0}. Redo Hello with {1} in {2} at {3}\n".format(
                        self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
                    self.stack.incStat('redo_hello')
                if self.txPacket.data['pk'] == PcktKind.initiate:
                    self.transmit(self.txPacket) # redo
                    console.concise("Allower {0}. Redo Initiate with {1} in {2} at {3}\n".format(
                        self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
                    self.stack.incStat('redo_initiate')
                if self.txPacket.data['pk'] == PcktKind.ack:
                    self.transmit(self.txPacket) # redo
                    console.concise("Allower {0}. Redo Ack Final with {1} in {2} at {3}\n".format(
                        self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
                    self.stack.incStat('redo_final')
    def prep(self):
        '''
        Prepare .txData with header fields for outgoing packets.
        '''
        self.txData.update(
            dh=self.remote.ha[0], # maybe needed for index
            dp=self.remote.ha[1], # maybe needed for index
            se=self.remote.nuid,
            de=self.remote.fuid,
            tk=self.kind,
            cf=self.rmt,
            bf=self.bcst,
            si=self.sid,
            ti=self.tid,
        )
    def hello(self):
        '''
        Send hello request, the first packet of the CurveCP-style handshake.
        Refuses to start if a join or another allow is already in process,
        or if the remote is not yet joined (in which case a join is started
        instead). Rekeys the remote's short term keys before sending.
        '''
        joins = self.remote.joinInProcess()
        if joins:
            emsg = ("Allower {0}. Attempt to allow while join still in process with {1}. "
                    "Aborting...\n".format(self.stack.name, self.remote.name))
            console.concise(emsg)
            self.stack.incStat('invalid_allow_attempt')
            return
        allows = self.remote.allowInProcess()
        if allows:
            emsg = ("Allower {0}. Allow with {1} already in process\n".format(
                self.stack.name, self.remote.name))
            console.concise(emsg)
            return
        self.remote.allowed = None
        if not self.remote.joined:
            emsg = "Allower {0}. Must be joined first\n".format(self.stack.name)
            console.terse(emsg)
            self.stack.incStat('unjoined_remote')
            self.stack.join(uid=self.remote.uid, cascade=self.cascade, timeout=self.timeout)
            return
        self.remote.rekey() # refresh short term keys and reset .allowed to None
        self.add()
        # hello payload: 32 zero bytes hex-encoded, plus its encryption under
        # the remote's long term public key
        plain = binascii.hexlify(b''.rjust(32, b'\x00'))
        cipher, nonce = self.remote.privee.encrypt(plain, self.remote.pubber.key)
        body = raeting.HELLO_PACKER.pack(plain, self.remote.privee.pubraw, cipher, nonce)
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.hello,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        self.transmit(packet)
        console.concise("Allower {0}. Do Hello with {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
    def cookie(self):
        '''
        Process cookie packet, the correspondent's reply to hello.
        Validates the body format and size, decrypts the cookie stuff with
        the remote's long term key, checks the embedded estate ids, then
        stores the oreo and the remote's short term public key before
        sending the initiate packet. Any validation failure nacks with
        reject.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        data = self.rxPacket.data
        body = self.rxPacket.body.data
        if not isinstance(body, bytes):
            emsg = "Invalid format of cookie packet body\n"
            console.terse(emsg)
            self.stack.incStat('invalid_cookie')
            #self.remove()
            self.nack(kind=PcktKind.reject.value)
            return
        if len(body) != raeting.COOKIE_PACKER.size:
            emsg = "Invalid length of cookie packet body\n"
            console.terse(emsg)
            self.stack.incStat('invalid_cookie')
            #self.remove()
            self.nack(kind=PcktKind.reject.value)
            return
        cipher, nonce = raeting.COOKIE_PACKER.unpack(body)
        try:
            msg = self.remote.privee.decrypt(cipher, nonce, self.remote.pubber.key)
        except ValueError as ex:
            emsg = "Invalid cookie stuff: '{0}'\n".format(str(ex))
            console.terse(emsg)
            self.stack.incStat('invalid_cookie')
            #self.remove()
            self.nack(kind=PcktKind.reject.value)
            return
        if len(msg) != raeting.COOKIESTUFF_PACKER.size:
            emsg = "Invalid length of cookie stuff\n"
            console.terse(emsg)
            self.stack.incStat('invalid_cookie')
            #self.remove()
            self.nack(kind=PcktKind.reject.value)
            return
        shortraw, seid, deid, oreo = raeting.COOKIESTUFF_PACKER.unpack(msg)
        if seid != self.remote.fuid or deid != self.remote.nuid:
            emsg = "Invalid seid or deid fields in cookie stuff\n"
            console.terse(emsg)
            self.stack.incStat('invalid_cookie')
            #self.remove()
            self.nack(kind=PcktKind.reject.value)
            return
        self.oreo = binascii.hexlify(oreo) # keep for initiate and redos
        self.remote.publee = nacling.Publican(key=shortraw) # remote short term key
        self.initiate()
    def initiate(self):
        '''
        Send initiate request to cookie response to hello request.
        Packs the local long term public key, the vouch (short term key
        encrypted under the long term key), and the fqdn padded/truncated
        to 128 bytes, all encrypted to the remote's short term key along
        with the echoed oreo cookie.
        '''
        vcipher, vnonce = self.stack.local.priver.encrypt(self.remote.privee.pubraw,
                                                          self.remote.pubber.key)
        fqdn = self.remote.fqdn
        if isinstance(fqdn, unicode): # py2/3 text type; encode before packing
            fqdn = fqdn.encode('ascii', 'ignore')
        fqdn = fqdn.ljust(128, b' ')[:128] # fixed width field
        stuff = raeting.INITIATESTUFF_PACKER.pack(self.stack.local.priver.pubraw,
                                                  vcipher,
                                                  vnonce,
                                                  fqdn)
        cipher, nonce = self.remote.privee.encrypt(stuff, self.remote.publee.key)
        oreo = binascii.unhexlify(self.oreo)
        body = raeting.INITIATE_PACKER.pack(self.remote.privee.pubraw,
                                            oreo,
                                            cipher,
                                            nonce)
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.initiate,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        self.transmit(packet)
        console.concise("Allower {0}. Do Initiate with {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
    def allow(self):
        '''
        Process ackInitiate packet.
        Perform allowment in response to ack to initiate packet.
        Transmits ack to complete transaction so correspondent knows.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.allowed = True
        self.remote.alived = True # fast alive as soon as allowed
        self.ackFinal()
    def ackFinal(self):
        '''
        Send ack to ack Initiate to terminate transaction.
        This is so both sides wait on acks so transaction is not restarted until
        boths sides see completion.
        '''
        body = b''
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.ack.value,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        # remove before transmit so the redo machinery cannot resend
        self.remove()
        self.transmit(packet)
        console.concise("Allower {0}. Do Ack Final, Done with {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat("allow_initiate_complete")
        self.remote.nextSid() # start new session always on successful allow
        self.remote.replaceStaleInitiators()
        self.stack.dumpRemote(self.remote)
        self.remote.sendSavedMessages() # could include messages saved on rejoin
        if self.cascade:
            self.stack.alive(uid=self.remote.uid, cascade=self.cascade, timeout=self.timeout)
def nack(self, kind=PcktKind.nack.value):
'''
Send nack to accept response
'''
body = b''
packet = packeting.TxPacket(stack=self.stack,
kind=kind,
embody=body,
data=self.txData)
try:
packet.pack()
except raeting.PacketError as ex:
console.terse(str(ex) + '\n')
self.stack.incStat("packing_error")
self.remove()
return
if kind == PcktKind.refuse:
console.terse("Allower {0}. Do Nack Refuse of {1} in {2} at {3}\n".format(
self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
elif kind == PcktKind.reject:
console.terse("Allower {0}. Do Nack Reject of {1} in {2} at {3}\n".format(
self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
elif kind == PcktKind.nack:
console.terse("Allower {0}. Do Nack of {1} in {2} at {3}\n".format(
self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
else:
console.terse("Allower {0}. Invalid nack kind {1}. Do Nack of {2} anyway "
" in {3} at {4}\n".format(self.stack.name,
kind,
self.remote.name,
self.tid,
self.stack.store.stamp))
kind == PcktKind.nack
self.remove()
self.stack.incStat(self.statKey())
self.transmit(packet)
    def refuse(self):
        '''
        Process nack refuse to packet.
        Removes the transaction; the remote's allowed state is unchanged.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remove()
        console.concise("Allower {0}. Refused by {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())
    def reject(self):
        '''
        Process nack reject to packet.
        Terminate in response to nack: marks the remote not allowed and
        removes the transaction.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.allowed = False
        self.remove()
        console.concise("Allower {0}. Rejected by {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())
    def unjoin(self):
        '''
        Process unjoin packet.
        Terminate in response to unjoin: marks the remote unjoined,
        removes the transaction, and starts a fresh join transaction.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.joined = False
        self.remove()
        console.concise("Allower {0}. Rejected unjoin by {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())
        self.stack.join(uid=self.remote.uid, cascade=self.cascade, timeout=self.timeout)
class Allowent(Correspondent):
    '''
    RAET protocol Allowent Correspondent class Dual of Allower
    CurveCP handshake

    Correspondent side of the CurveCP style allow handshake: receives
    hello, replies with a cookie, validates the initiate, acks it, and
    marks the remote as allowed (short term session keys established).
    '''
    Timeout = 4.0 # overall transaction timeout in seconds
    RedoTimeoutMin = 0.25 # initial redo timeout
    RedoTimeoutMax = 1.0 # max redo timeout

    def __init__(self, redoTimeoutMin=None, redoTimeoutMax=None, **kwa):
        '''
        Setup instance

        redoTimeoutMin: initial redo timer duration in seconds
        redoTimeoutMax: cap for the exponentially backed off redo timer
        '''
        kwa['kind'] = TrnsKind.allow.value
        super(Allowent, self).__init__(**kwa)
        self.redoTimeoutMax = redoTimeoutMax or self.RedoTimeoutMax
        self.redoTimeoutMin = redoTimeoutMin or self.RedoTimeoutMin
        self.redoTimer = StoreTimer(self.stack.store,
                                    duration=self.redoTimeoutMin)
        self.oreo = None # keep locally generated oreo (cookie nonce) around for redos
        self.prep() # prepare .txData

    def transmit(self, packet):
        '''
        Augment transmit with restart of redo timer
        '''
        super(Allowent, self).transmit(packet)
        self.redoTimer.restart()

    def receive(self, packet):
        '''
        Process received packet belonging to this transaction.
        Dispatches on packet kind to the matching handler.
        '''
        super(Allowent, self).receive(packet) #  self.rxPacket = packet
        if packet.data['tk'] == TrnsKind.allow:
            if packet.data['pk'] == PcktKind.hello:
                self.hello()
            elif packet.data['pk'] == PcktKind.initiate:
                self.initiate()
            elif packet.data['pk'] == PcktKind.ack:
                self.final()
            elif packet.data['pk'] == PcktKind.nack: # rejected
                self.refuse()
            elif packet.data['pk'] == PcktKind.refuse: # refused
                self.refuse()
            elif packet.data['pk'] == PcktKind.reject: # rejected
                self.reject()

    def process(self):
        '''
        Perform time based processing of transaction.
        Nack refuse and give up on overall timeout; otherwise redo the
        last cookie or ack with exponential backoff on redo timer expiry.
        '''
        if self.timeout > 0.0 and self.timer.expired:
            self.nack(kind=PcktKind.refuse.value)
            console.concise("Allowent {0}. Timed out with {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
            return
        # need to perform the check for accepted status and then send accept
        if self.redoTimer.expired:
            duration = min(
                        max(self.redoTimeoutMin,
                            self.redoTimer.duration * 2.0),
                        self.redoTimeoutMax)
            self.redoTimer.restart(duration=duration)
            if self.txPacket:
                if self.txPacket.data['pk'] == PcktKind.cookie:
                    self.transmit(self.txPacket) # redo
                    console.concise("Allowent {0}. Redo Cookie with {1} in {2} at {3}\n".format(
                            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
                    self.stack.incStat('redo_cookie')
                if self.txPacket.data['pk'] == PcktKind.ack:
                    self.transmit(self.txPacket) # redo
                    console.concise("Allowent {0}. Redo Ack with {1} in {2} at {3}\n".format(
                            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
                    self.stack.incStat('redo_allow')

    def prep(self):
        '''
        Prepare .txData header fields shared by all packets in this transaction
        '''
        self.txData.update(
                            dh=self.remote.ha[0], # maybe needed for index
                            dp=self.remote.ha[1], # maybe needed for index
                            se=self.remote.nuid,
                            de=self.remote.fuid,
                            tk=self.kind,
                            cf=self.rmt,
                            bf=self.bcst,
                            si=self.sid,
                            ti=self.tid, )

    def hello(self):
        '''
        Process hello packet: check transaction preconditions, validate
        the hello body, then respond with a cookie packet.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        joins = self.remote.joinInProcess()
        if joins:
            emsg = ("Allowent {0}. Attempt to allow while join already in process with {1}. "
                    "Aborting...\n".format(self.stack.name, self.remote.name))
            console.concise(emsg)
            self.stack.incStat('invalid_allow_attempt')
            self.nack(kind=PcktKind.refuse.value)
            return # bug fix: abort after nack, previously fell through and kept processing
        allows = self.remote.allowInProcess()
        for allow in allows:
            if allow is self:
                emsg = ("Allowent {0}. Duplicate allow hello from {1}. "
                        "Dropping...\n".format(self.stack.name, self.remote.name))
                console.concise(emsg)
                self.stack.incStat('duplicate_allow_attempt')
                return
            if allow.rmt: # is already a correspondent to an allow
                emsg = ("Allowent {0}. Another allowent already in process with {1}. "
                        "Aborting...\n".format(self.stack.name, self.remote.name))
                console.concise(emsg)
                self.stack.incStat('redundant_allow_attempt')
                self.nack(kind=PcktKind.refuse.value)
                return
            else: # already initiator allow in process, resolve race condition
                if self.stack.local.name < self.remote.name: # abort correspondent
                    emsg = ("Allowent {0}. Already initiated allow with {1}. "
                            "Aborting because lesser local name...\n".format(
                                self.stack.name, self.remote.name))
                    console.concise(emsg)
                    self.stack.incStat('redundant_allow_attempt')
                    self.nack(kind=PcktKind.refuse.value)
                    return
                else: # abort initiator, could let otherside nack do this
                    emsg = ("Allowent {0}. Removing initiator allow with {1}. "
                            "Proceeding because lesser local name...\n".format(
                                self.stack.name, self.remote.name))
                    console.concise(emsg)
                    allow.nack(kind=PcktKind.refuse.value)
                    self.remote.allowed = None
        if not self.remote.joined:
            emsg = "Allowent {0}. Must be joined with {1} first\n".format(
                self.stack.name, self.remote.name)
            console.terse(emsg)
            self.stack.incStat('unjoined_allow_attempt')
            self.nack(kind=PcktKind.unjoined.value)
            return
        self.remote.rekey() # refresh short term keys and .allowed
        self.add()
        body = self.rxPacket.body.data
        if not isinstance(body, bytes):
            emsg = "Invalid format of hello packet body\n"
            console.terse(emsg)
            self.stack.incStat('invalid_hello')
            self.nack(kind=PcktKind.reject.value)
            return
        if len(body) != raeting.HELLO_PACKER.size:
            emsg = "Invalid length of hello packet body\n"
            console.terse(emsg)
            self.stack.incStat('invalid_hello')
            self.nack(kind=PcktKind.reject.value)
            return
        plain, shortraw, cipher, nonce = raeting.HELLO_PACKER.unpack(body)
        self.remote.publee = nacling.Publican(key=shortraw)
        msg = self.stack.local.priver.decrypt(cipher, nonce, self.remote.publee.key)
        if msg != plain: # proves remote holds the matching short term private key
            emsg = "Invalid plain not match decrypted cipher\n"
            console.terse(emsg)
            self.stack.incStat('invalid_hello')
            self.nack(kind=PcktKind.reject.value)
            return
        self.cookie()

    def cookie(self):
        '''
        Send Cookie Packet: our encrypted short term public key, the
        session uids, and an oreo (cookie nonce) the initiate must echo.
        '''
        oreo = self.stack.local.priver.nonce()
        self.oreo = binascii.hexlify(oreo) # retained to validate the initiate
        stuff = raeting.COOKIESTUFF_PACKER.pack(self.remote.privee.pubraw,
                                                self.remote.nuid,
                                                self.remote.fuid,
                                                oreo)
        cipher, nonce = self.stack.local.priver.encrypt(stuff, self.remote.publee.key)
        body = raeting.COOKIE_PACKER.pack(cipher, nonce)
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.cookie.value,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        self.transmit(packet)
        console.concise("Allowent {0}. Do Cookie with {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))

    def initiate(self):
        '''
        Process initiate packet: verify the echoed cookie, the vouched
        short term key, and the fqdn, then ack and complete the allow.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        body = self.rxPacket.body.data
        if not isinstance(body, bytes):
            emsg = "Invalid format of initiate packet body\n"
            console.terse(emsg)
            self.stack.incStat('invalid_initiate')
            self.nack(kind=PcktKind.reject.value)
            return
        if len(body) != raeting.INITIATE_PACKER.size:
            emsg = "Invalid length of initiate packet body\n"
            console.terse(emsg)
            self.stack.incStat('invalid_initiate')
            self.nack(kind=PcktKind.reject.value)
            return
        shortraw, oreo, cipher, nonce = raeting.INITIATE_PACKER.unpack(body)
        if shortraw != self.remote.publee.keyraw:
            emsg = "Mismatch of short term public key in initiate packet\n"
            console.terse(emsg)
            self.stack.incStat('invalid_initiate')
            self.nack(kind=PcktKind.reject.value)
            return
        if (binascii.hexlify(oreo) != self.oreo):
            emsg = "Stale or invalid cookie in initiate packet\n"
            console.terse(emsg)
            self.stack.incStat('invalid_initiate')
            self.nack(kind=PcktKind.reject.value)
            return
        msg = self.remote.privee.decrypt(cipher, nonce, self.remote.publee.key)
        if len(msg) != raeting.INITIATESTUFF_PACKER.size:
            emsg = "Invalid length of initiate stuff\n"
            console.terse(emsg)
            self.stack.incStat('invalid_initiate')
            self.nack(kind=PcktKind.reject.value)
            return
        pubraw, vcipher, vnonce, fqdn = raeting.INITIATESTUFF_PACKER.unpack(msg)
        if pubraw != self.remote.pubber.keyraw:
            emsg = "Mismatch of long term public key in initiate stuff\n"
            console.terse(emsg)
            self.stack.incStat('invalid_initiate')
            self.nack(kind=PcktKind.reject.value)
            return
        fqdn = fqdn.rstrip(b' ')
        lfqdn = self.stack.local.fqdn
        if isinstance(lfqdn, unicode): # py2x compat alias for str
            lfqdn = lfqdn.encode('ascii', 'ignore')
        lfqdn = lfqdn.ljust(128, b' ')[:128].rstrip(b' ')
        if fqdn != lfqdn:
            # NOTE: deliberately warn-only; rejecting on fqdn mismatch is disabled
            emsg = ("Mismatch of local fqdn {0} with rxed fqdn {1} in initiate "
                    "stuff\n".format(lfqdn, fqdn))
            console.terse(emsg)
        vouch = self.stack.local.priver.decrypt(vcipher, vnonce, self.remote.pubber.key)
        if vouch != self.remote.publee.keyraw or vouch != shortraw:
            emsg = "Short term key vouch failed\n"
            console.terse(emsg)
            self.stack.incStat('invalid_initiate')
            self.nack(kind=PcktKind.reject.value)
            return
        self.ackInitiate()

    def ackInitiate(self):
        '''
        Send ack to initiate request
        '''
        body = b''
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.ack.value,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        self.transmit(packet)
        console.concise("Allowent {0}. Do Ack Initiate with {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.allow()

    def allow(self):
        '''
        Perform allowment: mark remote allowed and alive, rotate session id,
        and persist the remote.
        '''
        self.remote.allowed = True
        self.remote.alived = True # Fast alived as soon as allowed
        self.remote.nextSid() # start new session always on successful allow
        self.remote.replaceStaleInitiators()
        self.stack.dumpRemote(self.remote)

    def final(self):
        '''
        Process ackFinal packet
        So that both sides are waiting on acks at the end so does not restart
        transaction if ack initiate is dropped
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remove()
        console.concise("Allowent {0}. Done with {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat("allow_correspond_complete")
        self.remote.sendSavedMessages() # could include messages saved on rejoin

    def refuse(self):
        '''
        Process nack refuse packet
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remove()
        # bug fix: message previously ended with literal 'n' instead of newline
        console.concise("Allowent {0}. Refused by {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())

    def reject(self):
        '''
        Process nack packet
        terminate in response to nack
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.allowed = False
        self.remove()
        console.concise("Allowent {0}. Rejected by {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())

    def nack(self, kind=PcktKind.nack.value):
        '''
        Send nack to terminate allow transaction

        kind: packet kind for the nack (nack, refuse, reject, or unjoined)
        '''
        body = b''
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=kind,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        if kind == PcktKind.refuse:
            console.terse("Allowent {0}. Do Nack Refuse of {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        elif kind == PcktKind.reject:
            console.concise("Allowent {0}. Do Nack Reject {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        elif kind == PcktKind.unjoined:
            console.concise("Allowent {0}. Do Nack Unjoined {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        elif kind == PcktKind.nack:
            console.terse("Allowent {0}. Do Nack of {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        else:
            console.terse("Allowent {0}. Invalid nack kind {1}. Do Nack of {2} anyway "
                          " in {3} at {4}\n".format(self.stack.name,
                                                    kind,
                                                    self.remote.name,
                                                    self.tid,
                                                    self.stack.store.stamp))
            kind = PcktKind.nack # bug fix: was '==' comparison with no effect
        self.remove()
        self.transmit(packet)
        self.stack.incStat(self.statKey())
class Aliver(Initiator):
    '''
    RAET protocol Aliver Initiator class Dual of Alivent
    Sends keep alive heartbeat messages to detect presence
    update alived status of .remote
    only use .remote.refresh to update
    '''
    Timeout = 2.0 # overall transaction timeout in seconds
    RedoTimeoutMin = 0.25 # initial timeout
    RedoTimeoutMax = 1.0 # max timeout
    def __init__(self, redoTimeoutMin=None, redoTimeoutMax=None,
                 cascade=False, **kwa):
        '''
        Setup instance

        redoTimeoutMin: initial redo timer duration in seconds
        redoTimeoutMax: cap for the exponentially backed off redo timer
        cascade: if True, propagate cascade to any join/allow restarted here
        '''
        kwa['kind'] = TrnsKind.alive.value
        super(Aliver, self).__init__(**kwa)
        self.cascade = cascade
        self.redoTimeoutMax = redoTimeoutMax or self.RedoTimeoutMax
        self.redoTimeoutMin = redoTimeoutMin or self.RedoTimeoutMin
        self.redoTimer = StoreTimer(self.stack.store,
                                    duration=self.redoTimeoutMin)
        self.sid = self.remote.sid # current session id from remote
        self.tid = self.remote.nextTid() # new transaction id
        self.prep() # prepare .txData
    def transmit(self, packet):
        '''
        Augment transmit with restart of redo timer
        '''
        super(Aliver, self).transmit(packet)
        self.redoTimer.restart()
    def receive(self, packet):
        """
        Process received packet belonging to this transaction.
        Dispatches on packet kind to the matching handler.
        """
        super(Aliver, self).receive(packet)
        if packet.data['tk'] == TrnsKind.alive:
            if packet.data['pk'] == PcktKind.ack:
                self.complete()
            elif packet.data['pk'] == PcktKind.nack: # refused
                self.refuse()
            elif packet.data['pk'] == PcktKind.refuse: # refused
                self.refuse()
            elif packet.data['pk'] == PcktKind.unjoined: # unjoin
                self.unjoin()
            elif packet.data['pk'] == PcktKind.unallowed: # unallow
                self.unallow()
            elif packet.data['pk'] == PcktKind.reject: # rejected
                self.reject()
    def process(self):
        '''
        Perform time based processing of transaction.
        On overall timeout mark the remote dead; otherwise redo the alive
        request with exponential backoff when the redo timer expires.
        '''
        if self.timeout > 0.0 and self.timer.expired:
            console.concise("Aliver {0}. Timed out with {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
            self.remove()
            self.remote.refresh(alived=False) # mark as dead
            return
        # need keep sending message until completed or timed out
        if self.redoTimer.expired:
            duration = min(
                         max(self.redoTimeoutMin,
                              self.redoTimer.duration * 2.0),
                         self.redoTimeoutMax)
            self.redoTimer.restart(duration=duration)
            if self.txPacket:
                if self.txPacket.data['pk'] == PcktKind.request:
                    self.transmit(self.txPacket) # redo
                    console.concise("Aliver {0}. Redo with {1} in {2} at {3}\n".format(
                            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
                    self.stack.incStat('redo_alive')
    def prep(self):
        '''
        Prepare .txData header fields shared by all packets in this transaction
        '''
        self.txData.update( #sh=self.stack.local.ha[0],
                            #sp=self.stack.local.ha[1],
                            dh=self.remote.ha[0], # maybe needed for index
                            dp=self.remote.ha[1], # maybe needed for index
                            se=self.remote.nuid,
                            de=self.remote.fuid,
                            tk=self.kind,
                            cf=self.rmt,
                            bf=self.bcst,
                            si=self.sid,
                            ti=self.tid,)
    def alive(self, body=None):
        '''
        Send alive request message.
        Requires the remote to be joined and allowed; otherwise restarts
        the missing join/allow transaction instead of sending.
        body: unused here; the request is sent with an empty odict body.
        '''
        if not self.remote.joined:
            emsg = "Aliver {0}. Must be joined with {1} first\n".format(
                self.stack.name, self.remote.name)
            console.terse(emsg)
            self.stack.incStat('unjoined_remote')
            self.stack.join(uid=self.remote.uid, cascade=self.cascade, timeout=self.timeout)
            return
        if not self.remote.allowed:
            emsg = "Aliver {0}. Must be allowed with {1} first\n".format(
                self.stack.name, self.remote.name)
            console.terse(emsg)
            self.stack.incStat('unallowed_remote')
            self.stack.allow(uid=self.remote.uid, cascade=self.cascade, timeout=self.timeout)
            return
        self.remote.refresh(alived=None) # restart timer but do not change alived status
        self.add()
        body = odict()
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.request.value,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        self.transmit(packet)
        console.concise("Aliver {0}. Do Alive with {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
    def complete(self):
        '''
        Process ack packet. Complete transaction and remove
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.refresh(alived=True) # restart timer mark as alive
        self.remove()
        console.concise("Aliver {0}. Done with {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat("alive_complete")
    def refuse(self):
        '''
        Process nack refuse packet
        terminate in response to nack
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.refresh(alived=None) # restart timer do not change status
        self.remove()
        console.concise("Aliver {0}. Refused by {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())
    def reject(self):
        '''
        Process nack reject packet
        terminate in response to nack
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.refresh(alived=False) # restart timer set status to False
        self.remove()
        console.concise("Aliver {0}. Rejected by {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())
    def unjoin(self):
        '''
        Process unjoin packet
        terminate in response to unjoin, then restart the join transaction
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.refresh(alived=None) # restart timer do not change status
        self.remote.joined = False
        self.remove()
        console.concise("Aliver {0}. Refused unjoin by {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())
        self.stack.join(uid=self.remote.uid, cascade=self.cascade, timeout=self.timeout)
    def unallow(self):
        '''
        Process unallow nack packet
        terminate in response to unallow, then restart the allow transaction
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.refresh(alived=None) # restart timer do not change status
        self.remote.allowed = False
        self.remove()
        console.concise("Aliver {0}. Refused unallow by {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())
        self.stack.allow(uid=self.remote.uid, cascade=self.cascade, timeout=self.timeout)
class Alivent(Correspondent):
    '''
    RAET protocol Alivent Correspondent class Dual of Aliver
    Keep alive heartbeat

    Responds to alive requests: acks when the remote is joined and
    allowed, otherwise nacks with a kind telling the initiator what it
    must re-establish (unjoined/unallowed).
    '''
    Timeout = 10.0 # overall transaction timeout in seconds

    def __init__(self, **kwa):
        '''
        Setup instance
        '''
        kwa['kind'] = TrnsKind.alive.value
        super(Alivent, self).__init__(**kwa)
        self.prep() # prepare .txData

    def receive(self, packet):
        '''
        Process received packet belonging to this transaction
        '''
        super(Alivent, self).receive(packet)
        if packet.data['tk'] == TrnsKind.alive:
            if packet.data['pk'] == PcktKind.request:
                self.alive()

    def process(self):
        '''
        Perform time based processing of transaction.
        Nack and terminate on overall timeout.
        '''
        if self.timeout > 0.0 and self.timer.expired:
            self.nack() # manage restarts alive later
            console.concise("Alivent {0}. Timed out with {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
            return

    def prep(self):
        '''
        Prepare .txData header fields shared by all packets in this transaction
        '''
        self.txData.update(
                            dh=self.remote.ha[0], # maybe needed for index
                            dp=self.remote.ha[1], # maybe needed for index
                            se=self.remote.nuid,
                            de=self.remote.fuid,
                            tk=self.kind,
                            cf=self.rmt,
                            bf=self.bcst,
                            si=self.sid,
                            ti=self.tid,)

    def alive(self):
        '''
        Process alive request packet and respond with an ack, or nack
        with unjoined/unallowed if the session is not fully established.
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        if not self.remote.joined:
            self.remote.refresh(alived=None) # received signed packet so its alive
            emsg = "Alivent {0}. Must be joined with {1} first\n".format(
                self.stack.name, self.remote.name)
            console.terse(emsg)
            self.stack.incStat('unjoined_alive_attempt')
            self.nack(kind=PcktKind.unjoined.value)
            return
        if not self.remote.allowed:
            self.remote.refresh(alived=None) # received signed packet so its alive
            emsg = "Alivent {0}. Must be allowed with {1} first\n".format(
                self.stack.name, self.remote.name)
            console.terse(emsg)
            self.stack.incStat('unallowed_alive_attempt')
            self.nack(kind=PcktKind.unallowed.value)
            return
        self.add()
        body = odict() # ack carries an empty body
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.ack.value,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        self.transmit(packet)
        console.concise("Alivent {0}. Do ack alive with {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.remote.refresh(alived=True)
        self.remove()
        console.concise("Alivent {0}. Done with {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat("alive_complete")

    def nack(self, kind=PcktKind.nack.value):
        '''
        Send nack to terminate alive transaction

        kind: packet kind for the nack (nack, refuse, reject, unjoined,
              or unallowed)
        '''
        body = odict()
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=kind,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        if kind == PcktKind.refuse:
            console.terse("Alivent {0}. Do Refuse of {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        elif kind == PcktKind.unjoined:
            console.terse("Alivent {0}. Do Unjoined of {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        elif kind == PcktKind.unallowed:
            console.terse("Alivent {0}. Do Unallowed of {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        elif kind == PcktKind.reject:
            console.concise("Alivent {0}. Do Reject {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        elif kind == PcktKind.nack:
            console.terse("Alivent {0}. Do Nack of {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        else:
            console.terse("Alivent {0}. Invalid nack kind {1}. Do Nack of {2} anyway "
                          " in {3} at {4}\n".format(self.stack.name,
                                                    kind,
                                                    self.remote.name,
                                                    self.tid,
                                                    self.stack.store.stamp))
            kind = PcktKind.nack # bug fix: was '==' comparison with no effect
        self.transmit(packet)
        self.remove()
        self.stack.incStat(self.statKey())
class Messenger(Initiator):
    '''
    RAET protocol Messenger Initiator class Dual of Messengent
    Generic messages

    Segments a message into packets via .tray and transmits them in
    bursts, retransmitting requested missed segments until the
    correspondent signals completion.
    '''
    Timeout = 0.0 # no overall timeout by default, redo timer drives retries
    RedoTimeoutMin = 0.2 # initial timeout
    RedoTimeoutMax = 0.5 # max timeout

    def __init__(self, redoTimeoutMin=None, redoTimeoutMax=None, burst=0, **kwa):
        '''
        Setup instance

        redoTimeoutMin: initial redo timer duration in seconds
        redoTimeoutMax: cap for the exponentially backed off redo timer
        burst: max number of segments sent per burst, 0 means unlimited
        '''
        kwa['kind'] = TrnsKind.message.value
        super(Messenger, self).__init__(**kwa)
        self.redoTimeoutMax = redoTimeoutMax or self.RedoTimeoutMax
        self.redoTimeoutMin = redoTimeoutMin or self.RedoTimeoutMin
        self.redoTimer = StoreTimer(self.stack.store,
                                    duration=self.redoTimeoutMin)
        self.burst = max(0, int(burst)) # burst size, 0 means send all remaining
        self.misseds = oset() # ordered set of currently missed segments
        self.acked = False # have received at least one ack
        self.sid = self.remote.sid
        self.tid = self.remote.nextTid()
        self.prep() # prepare .txData
        self.tray = packeting.TxTray(stack=self.stack) # segments outgoing message

    def transmit(self, packet):
        '''
        Augment transmit with restart of redo timer
        '''
        super(Messenger, self).transmit(packet)
        self.redoTimer.restart()

    def receive(self, packet):
        '''
        Process received packet belonging to this transaction.
        Dispatches on packet kind to the matching handler.
        '''
        super(Messenger, self).receive(packet)
        if packet.data['tk'] == TrnsKind.message:
            if packet.data['pk'] == PcktKind.ack: # more
                self.acked = True
                self.another() # continue message
            elif packet.data['pk'] == PcktKind.resend: # resend
                self.acked = True
                self.resend() # resend missed segments
            elif packet.data['pk'] == PcktKind.done: # completed
                self.acked = True
                self.complete()
            elif packet.data['pk'] == PcktKind.nack: # rejected
                self.reject()

    def process(self):
        '''
        Perform time based processing of transaction.
        Remove on overall timeout; otherwise redo the last segment with
        exponential backoff when the redo timer expires.
        '''
        if self.timeout > 0.0 and self.timer.expired:
            self.remove()
            console.concise("Messenger {0}. Timed out with {1} in {2} at {3}\n".format(
                    self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
            return
        # keep sending message until completed or timed out
        if self.redoTimer.expired:
            duration = min(
                        max(self.redoTimeoutMin,
                            self.redoTimer.duration * 2.0),
                        self.redoTimeoutMax)
            self.redoTimer.restart(duration=duration)
            if self.txPacket:
                if self.txPacket.data['pk'] in [PcktKind.message]:
                    if self.acked and not self.txPacket.data['af']: # turn on AgnFlag if not set
                        self.txPacket.data.update(af=True)
                        self.txPacket.repack()
                    self.transmit(self.txPacket) # redo
                    console.concise("Messenger {0}. Redo Segment {1} with "
                                    "{2} in {3} at {4}\n".format(
                                        self.stack.name,
                                        self.txPacket.data['sn'],
                                        self.remote.name,
                                        self.tid,
                                        self.stack.store.stamp))
                    self.stack.incStat('redo_segment')

    def prep(self):
        '''
        Prepare .txData header fields shared by all packets in this transaction
        '''
        self.txData.update(
                            dh=self.remote.ha[0], # maybe needed for index
                            dp=self.remote.ha[1], # maybe needed for index
                            se=self.remote.nuid,
                            de=self.remote.fuid,
                            tk=self.kind,
                            cf=self.rmt,
                            bf=self.bcst,
                            si=self.sid,
                            ti=self.tid,)

    def message(self, body=None):
        '''
        Send message or part of message. So repeatedly called until complete.

        body: message body to pack on the first call; ignored once the
              tray already holds packets.
        '''
        if not self.remote.allowed:
            emsg = "Messenger {0}. Must be allowed with {1} first\n".format(
                self.stack.name, self.remote.name)
            console.terse(emsg)
            self.stack.incStat('unallowed_remote')
            self.remove()
            return
        if not self.tray.packets:
            try:
                self.tray.pack(data=self.txData, body=body)
            except raeting.PacketError as ex:
                console.terse(str(ex) + '\n')
                self.stack.incStat("packing_error")
                self.remove()
                return
        if self.tray.current >= len(self.tray.packets):
            emsg = "Messenger {0}. Current packet {1} greater than num packets {2}\n".format(
                self.stack.name, self.tray.current, len(self.tray.packets))
            console.terse(emsg)
            self.remove()
            return
        if self.index not in self.remote.transactions:
            self.add()
        elif self.remote.transactions[self.index] != self:
            emsg = "Messenger {0}. Remote {1} Index collision of {2} in {3} at {4}\n".format(
                self.stack.name,
                self.remote.name,
                self.index,
                self.tid,
                self.stack.store.stamp)
            console.terse(emsg)
            # bug fix: incStat is a stack method, not a transaction method
            self.stack.incStat('message_index_collision')
            self.remove()
            return
        # send up to .burst remaining segments (all remaining when burst is 0)
        burst = (min(self.burst, (len(self.tray.packets) - self.tray.current))
                 if self.burst else (len(self.tray.packets) - self.tray.current))
        packets = self.tray.packets[self.tray.current:self.tray.current + burst]
        if packets:
            last = packets[-1]
            last.data.update(wf=True) # set wait flag on last packet in burst
            last.repack()
        for packet in packets:
            self.transmit(packet)
            self.tray.last = self.tray.current
            self.tray.current += 1
            self.stack.incStat("message_segment_tx")
            console.concise("Messenger {0}. Do Message Segment {1} with {2} in {3} at {4}\n".format(
                    self.stack.name, self.tray.last, self.remote.name, self.tid, self.stack.store.stamp))

    def another(self):
        '''
        Process ack packet and continue sending
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.refresh(alived=True)
        self.stack.incStat("message_ack_rx")
        if self.misseds:
            self.sendMisseds()
        else:
            current = self.rxPacket.data['sn'] + 1 # next segment the remote wants
            if self.tray.current > current:
                console.concise("Messenger {0}. Current {1} is ahead of requested {2}. Adjust.\n".format(
                    self.stack.name, self.tray.current, current))
                self.tray.current = current
                self.tray.last = current - 1
            if self.tray.current < len(self.tray.packets):
                self.message() # continue message

    def resend(self):
        '''
        Process resend packet and update .misseds list of missing packets
        Then send misseds
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.refresh(alived=True)
        self.stack.incStat('message_resend_rx')
        body = self.rxPacket.body.data
        misseds = body.get('misseds') # indexes of missed segments
        if misseds:
            if not self.tray.packets:
                emsg = "Invalid resend request '{0}'\n".format(misseds)
                console.terse(emsg)
                self.stack.incStat('invalid_resend')
                return
            for m in misseds:
                try:
                    packet = self.tray.packets[m]
                except IndexError:
                    console.terse("Invalid misseds segment number {0}\n".format(m))
                    self.stack.incStat("invalid_misseds")
                    return
                self.misseds.add(packet) # add segment, set only adds if unique
            self.sendMisseds()

    def sendMisseds(self):
        '''
        Send a burst of missed packets
        '''
        if self.misseds:
            burst = (min(self.burst, (len(self.misseds))) if
                     self.burst else len(self.misseds))
            # make list of first burst number of packets
            misseds = [missed for missed in self.misseds][:burst]
            for packet in misseds[:-1]:
                repack = False
                if not packet.data['af']: # turn on again flag if not set
                    packet.data.update(af=True)
                    repack = True
                if packet.data['wf']: # turn off wait flag if set
                    packet.data.update(wf=False)
                    repack = True
                if repack:
                    packet.repack()
            for packet in misseds[-1:]: # last packet in burst waits for ack
                repack = False
                if not packet.data['af']: # turn on again flag if not set
                    packet.data.update(af=True)
                    repack = True
                if not packet.data['wf']: # turn on wait flag if not set
                    packet.data.update(wf=True)
                    repack = True
                if repack:
                    packet.repack()
            for packet in misseds:
                self.transmit(packet)
                self.stack.incStat("message_segment_tx")
                console.concise("Messenger {0}. Do Resend Message Segment "
                                "{1} with {2} in {3} at {4}\n".format(
                                    self.stack.name,
                                    packet.data['sn'],
                                    self.remote.name,
                                    self.tid,
                                    self.stack.store.stamp))
                self.misseds.discard(packet) # remove from self.misseds

    def complete(self):
        '''
        Process Done Ack
        Complete transaction and remove
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.refresh(alived=True)
        self.stack.incStat('message_complete_rx')
        self.remove()
        console.concise("Messenger {0}. Done with {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat("message_initiate_complete")

    def reject(self):
        '''
        Process nack packet
        terminate in response to nack
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.refresh(alived=True)
        self.stack.incStat('message_reject_rx')
        self.remove()
        console.concise("Messenger {0}. Rejected by {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())

    def nack(self):
        '''
        Send nack to terminate transaction
        '''
        body = odict()
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.nack.value,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        self.transmit(packet)
        self.stack.incStat('message_nack_tx')
        self.remove()
        console.concise("Messenger {0}. Do Nack Reject of {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())
class Messengent(Correspondent):
    '''
    RAET protocol Messengent Correspondent class Dual of Messenger
    Generic Messages

    Receives a (possibly multi-segment) message from a remote Messenger:
    acks received segments, requests resends of missed ones, and appends the
    completed body to the stack's rxMsgs queue.
    '''
    Timeout = 0.0  # 0.0 disables the overall transaction timeout (see .process)
    RedoTimeoutMin = 0.2 # initial timeout
    RedoTimeoutMax = 0.5 # max timeout

    def __init__(self, redoTimeoutMin=None, redoTimeoutMax=None, **kwa):
        '''
        Setup instance

        redoTimeoutMin/redoTimeoutMax bound the exponential redo backoff
        applied in .process. Remaining kwa passed through to Correspondent.
        '''
        kwa['kind'] = TrnsKind.message.value
        super(Messengent, self).__init__(**kwa)
        self.redoTimeoutMax = redoTimeoutMax or self.RedoTimeoutMax
        self.redoTimeoutMin = redoTimeoutMin or self.RedoTimeoutMin
        self.redoTimer = StoreTimer(self.stack.store,
                                    duration=self.redoTimeoutMin)
        self.wait = False # wf wait flag
        self.lowest = None  # lowest missed segment number seen so far
        self.prep() # prepare .txData
        self.tray = packeting.RxTray(stack=self.stack)  # segment reassembly

    def transmit(self, packet):
        '''
        Augment transmit with restart of redo timer
        '''
        super(Messengent, self).transmit(packet)
        self.redoTimer.restart()

    def receive(self, packet):
        """
        Process received packet belonging to this transaction.

        Dispatches message packets to .message and nacks to .reject.
        """
        super(Messengent, self).receive(packet)

        # resent message
        if packet.data['tk'] == TrnsKind.message:
            if packet.data['pk'] == PcktKind.message:
                self.message()
            elif packet.data['pk'] == PcktKind.nack: # rejected
                self.reject()

    def process(self):
        '''
        Perform time based processing of transaction.

        On overall timeout: nack and give up. On redo timer expiry: double
        the redo duration (clamped to [redoTimeoutMin, redoTimeoutMax]) and
        either complete, request missed segments, or ack for more.
        '''
        if self.timeout > 0.0 and self.timer.expired:
            self.nack()
            console.concise("Messengent {0}. Timed out with {1} in {2} at {3}\n".format(
                self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
            return

        if self.redoTimer.expired:
            # exponential backoff, clamped
            duration = min(
                max(self.redoTimeoutMin,
                    self.redoTimer.duration * 2.0),
                self.redoTimeoutMax)
            self.redoTimer.restart(duration=duration)

            if self.tray.complete:
                self.complete()
            else:
                misseds = self.tray.missing(begin=self.lowest)
                if misseds: # resent missed segments
                    self.lowest = misseds[0]
                    self.resend(misseds)
                else: # always ask for more here
                    self.ack()

    def prep(self):
        '''
        Prepare .txData with the header fields reused by every packet this
        transaction sends back (acks, resends, done, nack).
        '''
        self.txData.update( #sh=self.stack.local.ha[0],
                            #sp=self.stack.local.ha[1],
                            dh=self.remote.ha[0], # maybe needed for index
                            dp=self.remote.ha[1], # maybe needed for index
                            se=self.remote.nuid,
                            de=self.remote.fuid,
                            tk=self.kind,
                            cf=self.rmt,
                            bf=self.bcst,
                            wf=self.rxPacket.data['wf'], # was self.wait
                            si=self.sid,
                            ti=self.tid,
                            ck=self.rxPacket.data['ck'], # so acks use same coat kind encrypted
                            fk=self.rxPacket.data['fk'], # so acks use same foot kind signed
                          )

    def message(self):
        '''
        Process message packet. Called repeatedly for each packet in message.

        Rejects (nacks) when the remote is not allowed, on parse errors, or
        on transaction index collisions; otherwise feeds the segment to the
        tray and acks/completes as appropriate.
        '''
        if not self.remote.allowed:
            emsg = "Messengent {0}. Must be allowed with {1} first\n".format(
                self.stack.name, self.remote.name)
            console.terse(emsg)
            self.stack.incStat('unallowed_message_attempt')
            self.nack()
            return

        try:
            body = self.tray.parse(self.rxPacket)
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            # NOTE(review): other paths use self.stack.incStat -- confirm the
            # Transaction base class actually defines incStat.
            self.incStat('parsing_message_error')
            self.nack()
            return

        if self.index not in self.remote.transactions:
            self.add()
        elif self.remote.transactions[self.index] != self:
            emsg = "Messengent {0}. Remote {1} Index collision of {2} in {3} at {4}\n".format(
                self.stack.name,
                self.remote.name,
                self.index,
                self.tid,
                self.stack.store.stamp)
            console.terse(emsg)
            # NOTE(review): see incStat note above in this method.
            self.incStat('message_index_collision')
            self.nack()
            return

        self.remote.refresh(alived=True)
        self.stack.incStat("message_segment_rx")

        self.wait = self.rxPacket.data['wf'] # sender is waiting for ack

        if self.tray.complete:
            self.complete()
        elif self.wait: # ask for more if sender waiting for ack
            misseds = self.tray.missing(begin=self.lowest)
            if misseds: # resent missed segments
                self.lowest = misseds[0]
                self.resend(misseds)
            else:
                self.ack()

    def ack(self):
        '''
        Send ack to message; 'sn' carries the highest contiguous segment so
        the sender knows where to resume.
        '''
        body = odict()
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.ack.value,
                                    embody=body,
                                    data=self.txData)
        packet.data['sn'] = self.tray.highest
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        self.transmit(packet)
        self.stack.incStat("message_more_ack")
        console.concise("Messengent {0}. Do Ack More from {1} on Segment {2} with {3} in {4} at {5}\n".format(
            self.stack.name,
            self.tray.highest + 1,
            self.rxPacket.data['sn'],
            self.remote.name,
            self.tid,
            self.stack.store.stamp))

    def resend(self, misseds):
        '''
        Send resend request(s) for missing packets, batching at most
        raeting.MAX_MISSEDS_RESEND segment numbers per request.
        '''
        while misseds:
            if len(misseds) > raeting.MAX_MISSEDS_RESEND: # was 64
                remainders = misseds[raeting.MAX_MISSEDS_RESEND:] # only do at most 64 at a time
                misseds = misseds[:raeting.MAX_MISSEDS_RESEND]
            else:
                remainders = []

            body = odict(misseds=misseds)
            packet = packeting.TxPacket(stack=self.stack,
                                        kind=PcktKind.resend.value,
                                        embody=body,
                                        data=self.txData)
            try:
                packet.pack()
            except raeting.PacketError as ex:
                console.terse(str(ex) + '\n')
                self.stack.incStat("packing_error")
                self.remove()
                return
            self.transmit(packet)
            self.stack.incStat("message_resend_tx")
            console.concise("Messengent {0}. Do Resend Segments {1} with {2} in {3} at {4}\n".format(
                self.stack.name,
                misseds,
                self.remote.name,
                self.tid,
                self.stack.store.stamp))
            misseds = remainders

    def complete(self):
        '''
        Complete transaction: ack done, deliver the reassembled body, remove.
        '''
        self.done()
        console.verbose("{0} received message body\n{1}\n".format(
            self.stack.name, self.tray.body))
        # application layer authorization needs to know who sent the message
        self.stack.rxMsgs.append((self.tray.body, self.remote.name))
        self.remove()
        console.concise("Messengent {0}. Complete with {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat("messengent_correspond_complete")

    def done(self):
        '''
        Send done ack to complete message
        '''
        body = odict()
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.done.value,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        self.transmit(packet)
        self.stack.incStat("message_complete_ack")
        console.concise("Messengent {0}. Do Ack Done Message on Segment {1} with {2} in {3} at {4}\n".format(
            self.stack.name,
            self.rxPacket.data['sn'],
            self.remote.name,
            self.tid,
            self.stack.store.stamp))

    def reject(self):
        '''
        Process nack packet
        terminate in response to nack
        '''
        if not self.stack.parseInner(self.rxPacket):
            return
        self.remote.refresh(alived=True)
        self.stack.incStat("message_reject_nack")

        self.remove()
        console.concise("Messengent {0}. Rejected by {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())

    def nack(self):
        '''
        Send nack to terminate messenger transaction
        '''
        body = odict()
        packet = packeting.TxPacket(stack=self.stack,
                                    kind=PcktKind.nack.value,
                                    embody=body,
                                    data=self.txData)
        try:
            packet.pack()
        except raeting.PacketError as ex:
            console.terse(str(ex) + '\n')
            self.stack.incStat("packing_error")
            self.remove()
            return
        self.transmit(packet)
        self.remove()
        # fixed typo: was "Messagent" in the log message
        console.concise("Messengent {0}. Do Nack Reject of {1} in {2} at {3}\n".format(
            self.stack.name, self.remote.name, self.tid, self.stack.store.stamp))
        self.stack.incStat(self.statKey())

    def remove(self, remote=None, index=None):
        '''
        Record the tid as done on the remote before removing, so duplicate
        late segments of this message are recognized and dropped.
        '''
        self.remote.addDoneTransaction(self.tid)
        super(Messengent, self).remove(remote, index)
| 39.367328
| 110
| 0.522571
| 13,969
| 128,928
| 4.800558
| 0.047892
| 0.07301
| 0.035551
| 0.031972
| 0.790662
| 0.758466
| 0.73073
| 0.703798
| 0.68811
| 0.668232
| 0
| 0.009254
| 0.371386
| 128,928
| 3,274
| 111
| 39.379352
| 0.818165
| 0.114537
| 0
| 0.752336
| 0
| 0.005098
| 0.103189
| 0.006082
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051827
| false
| 0.00085
| 0.007222
| 0
| 0.13339
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0e73f3677b1e785639627716428a32dbaf7c4566
| 99
|
py
|
Python
|
networks/__init__.py
|
flyingjam/faces
|
e2bb4c66f7ef8c453911ac572ac49c68e0bb0267
|
[
"BSD-3-Clause"
] | null | null | null |
networks/__init__.py
|
flyingjam/faces
|
e2bb4c66f7ef8c453911ac572ac49c68e0bb0267
|
[
"BSD-3-Clause"
] | null | null | null |
networks/__init__.py
|
flyingjam/faces
|
e2bb4c66f7ef8c453911ac572ac49c68e0bb0267
|
[
"BSD-3-Clause"
] | null | null | null |
from networks.network import Network, Result
from networks.VAE import Encoder, Decoder, VAENetwork
| 33
| 53
| 0.838384
| 13
| 99
| 6.384615
| 0.692308
| 0.289157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 99
| 2
| 54
| 49.5
| 0.943182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0e842bec301eea7276bd3dab5481855ccbcc1688
| 114
|
py
|
Python
|
tests/arbpack/arbmod.py
|
mofm/pypyr
|
f417f69ba9a607d8a93019854105cfbc4dc9c36d
|
[
"Apache-2.0"
] | 261
|
2020-08-18T19:31:29.000Z
|
2022-03-31T14:54:06.000Z
|
tests/arbpack/arbmod.py
|
mofm/pypyr
|
f417f69ba9a607d8a93019854105cfbc4dc9c36d
|
[
"Apache-2.0"
] | 89
|
2017-04-12T09:50:32.000Z
|
2020-08-13T13:18:36.000Z
|
tests/arbpack/arbmod.py
|
mofm/pypyr
|
f417f69ba9a607d8a93019854105cfbc4dc9c36d
|
[
"Apache-2.0"
] | 15
|
2020-09-30T12:15:50.000Z
|
2022-03-30T07:25:40.000Z
|
"""arbmod.py just a sample module for use with tests."""
def arbmod_attribute():
    """No-op attribute; exists only so tests can look it up on this module."""
    return None
| 16.285714
| 56
| 0.631579
| 16
| 114
| 4.4375
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 114
| 6
| 57
| 19
| 0.788889
| 0.54386
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
0e9f8381ab15a9ae74c9803d678bc9358dac7915
| 172
|
py
|
Python
|
src/grokcore/component/tests/adapter/modulecontextmultiple_fixture.py
|
bielbienne/grokcore.component
|
575276af7ef7575698c497a21f4ae7732fdc61bd
|
[
"ZPL-2.1"
] | null | null | null |
src/grokcore/component/tests/adapter/modulecontextmultiple_fixture.py
|
bielbienne/grokcore.component
|
575276af7ef7575698c497a21f4ae7732fdc61bd
|
[
"ZPL-2.1"
] | null | null | null |
src/grokcore/component/tests/adapter/modulecontextmultiple_fixture.py
|
bielbienne/grokcore.component
|
575276af7ef7575698c497a21f4ae7732fdc61bd
|
[
"ZPL-2.1"
] | null | null | null |
import grokcore.component as grok
from zope import interface
class Cave(grok.Context):
    """Empty grok context class used as fixture data."""
    pass
class Club(grok.Context):
    """Second empty grok context class used as fixture data."""
    pass
# NOTE(review): two successive module-level context directives; presumably
# this fixture exercises grok's handling of multiple module contexts --
# confirm against the test that loads this module.
grok.context(Cave)
grok.context(Club)
| 14.333333
| 33
| 0.755814
| 25
| 172
| 5.2
| 0.52
| 0.338462
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156977
| 172
| 11
| 34
| 15.636364
| 0.896552
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
7ebb89a82652e387b00557cac0faf786eb19e210
| 71
|
py
|
Python
|
aesara/typed_list/__init__.py
|
fshart/aesara
|
1ddf96a7b8e8503fb8773b09c3ca77483fd884c4
|
[
"BSD-3-Clause"
] | 111
|
2021-01-29T06:12:58.000Z
|
2021-06-04T20:27:51.000Z
|
aesara/typed_list/__init__.py
|
fshart/aesara
|
1ddf96a7b8e8503fb8773b09c3ca77483fd884c4
|
[
"BSD-3-Clause"
] | 253
|
2020-02-07T15:19:38.000Z
|
2021-01-27T20:26:55.000Z
|
aesara/typed_list/__init__.py
|
fshart/aesara
|
1ddf96a7b8e8503fb8773b09c3ca77483fd884c4
|
[
"BSD-3-Clause"
] | 38
|
2020-07-20T12:09:06.000Z
|
2021-01-27T13:38:50.000Z
|
from . import opt
from .basic import *
from .type import TypedListType
| 17.75
| 31
| 0.774648
| 10
| 71
| 5.5
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169014
| 71
| 3
| 32
| 23.666667
| 0.932203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7ec846635d722e24d75978374b99bf4891684637
| 248
|
py
|
Python
|
hello_world/my_app/views.py
|
mescutia95/Assignment-2-Django-Hello-World
|
b688361be985f2070b513c21ca4c0cd56de47e27
|
[
"MIT"
] | null | null | null |
hello_world/my_app/views.py
|
mescutia95/Assignment-2-Django-Hello-World
|
b688361be985f2070b513c21ca4c0cd56de47e27
|
[
"MIT"
] | null | null | null |
hello_world/my_app/views.py
|
mescutia95/Assignment-2-Django-Hello-World
|
b688361be985f2070b513c21ca4c0cd56de47e27
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
from django.shortcuts import render
# Create your views here.
from django.shortcuts import render
from django.views.generic import TemplateView
class HomeView(TemplateView):
    """TemplateView that renders 'index.html'."""

    template_name = 'index.html'
| 27.555556
| 45
| 0.822581
| 32
| 248
| 6.1875
| 0.625
| 0.151515
| 0.191919
| 0.252525
| 0.313131
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 248
| 9
| 46
| 27.555556
| 0.912442
| 0.092742
| 0
| 0.333333
| 0
| 0
| 0.044643
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7ef08be40aea0c89d6b38760d647d5d7ab1d5713
| 207
|
py
|
Python
|
hooks.py
|
kjb4494/ytbDownloader
|
2f0f1f889a727c4f22ecf799ac36afb0954171ac
|
[
"MIT"
] | null | null | null |
hooks.py
|
kjb4494/ytbDownloader
|
2f0f1f889a727c4f22ecf799ac36afb0954171ac
|
[
"MIT"
] | null | null | null |
hooks.py
|
kjb4494/ytbDownloader
|
2f0f1f889a727c4f22ecf799ac36afb0954171ac
|
[
"MIT"
] | null | null | null |
def finished_hook_for_mp3(d):
    """Progress hook: announce completion once d['status'] reaches 'finished'.

    Message text is Korean ("Download complete. Starting conversion.").
    """
    if d['status'] != 'finished':
        return
    print('다운로드가 완료되었습니다. 컨버팅을 시작합니다.')
def finished_hook_for_mp4(d):
    """Progress hook: announce completion once d['status'] reaches 'finished'.

    Message text is Korean ("Download complete.").
    """
    if d['status'] != 'finished':
        return
    print('다운로드가 완료되었습니다.')
| 20.7
| 43
| 0.613527
| 28
| 207
| 4.321429
| 0.5
| 0.181818
| 0.247934
| 0.297521
| 0.578512
| 0.578512
| 0.578512
| 0.578512
| 0
| 0
| 0
| 0.012346
| 0.217391
| 207
| 9
| 44
| 23
| 0.734568
| 0
| 0
| 0.333333
| 0
| 0
| 0.330097
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7d05c652af90e1d466e5f49cc5a7709969b28e62
| 118
|
py
|
Python
|
coding/learn_import/future_import.py
|
yatao91/learning_road
|
e88dc43de98e35922bfc71c222ec71766851e618
|
[
"MIT"
] | 3
|
2021-05-25T16:58:52.000Z
|
2022-02-05T09:37:17.000Z
|
coding/learn_import/future_import.py
|
yataosu/learning_road
|
e88dc43de98e35922bfc71c222ec71766851e618
|
[
"MIT"
] | null | null | null |
coding/learn_import/future_import.py
|
yataosu/learning_road
|
e88dc43de98e35922bfc71c222ec71766851e618
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from foo import *
from __future__ import print_function # __future__必须在模块文件的顶层,否则SyntaxError
| 29.5
| 75
| 0.762712
| 14
| 118
| 5.785714
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009804
| 0.135593
| 118
| 3
| 76
| 39.333333
| 0.784314
| 0.474576
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
7d2ca2b5d2442b4b3f973c920eb62ff540cf5048
| 7,233
|
py
|
Python
|
reqherd/webservice/alembic/versions/c405c1cc97a3_begin_reqherding.py
|
zthurman/reqherd
|
6b35c4f22d4e28c363f82a5f3331657f8244a589
|
[
"Apache-2.0"
] | null | null | null |
reqherd/webservice/alembic/versions/c405c1cc97a3_begin_reqherding.py
|
zthurman/reqherd
|
6b35c4f22d4e28c363f82a5f3331657f8244a589
|
[
"Apache-2.0"
] | null | null | null |
reqherd/webservice/alembic/versions/c405c1cc97a3_begin_reqherding.py
|
zthurman/reqherd
|
6b35c4f22d4e28c363f82a5f3331657f8244a589
|
[
"Apache-2.0"
] | null | null | null |
"""Begin reqherding
Revision ID: c405c1cc97a3
Revises:
Create Date: 2021-12-18 22:33:29.640060
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "c405c1cc97a3"
down_revision = None
branch_labels = None
depends_on = None
def _create_requirement_table(name, parent=None):
    """Create table *name* with the shared requirement schema plus its id index.

    Every table carries id / definition / modified_date / doc_prefix; when
    *parent* is given, a ``<parent>_id`` foreign-key column referencing
    ``<parent>.id`` is added as well.
    """
    args = [
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("definition", sa.Unicode(length=1000), nullable=False),
        sa.Column("modified_date", sa.DateTime(), nullable=True),
        sa.Column("doc_prefix", sa.Unicode(length=10), nullable=False),
    ]
    if parent is not None:
        args.append(sa.Column(f"{parent}_id", sa.Integer(), nullable=False))
        args.append(
            sa.ForeignKeyConstraint(
                [f"{parent}_id"],
                [f"{parent}.id"],
            )
        )
    args.append(sa.PrimaryKeyConstraint("id"))
    op.create_table(name, *args, sqlite_autoincrement=True)
    op.create_index(op.f(f"ix_{name}_id"), name, ["id"], unique=False)


def _create_modified_date_triggers(name):
    """Install after-update and after-insert triggers on *name* that stamp
    modified_date with datetime('now') on every write (SQLite trigger SQL).
    """
    for event in ("update", "insert"):
        op.execute(
            f"create trigger {name}_after_{event} "
            f"after {event} on {name} "
            "begin "
            f"update {name} set modified_date = datetime('now') WHERE id = NEW.id;"
            "end; "
        )


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # The five tables share one schema; parents must exist before children
    # because of the foreign keys.
    _create_requirement_table("system_requirement")
    _create_modified_date_triggers("system_requirement")
    _create_requirement_table("hardware_requirement", parent="system_requirement")
    _create_modified_date_triggers("hardware_requirement")
    _create_requirement_table("software_requirement", parent="system_requirement")
    _create_modified_date_triggers("software_requirement")
    _create_requirement_table("hardware_design_element", parent="hardware_requirement")
    _create_modified_date_triggers("hardware_design_element")
    _create_requirement_table("software_design_element", parent="software_requirement")
    _create_modified_date_triggers("software_design_element")
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop children before parents to respect the foreign-key dependencies;
    # each table's id index is dropped before the table itself.
    for table in (
        "software_design_element",
        "hardware_design_element",
        "software_requirement",
        "hardware_requirement",
        "system_requirement",
    ):
        op.drop_index(op.f(f"ix_{table}_id"), table_name=table)
        op.drop_table(table)
    # ### end Alembic commands ###
| 36.715736
| 95
| 0.643025
| 810
| 7,233
| 5.498765
| 0.098765
| 0.043107
| 0.063987
| 0.066008
| 0.895599
| 0.873597
| 0.832735
| 0.756399
| 0.624383
| 0.602829
| 0
| 0.011559
| 0.234481
| 7,233
| 196
| 96
| 36.903061
| 0.792848
| 0.039541
| 0
| 0.58427
| 0
| 0
| 0.429667
| 0.179161
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011236
| false
| 0
| 0.011236
| 0
| 0.022472
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7d35559b4e6359e65df476a2b434d12341f4370d
| 55
|
py
|
Python
|
random_no.py
|
Mandar-Gajbhiye15/MLH-LHD-14-Jan
|
d5557fdc05edb24fbbc14c010e6c134340d1d460
|
[
"MIT"
] | null | null | null |
random_no.py
|
Mandar-Gajbhiye15/MLH-LHD-14-Jan
|
d5557fdc05edb24fbbc14c010e6c134340d1d460
|
[
"MIT"
] | null | null | null |
random_no.py
|
Mandar-Gajbhiye15/MLH-LHD-14-Jan
|
d5557fdc05edb24fbbc14c010e6c134340d1d460
|
[
"MIT"
] | null | null | null |
import random
print("Random number: ",random.random())
| 27.5
| 40
| 0.745455
| 7
| 55
| 5.857143
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 55
| 2
| 40
| 27.5
| 0.82
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
ada44450c3bdeeff556f2ebf41ee1c44cf63d87c
| 120
|
py
|
Python
|
imgreco/__init__.py
|
Sait0Yuuki/ArknightsAutoHelper
|
5ecec0d120482c930181346cfdb8542090e169c1
|
[
"MIT"
] | 3
|
2021-02-19T08:41:51.000Z
|
2021-12-19T07:27:32.000Z
|
imgreco/__init__.py
|
Sait0Yuuki/ArknightsAutoHelper
|
5ecec0d120482c930181346cfdb8542090e169c1
|
[
"MIT"
] | null | null | null |
imgreco/__init__.py
|
Sait0Yuuki/ArknightsAutoHelper
|
5ecec0d120482c930181346cfdb8542090e169c1
|
[
"MIT"
] | null | null | null |
from util import image_hash_patch
from . import common, before_operation, end_operation, item, main, task, recruit, map
| 40
| 85
| 0.808333
| 18
| 120
| 5.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 120
| 2
| 86
| 60
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bc11a5f2ba798bec1bbf0ca1ece2fca6028772d6
| 67
|
py
|
Python
|
bolt/cogs/meta/__init__.py
|
HeavyLobster/bolt
|
d50dd59d119a627bfac32fbed2544433f066399b
|
[
"0BSD"
] | null | null | null |
bolt/cogs/meta/__init__.py
|
HeavyLobster/bolt
|
d50dd59d119a627bfac32fbed2544433f066399b
|
[
"0BSD"
] | null | null | null |
bolt/cogs/meta/__init__.py
|
HeavyLobster/bolt
|
d50dd59d119a627bfac32fbed2544433f066399b
|
[
"0BSD"
] | null | null | null |
from .cog import Meta
def setup(bot):
bot.add_cog(Meta(bot))
| 11.166667
| 26
| 0.671642
| 12
| 67
| 3.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.19403
| 67
| 5
| 27
| 13.4
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bc18a428fcdae446f16d0e121e3920b3fa9e403b
| 4,600
|
py
|
Python
|
test/test_list_transactions_by_address_response_item_blockchain_specific.py
|
xan187/Crypto_APIs_2.0_SDK_Python
|
a56c75df54ef037b39be1315ed6e54de35bed55b
|
[
"MIT"
] | null | null | null |
test/test_list_transactions_by_address_response_item_blockchain_specific.py
|
xan187/Crypto_APIs_2.0_SDK_Python
|
a56c75df54ef037b39be1315ed6e54de35bed55b
|
[
"MIT"
] | null | null | null |
test/test_list_transactions_by_address_response_item_blockchain_specific.py
|
xan187/Crypto_APIs_2.0_SDK_Python
|
a56c75df54ef037b39be1315ed6e54de35bed55b
|
[
"MIT"
] | 1
|
2021-07-21T03:35:18.000Z
|
2021-07-21T03:35:18.000Z
|
"""
CryptoAPIs
Crypto APIs 2.0 is a complex and innovative infrastructure layer that radically simplifies the development of any Blockchain and Crypto related applications. Organized around REST, Crypto APIs 2.0 can assist both novice Bitcoin/Ethereum enthusiasts and crypto experts with the development of their blockchain applications. Crypto APIs 2.0 provides unified endpoints and data, raw data, automatic tokens and coins forwardings, callback functionalities, and much more. # noqa: E501
The version of the OpenAPI document: 2.0.0
Contact: developers@cryptoapis.io
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import cryptoapis
from cryptoapis.model.list_transactions_by_address_response_item_blockchain_specific_bitcoin import ListTransactionsByAddressResponseItemBlockchainSpecificBitcoin
from cryptoapis.model.list_transactions_by_address_response_item_blockchain_specific_bitcoin_cash import ListTransactionsByAddressResponseItemBlockchainSpecificBitcoinCash
from cryptoapis.model.list_transactions_by_address_response_item_blockchain_specific_dash import ListTransactionsByAddressResponseItemBlockchainSpecificDash
from cryptoapis.model.list_transactions_by_address_response_item_blockchain_specific_dash_vin import ListTransactionsByAddressResponseItemBlockchainSpecificDashVin
from cryptoapis.model.list_transactions_by_address_response_item_blockchain_specific_dash_vout import ListTransactionsByAddressResponseItemBlockchainSpecificDashVout
from cryptoapis.model.list_transactions_by_address_response_item_blockchain_specific_dogecoin import ListTransactionsByAddressResponseItemBlockchainSpecificDogecoin
from cryptoapis.model.list_transactions_by_address_response_item_blockchain_specific_ethereum import ListTransactionsByAddressResponseItemBlockchainSpecificEthereum
from cryptoapis.model.list_transactions_by_address_response_item_blockchain_specific_ethereum_classic import ListTransactionsByAddressResponseItemBlockchainSpecificEthereumClassic
from cryptoapis.model.list_transactions_by_address_response_item_blockchain_specific_ethereum_gas_price import ListTransactionsByAddressResponseItemBlockchainSpecificEthereumGasPrice
from cryptoapis.model.list_transactions_by_address_response_item_blockchain_specific_litecoin import ListTransactionsByAddressResponseItemBlockchainSpecificLitecoin
globals()['ListTransactionsByAddressResponseItemBlockchainSpecificBitcoin'] = ListTransactionsByAddressResponseItemBlockchainSpecificBitcoin
globals()['ListTransactionsByAddressResponseItemBlockchainSpecificBitcoinCash'] = ListTransactionsByAddressResponseItemBlockchainSpecificBitcoinCash
globals()['ListTransactionsByAddressResponseItemBlockchainSpecificDash'] = ListTransactionsByAddressResponseItemBlockchainSpecificDash
globals()['ListTransactionsByAddressResponseItemBlockchainSpecificDashVin'] = ListTransactionsByAddressResponseItemBlockchainSpecificDashVin
globals()['ListTransactionsByAddressResponseItemBlockchainSpecificDashVout'] = ListTransactionsByAddressResponseItemBlockchainSpecificDashVout
globals()['ListTransactionsByAddressResponseItemBlockchainSpecificDogecoin'] = ListTransactionsByAddressResponseItemBlockchainSpecificDogecoin
globals()['ListTransactionsByAddressResponseItemBlockchainSpecificEthereum'] = ListTransactionsByAddressResponseItemBlockchainSpecificEthereum
globals()['ListTransactionsByAddressResponseItemBlockchainSpecificEthereumClassic'] = ListTransactionsByAddressResponseItemBlockchainSpecificEthereumClassic
globals()['ListTransactionsByAddressResponseItemBlockchainSpecificEthereumGasPrice'] = ListTransactionsByAddressResponseItemBlockchainSpecificEthereumGasPrice
globals()['ListTransactionsByAddressResponseItemBlockchainSpecificLitecoin'] = ListTransactionsByAddressResponseItemBlockchainSpecificLitecoin
from cryptoapis.model.list_transactions_by_address_response_item_blockchain_specific import ListTransactionsByAddressResponseItemBlockchainSpecific
class TestListTransactionsByAddressResponseItemBlockchainSpecific(unittest.TestCase):
    """Generated unit-test stubs for ListTransactionsByAddressResponseItemBlockchainSpecific."""

    def setUp(self):
        """No fixtures needed yet."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass

    def testListTransactionsByAddressResponseItemBlockchainSpecific(self):
        """Stub: exercise ListTransactionsByAddressResponseItemBlockchainSpecific."""
        # FIXME: construct object with mandatory attributes with example values
        # model = ListTransactionsByAddressResponseItemBlockchainSpecific()  # noqa: E501
        pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 80.701754
| 484
| 0.898261
| 325
| 4,600
| 12.403077
| 0.350769
| 0.038204
| 0.051848
| 0.062764
| 0.214339
| 0.214339
| 0.214339
| 0.214339
| 0.214339
| 0.214339
| 0
| 0.003491
| 0.06587
| 4,600
| 56
| 485
| 82.142857
| 0.934606
| 0.195217
| 0
| 0.090909
| 0
| 0
| 0.177499
| 0.175314
| 0
| 0
| 0
| 0.017857
| 0
| 1
| 0.090909
| false
| 0.090909
| 0.424242
| 0
| 0.545455
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
bc300efa75ea9e9f96e7fd4afba95f0720eb0e10
| 7,725
|
py
|
Python
|
alerta/heartbeat.py
|
been2io/alerta
|
510681bd8fcd5b266fab9c6e00a3abd25591a2b6
|
[
"Apache-2.0"
] | null | null | null |
alerta/heartbeat.py
|
been2io/alerta
|
510681bd8fcd5b266fab9c6e00a3abd25591a2b6
|
[
"Apache-2.0"
] | null | null | null |
alerta/heartbeat.py
|
been2io/alerta
|
510681bd8fcd5b266fab9c6e00a3abd25591a2b6
|
[
"Apache-2.0"
] | null | null | null |
import os
import sys
import platform
import time
import datetime
import pytz
import json
from uuid import uuid4
from email import utils
# Fallback heartbeat timeout applied when no explicit timeout is supplied.
DEFAULT_TIMEOUT = 300 # seconds
# Basename of the invoking script; used as the prefix of the default origin.
prog = os.path.basename(sys.argv[0])
class Heartbeat(object):
def __init__(self, origin=None, tags=None, create_time=None, timeout=None, customer=None):
self.id = str(uuid4())
self.origin = origin or '%s/%s' % (prog, platform.uname()[1])
self.tags = tags or list()
self.event_type = 'Heartbeat'
self.create_time = create_time or datetime.datetime.utcnow()
self.timeout = timeout or DEFAULT_TIMEOUT
self.receive_time = None
self.customer = customer
def get_id(self, short=False):
if short:
return self.id[:8]
else:
return self.id
def get_header(self):
return {
"origin": self.origin,
"type": self.event_type,
"correlation-id": self.id
}
def get_body(self):
return {
'id': self.id,
'origin': self.origin,
'tags': self.tags,
'type': self.event_type,
'createTime': self.get_date('create_time', 'iso'),
'timeout': self.timeout,
'customer': self.customer
}
def get_date(self, attr, fmt='iso', timezone='Europe/London'):
tz = pytz.timezone(timezone)
if hasattr(self, attr):
if fmt == 'local':
return getattr(self, attr).replace(tzinfo=pytz.UTC).astimezone(tz).strftime('%Y/%m/%d %H:%M:%S')
elif fmt == 'iso' or fmt == 'iso8601':
return getattr(self, attr).replace(microsecond=0).isoformat() + ".%03dZ" % (getattr(self, attr).microsecond // 1000)
elif fmt == 'rfc' or fmt == 'rfc2822':
return utils.formatdate(time.mktime(getattr(self, attr).replace(tzinfo=pytz.UTC).timetuple()), True)
elif fmt == 'short':
return getattr(self, attr).replace(tzinfo=pytz.UTC).astimezone(tz).strftime('%a %d %H:%M:%S')
elif fmt == 'epoch':
return time.mktime(getattr(self, attr).replace(tzinfo=pytz.UTC).timetuple())
elif fmt == 'raw':
return getattr(self, attr)
else:
raise ValueError("Unknown date format %s" % fmt)
else:
return ValueError("Attribute %s not a date" % attr)
def get_type(self):
return self.event_type
def receive_now(self):
self.receive_time = datetime.datetime.utcnow()
def __repr__(self):
return 'Heartbeat(id=%r, origin=%r, create_time=%r, timeout=%r, customer=%r)' % (
self.id, self.origin, self.create_time, self.timeout, self.customer)
def __str__(self):
return json.dumps(self.get_body())
@staticmethod
def parse_heartbeat(heartbeat):
    """Parse a JSON document (str or bytes) into a Heartbeat instance.

    Raises ValueError when the payload is not valid JSON, when
    'createTime' is not an ISO-8601 UTC timestamp, or when 'tags' is
    present but not a list.
    """
    try:
        if isinstance(heartbeat, bytes):
            heartbeat = json.loads(heartbeat.decode('utf-8'))  # See https://bugs.python.org/issue10976
        else:
            heartbeat = json.loads(heartbeat)
    except ValueError as e:
        raise ValueError('Could not parse heartbeat - %s: %s' % (e, heartbeat))
    if heartbeat.get('createTime', None):
        create_time = heartbeat['createTime']
        # BUG FIX: the original only accepted fractional-second timestamps
        # and raised on e.g. "2020-01-01T00:00:00Z".  Accept both forms,
        # consistent with HeartbeatDocument.parse_heartbeat.
        fmt = '%Y-%m-%dT%H:%M:%S.%fZ' if '.' in create_time else '%Y-%m-%dT%H:%M:%SZ'
        try:
            heartbeat['createTime'] = datetime.datetime.strptime(create_time, fmt)
        except ValueError as e:
            raise ValueError('Could not parse date time string: %s' % e)
    if heartbeat.get('tags', None):
        if not isinstance(heartbeat['tags'], list):
            raise ValueError('Attribute must be list: tags')
    return Heartbeat(
        origin=heartbeat.get('origin', None),
        tags=heartbeat.get('tags', list()),
        create_time=heartbeat.get('createTime', None),
        timeout=heartbeat.get('timeout', None),
        customer=heartbeat.get('customer', None)
    )
class HeartbeatDocument(object):
    """A heartbeat as stored in / loaded from the database.

    Identical to Heartbeat except the id, event type and receive time
    come from the stored document rather than being generated.
    """

    def __init__(self, id, origin, tags, event_type, create_time, timeout, receive_time, customer):
        self.id = id
        self.origin = origin
        self.tags = tags
        self.event_type = event_type or 'Heartbeat'
        # Timestamps are naive datetimes in UTC; rendering happens in get_date().
        self.create_time = create_time or datetime.datetime.utcnow()
        self.timeout = timeout or DEFAULT_TIMEOUT
        self.receive_time = receive_time
        self.customer = customer

    def get_id(self, short=False):
        """Return the heartbeat id, truncated to 8 characters when *short*."""
        if short:
            return self.id[:8]
        else:
            return self.id

    def get_header(self):
        """Build the message headers published alongside this heartbeat."""
        return {
            "origin": self.origin,
            "type": self.event_type,
            "correlation-id": self.id
        }

    def get_body(self):
        """Serialize into the wire-format dictionary, including receiveTime."""
        return {
            'id': self.id,
            'origin': self.origin,
            'tags': self.tags,
            'type': self.event_type,
            'createTime': self.get_date('create_time', 'iso'),
            'timeout': self.timeout,
            'receiveTime': self.get_date('receive_time', 'iso'),
            'customer': self.customer
        }

    def get_date(self, attr, fmt='iso', timezone='Europe/London'):
        """Return the datetime attribute *attr* rendered in format *fmt*.

        Supported formats: 'local', 'iso'/'iso8601', 'rfc'/'rfc2822',
        'short', 'epoch' and 'raw'.  Localized formats treat the stored
        naive datetime as UTC and convert it into *timezone*.

        Raises ValueError for an unknown *fmt* or when *attr* does not
        exist on this instance.
        """
        tz = pytz.timezone(timezone)
        if not hasattr(self, attr):
            # BUG FIX: the original *returned* the ValueError instance
            # instead of raising it.
            raise ValueError("Attribute %s not a date" % attr)
        date = getattr(self, attr)  # hoisted: every branch reads the same attribute
        if fmt == 'local':
            return date.replace(tzinfo=pytz.UTC).astimezone(tz).strftime('%Y/%m/%d %H:%M:%S')
        elif fmt == 'iso' or fmt == 'iso8601':
            # Truncate microseconds to milliseconds, e.g. ".123Z".
            return date.replace(microsecond=0).isoformat() + ".%03dZ" % (date.microsecond // 1000)
        elif fmt == 'rfc' or fmt == 'rfc2822':
            return utils.formatdate(time.mktime(date.replace(tzinfo=pytz.UTC).timetuple()), True)
        elif fmt == 'short':
            return date.replace(tzinfo=pytz.UTC).astimezone(tz).strftime('%a %d %H:%M:%S')
        elif fmt == 'epoch':
            return time.mktime(date.replace(tzinfo=pytz.UTC).timetuple())
        elif fmt == 'raw':
            return date
        else:
            raise ValueError("Unknown date format %s" % fmt)

    def __repr__(self):
        return 'HeartbeatDocument(id=%r, origin=%r, create_time=%r, timeout=%r, customer=%r)' % (
            self.id, self.origin, self.create_time, self.timeout, self.customer)

    def __str__(self):
        return json.dumps(self.get_body())

    @staticmethod
    def parse_heartbeat(heartbeat):
        """Convert a stored document (dict) into a HeartbeatDocument.

        ISO-8601 timestamp strings in 'createTime'/'receiveTime' are
        parsed into datetimes; both with and without fractional seconds
        are accepted.  Raises ValueError on an unparseable timestamp.
        """
        for k, v in heartbeat.items():
            # BUG FIX: guard on `v` — a null timestamp value previously
            # crashed with TypeError on `'.' in v`.
            if k in ['createTime', 'receiveTime'] and v:
                # '%f' requires a fraction to be present, hence two formats.
                fmt = '%Y-%m-%dT%H:%M:%S.%fZ' if '.' in v else '%Y-%m-%dT%H:%M:%SZ'
                try:
                    heartbeat[k] = datetime.datetime.strptime(v, fmt)
                except ValueError as e:
                    raise ValueError('Could not parse date time string: %s' % e)
        return HeartbeatDocument(
            id=heartbeat.get('id', None),
            origin=heartbeat.get('origin', None),
            tags=heartbeat.get('tags', list()),
            event_type=heartbeat.get('type', None),
            create_time=heartbeat.get('createTime', None),
            timeout=heartbeat.get('timeout', None),
            receive_time=heartbeat.get('receiveTime', None),
            customer=heartbeat.get('customer', None)
        )
| 35.273973
| 132
| 0.559094
| 889
| 7,725
| 4.773903
| 0.150731
| 0.03393
| 0.049482
| 0.051838
| 0.716305
| 0.716305
| 0.700283
| 0.700283
| 0.700283
| 0.689208
| 0
| 0.009529
| 0.307184
| 7,725
| 218
| 133
| 35.43578
| 0.783445
| 0.007896
| 0
| 0.672414
| 0
| 0.011494
| 0.131218
| 0.008617
| 0
| 0
| 0
| 0
| 0
| 1
| 0.103448
| false
| 0
| 0.051724
| 0.051724
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
70bc7ec837f21d5aac05e7239641f6c3e74beb64
| 47,113
|
py
|
Python
|
mysite/patterns/39.py
|
BioinfoNet/prepub
|
e19c48cabf8bd22736dcef9308a5e196cfd8119a
|
[
"MIT"
] | 19
|
2016-06-17T23:36:27.000Z
|
2020-01-13T16:41:55.000Z
|
mysite/patterns/39.py
|
BioinfoNet/prepub
|
e19c48cabf8bd22736dcef9308a5e196cfd8119a
|
[
"MIT"
] | 13
|
2016-06-06T12:57:05.000Z
|
2019-02-05T02:21:00.000Z
|
patterns/39.py
|
OmnesRes/GRIMMER
|
173c99ebdb6a9edb1242d24a791d0c5d778ff643
|
[
"MIT"
] | 7
|
2017-03-28T18:12:22.000Z
|
2021-06-16T09:32:59.000Z
|
pattern_zero=[0.0, 0.024983563445, 0.048652202498, 0.051282051282, 0.07100591716, 0.076265614727, 0.092044707429, 0.09993425378, 0.102564102564, 0.111768573307, 0.122287968442, 0.127547666009, 0.130177514793, 0.143326758711, 0.147271531887, 0.151216305062, 0.153846153846, 0.163050624589, 0.173570019724, 0.177514792899, 0.178829717291, 0.181459566075, 0.190664036818, 0.194608809993, 0.198553583169, 0.202498356345, 0.205128205128, 0.213017751479, 0.214332675871, 0.222222222222, 0.224852071006, 0.228796844181, 0.230111768573, 0.232741617357, 0.236686390533, 0.2419460881, 0.245890861275, 0.248520710059, 0.249835634451, 0.253780407627, 0.25641025641, 0.264299802761, 0.265614727153, 0.273504273504, 0.276134122288, 0.280078895464, 0.281393819855, 0.284023668639, 0.287968441815, 0.293228139382, 0.297172912558, 0.299802761341, 0.301117685733, 0.305062458909, 0.307692307692, 0.315581854043, 0.316896778435, 0.324786324786, 0.32741617357, 0.331360946746, 0.332675871137, 0.335305719921, 0.339250493097, 0.344510190664, 0.34845496384, 0.351084812623, 0.352399737015, 0.356344510191, 0.358974358974, 0.366863905325, 0.368178829717, 0.376068376068, 0.378698224852, 0.382642998028, 0.383957922419, 0.386587771203, 0.390532544379, 0.395792241946, 0.399737015122, 0.402366863905, 0.403681788297, 0.407626561473, 0.410256410256, 0.418145956607, 0.419460880999, 0.42735042735, 0.429980276134, 0.43392504931, 0.435239973702, 0.437869822485, 0.441814595661, 0.447074293228, 0.451019066404, 0.453648915187, 0.454963839579, 0.458908612755, 0.461538461538, 0.46942800789, 0.470742932281, 0.478632478632, 0.481262327416, 0.485207100592, 0.486522024984, 0.489151873767, 0.493096646943, 0.49835634451, 0.502301117686, 0.504930966469, 0.506245890861, 0.510190664037, 0.512820512821, 0.520710059172, 0.522024983563, 0.529914529915, 0.532544378698, 0.536489151874, 0.537804076266, 0.540433925049, 0.544378698225, 0.549638395792, 0.553583168968, 0.556213017751, 0.557527942143, 0.561472715319, 0.564102564103, 
0.571992110454, 0.573307034845, 0.581196581197, 0.58382642998, 0.587771203156, 0.589086127548, 0.591715976331, 0.595660749507, 0.600920447074, 0.60486522025, 0.607495069034, 0.608809993425, 0.612754766601, 0.615384615385, 0.623274161736, 0.624589086128, 0.632478632479, 0.635108481262, 0.639053254438, 0.64036817883, 0.642998027613, 0.646942800789, 0.652202498356, 0.656147271532, 0.658777120316, 0.660092044707, 0.664036817883, 0.666666666667, 0.674556213018, 0.67587113741, 0.683760683761, 0.686390532544, 0.69033530572, 0.691650230112, 0.694280078895, 0.698224852071, 0.703484549638, 0.707429322814, 0.710059171598, 0.711374095989, 0.715318869165, 0.717948717949, 0.7258382643, 0.727153188692, 0.735042735043, 0.737672583826, 0.741617357002, 0.742932281394, 0.745562130178, 0.749506903353, 0.75476660092, 0.758711374096, 0.76134122288, 0.762656147272, 0.766600920447, 0.769230769231, 0.777120315582, 0.778435239974, 0.786324786325, 0.788954635108, 0.792899408284, 0.794214332676, 0.79684418146, 0.800788954635, 0.806048652202, 0.809993425378, 0.812623274162, 0.813938198554, 0.817882971729, 0.820512820513, 0.828402366864, 0.829717291256, 0.837606837607, 0.840236686391, 0.844181459566, 0.845496383958, 0.848126232742, 0.852071005917, 0.857330703485, 0.86127547666, 0.863905325444, 0.865220249836, 0.869165023011, 0.871794871795, 0.879684418146, 0.880999342538, 0.888888888889, 0.891518737673, 0.895463510848, 0.89677843524, 0.899408284024, 0.903353057199, 0.908612754767, 0.912557527942, 0.915187376726, 0.916502301118, 0.920447074293, 0.923076923077, 0.930966469428, 0.93228139382, 0.940170940171, 0.942800788955, 0.94674556213, 0.948060486522, 0.950690335306, 0.954635108481, 0.959894806049, 0.963839579224, 0.966469428008, 0.9677843524, 0.971729125575, 0.974358974359, 0.98224852071, 0.983563445102, 0.991452991453, 0.994082840237, 0.998027613412, 0.999342537804]
pattern_odd=[0.001972386588, 0.005917159763, 0.011176857331, 0.015121630506, 0.01775147929, 0.019066403682, 0.023011176857, 0.025641025641, 0.033530571992, 0.034845496384, 0.042735042735, 0.045364891519, 0.049309664694, 0.050624589086, 0.05325443787, 0.057199211045, 0.062458908613, 0.066403681788, 0.069033530572, 0.070348454964, 0.074293228139, 0.076923076923, 0.084812623274, 0.086127547666, 0.094017094017, 0.096646942801, 0.100591715976, 0.101906640368, 0.104536489152, 0.108481262327, 0.113740959895, 0.11768573307, 0.120315581854, 0.121630506246, 0.125575279421, 0.128205128205, 0.136094674556, 0.137409598948, 0.145299145299, 0.147928994083, 0.151873767258, 0.15318869165, 0.155818540434, 0.159763313609, 0.165023011177, 0.168967784352, 0.171597633136, 0.172912557528, 0.176857330703, 0.179487179487, 0.187376725838, 0.18869165023, 0.196581196581, 0.199211045365, 0.20315581854, 0.204470742932, 0.207100591716, 0.211045364892, 0.216305062459, 0.220249835634, 0.222879684418, 0.22419460881, 0.228139381986, 0.230769230769, 0.23865877712, 0.239973701512, 0.247863247863, 0.250493096647, 0.254437869822, 0.255752794214, 0.258382642998, 0.262327416174, 0.267587113741, 0.271531886917, 0.2741617357, 0.275476660092, 0.279421433268, 0.282051282051, 0.289940828402, 0.291255752794, 0.299145299145, 0.301775147929, 0.305719921105, 0.307034845496, 0.30966469428, 0.313609467456, 0.318869165023, 0.322813938199, 0.325443786982, 0.326758711374, 0.33070348455, 0.333333333333, 0.341222879684, 0.342537804076, 0.350427350427, 0.353057199211, 0.357001972387, 0.358316896778, 0.360946745562, 0.364891518738, 0.370151216305, 0.374095989481, 0.376725838264, 0.378040762656, 0.381985535832, 0.384615384615, 0.392504930966, 0.393819855358, 0.401709401709, 0.404339250493, 0.408284023669, 0.40959894806, 0.412228796844, 0.41617357002, 0.421433267587, 0.425378040763, 0.428007889546, 0.429322813938, 0.433267587114, 0.435897435897, 0.443786982249, 0.44510190664, 0.452991452991, 0.455621301775, 0.459566074951, 
0.460880999343, 0.463510848126, 0.467455621302, 0.472715318869, 0.476660092045, 0.479289940828, 0.48060486522, 0.484549638396, 0.487179487179, 0.495069033531, 0.496383957922, 0.504273504274, 0.506903353057, 0.510848126233, 0.512163050625, 0.514792899408, 0.518737672584, 0.523997370151, 0.527942143327, 0.53057199211, 0.531886916502, 0.535831689678, 0.538461538462, 0.546351084813, 0.547666009204, 0.555555555556, 0.558185404339, 0.562130177515, 0.563445101907, 0.56607495069, 0.570019723866, 0.575279421433, 0.579224194609, 0.581854043393, 0.583168967784, 0.58711374096, 0.589743589744, 0.597633136095, 0.598948060487, 0.606837606838, 0.609467455621, 0.613412228797, 0.614727153189, 0.617357001972, 0.621301775148, 0.626561472715, 0.630506245891, 0.633136094675, 0.634451019066, 0.638395792242, 0.641025641026, 0.648915187377, 0.650230111769, 0.65811965812, 0.660749506903, 0.664694280079, 0.666009204471, 0.668639053254, 0.67258382643, 0.677843523997, 0.681788297173, 0.684418145957, 0.685733070348, 0.689677843524, 0.692307692308, 0.700197238659, 0.701512163051, 0.709401709402, 0.712031558185, 0.715976331361, 0.717291255753, 0.719921104536, 0.723865877712, 0.729125575279, 0.733070348455, 0.735700197239, 0.737015121631, 0.740959894806, 0.74358974359, 0.751479289941, 0.752794214333, 0.760683760684, 0.763313609467, 0.767258382643, 0.768573307035, 0.771203155819, 0.775147928994, 0.780407626561, 0.784352399737, 0.786982248521, 0.788297172913, 0.792241946088, 0.794871794872, 0.802761341223, 0.804076265615, 0.811965811966, 0.81459566075, 0.818540433925, 0.819855358317, 0.822485207101, 0.826429980276, 0.831689677844, 0.835634451019, 0.838264299803, 0.839579224195, 0.84352399737, 0.846153846154, 0.854043392505, 0.855358316897, 0.863247863248, 0.865877712032, 0.869822485207, 0.871137409599, 0.873767258383, 0.877712031558, 0.882971729126, 0.886916502301, 0.889546351085, 0.890861275477, 0.894806048652, 0.897435897436, 0.905325443787, 0.906640368179, 0.91452991453, 0.917159763314, 
0.921104536489, 0.922419460881, 0.925049309665, 0.92899408284, 0.934253780408, 0.938198553583, 0.940828402367, 0.942143326759, 0.946088099934, 0.948717948718, 0.956607495069, 0.957922419461, 0.965811965812, 0.968441814596, 0.972386587771, 0.973701512163, 0.976331360947, 0.980276134122, 0.98553583169, 0.989480604865, 0.992110453649, 0.993425378041, 0.997370151216]
pattern_even=[0.0, 0.00788954635, 0.00920447074, 0.01709401709, 0.01972386588, 0.02366863905, 0.02498356345, 0.02761341223, 0.0315581854, 0.03681788297, 0.04076265615, 0.04339250493, 0.04470742932, 0.0486522025, 0.05128205128, 0.05917159763, 0.06048652203, 0.06837606838, 0.07100591716, 0.07495069034, 0.07626561473, 0.07889546351, 0.08284023669, 0.08809993425, 0.09204470743, 0.09467455621, 0.09598948061, 0.09993425378, 0.10256410256, 0.11045364892, 0.11176857331, 0.11965811966, 0.12228796844, 0.12623274162, 0.12754766601, 0.13017751479, 0.13412228797, 0.13938198554, 0.14332675871, 0.1459566075, 0.14727153189, 0.15121630506, 0.15384615385, 0.1617357002, 0.16305062459, 0.17094017094, 0.17357001972, 0.1775147929, 0.17882971729, 0.18145956608, 0.18540433925, 0.19066403682, 0.19460880999, 0.19723865878, 0.19855358317, 0.20249835635, 0.20512820513, 0.21301775148, 0.21433267587, 0.22222222222, 0.22485207101, 0.22879684418, 0.23011176857, 0.23274161736, 0.23668639053, 0.2419460881, 0.24589086128, 0.24852071006, 0.24983563445, 0.25378040763, 0.25641025641, 0.26429980276, 0.26561472715, 0.2735042735, 0.27613412229, 0.28007889546, 0.28139381986, 0.28402366864, 0.28796844182, 0.29322813938, 0.29717291256, 0.29980276134, 0.30111768573, 0.30506245891, 0.30769230769, 0.31558185404, 0.31689677844, 0.32478632479, 0.32741617357, 0.33136094675, 0.33267587114, 0.33530571992, 0.3392504931, 0.34451019066, 0.34845496384, 0.35108481262, 0.35239973702, 0.35634451019, 0.35897435897, 0.36686390533, 0.36817882972, 0.37606837607, 0.37869822485, 0.38264299803, 0.38395792242, 0.3865877712, 0.39053254438, 0.39579224195, 0.39973701512, 0.40236686391, 0.4036817883, 0.40762656147, 0.41025641026, 0.41814595661, 0.419460881, 0.42735042735, 0.42998027613, 0.43392504931, 0.4352399737, 0.43786982249, 0.44181459566, 0.44707429323, 0.4510190664, 0.45364891519, 0.45496383958, 0.45890861276, 0.46153846154, 0.46942800789, 0.47074293228, 0.47863247863, 0.48126232742, 0.48520710059, 0.48652202498, 0.48915187377, 
0.49309664694, 0.49835634451, 0.50230111769, 0.50493096647, 0.50624589086, 0.51019066404, 0.51282051282, 0.52071005917, 0.52202498356, 0.52991452992, 0.5325443787, 0.53648915187, 0.53780407627, 0.54043392505, 0.54437869823, 0.54963839579, 0.55358316897, 0.55621301775, 0.55752794214, 0.56147271532, 0.5641025641, 0.57199211045, 0.57330703485, 0.5811965812, 0.58382642998, 0.58777120316, 0.58908612755, 0.59171597633, 0.59566074951, 0.60092044707, 0.60486522025, 0.60749506903, 0.60880999343, 0.6127547666, 0.61538461539, 0.62327416174, 0.62458908613, 0.63247863248, 0.63510848126, 0.63905325444, 0.64036817883, 0.64299802761, 0.64694280079, 0.65220249836, 0.65614727153, 0.65877712032, 0.66009204471, 0.66403681788, 0.66666666667, 0.67455621302, 0.67587113741, 0.68376068376, 0.68639053254, 0.69033530572, 0.69165023011, 0.6942800789, 0.69822485207, 0.70348454964, 0.70742932281, 0.7100591716, 0.71137409599, 0.71531886917, 0.71794871795, 0.7258382643, 0.72715318869, 0.73504273504, 0.73767258383, 0.741617357, 0.74293228139, 0.74556213018, 0.74950690335, 0.75476660092, 0.7587113741, 0.76134122288, 0.76265614727, 0.76660092045, 0.76923076923, 0.77712031558, 0.77843523997, 0.78632478633, 0.78895463511, 0.79289940828, 0.79421433268, 0.79684418146, 0.80078895464, 0.8060486522, 0.80999342538, 0.81262327416, 0.81393819855, 0.81788297173, 0.82051282051, 0.82840236686, 0.82971729126, 0.83760683761, 0.84023668639, 0.84418145957, 0.84549638396, 0.84812623274, 0.85207100592, 0.85733070349, 0.86127547666, 0.86390532544, 0.86522024984, 0.86916502301, 0.8717948718, 0.87968441815, 0.88099934254, 0.88888888889, 0.89151873767, 0.89546351085, 0.89677843524, 0.89940828402, 0.9033530572, 0.90861275477, 0.91255752794, 0.91518737673, 0.91650230112, 0.92044707429, 0.92307692308, 0.93096646943, 0.93228139382, 0.94017094017, 0.94280078896, 0.94674556213, 0.94806048652, 0.95069033531, 0.95463510848, 0.95989480605, 0.96383957922, 0.96646942801, 0.9677843524, 0.97172912558, 0.97435897436, 0.98224852071, 
0.9835634451, 0.99145299145, 0.99408284024, 0.99802761341, 0.9993425378]
averages_even={0.0: [0.0], 0.7258382643: [0.6923076923077, 0.3076923076923], 0.48126232742: [0.0769230769231, 0.9230769230769], 0.67587113741: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.05128205128: [0.0], 0.68376068376: [0.6666666666667, 0.3333333333333], 0.1775147929: [0.7692307692308, 0.2307692307692], 0.60092044707: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.0486522025: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.63510848126: [0.0769230769231, 0.9230769230769], 0.81262327416: [0.5384615384615, 0.4615384615385], 0.29980276134: [0.5384615384615, 0.4615384615385], 0.70348454964: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.65877712032: [0.5384615384615, 0.4615384615385], 0.28007889546: [0.7692307692308, 0.2307692307692], 0.89151873767: [0.0769230769231, 0.9230769230769], 0.88099934254: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.13938198554: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.29717291256: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.22879684418: [0.7692307692308, 0.2307692307692], 0.8060486522: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.08284023669: [0.3846153846154, 0.6153846153846], 0.24589086128: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.84023668639: [0.0769230769231, 0.9230769230769], 0.34845496384: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.55358316897: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.49835634451: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.79421433268: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.38264299803: [0.7692307692308, 0.2307692307692], 0.94280078896: [0.0769230769231, 0.9230769230769], 0.09993425378: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 
0.02498356345: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.58382642998: [0.0769230769231, 0.9230769230769], 0.25641025641: [0.0], 0.69033530572: [0.7692307692308, 0.2307692307692], 0.2735042735: [0.6666666666667, 0.3333333333333], 0.06837606838: [0.3333333333333, 0.6666666666667], 0.43392504931: [0.7692307692308, 0.2307692307692], 0.7100591716: [0.5384615384615, 0.4615384615385], 0.5811965812: [0.6666666666667, 0.3333333333333], 0.97172912558: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.4510190664: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.7587113741: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.30769230769: [0.0], 0.95069033531: [0.8461538461538, 0.1538461538462], 0.57330703485: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.32478632479: [0.6666666666667, 0.3333333333333], 0.48520710059: [0.7692307692308, 0.2307692307692], 0.17094017094: [0.6666666666667, 0.3333333333333], 0.54043392505: [0.8461538461538, 0.1538461538462], 0.78632478633: [0.6666666666667, 0.3333333333333], 0.86127547666: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.35897435897: [0.0], 0.89940828402: [0.8461538461538, 0.1538461538462], 0.89546351085: [0.7692307692308, 0.2307692307692], 0.05917159763: [0.6923076923077, 0.3076923076923], 0.37606837607: [0.6666666666667, 0.3333333333333], 0.60880999343: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.30506245891: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.64299802761: [0.8461538461538, 0.1538461538462], 0.96383957922: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.10256410256: [0.0], 0.20512820513: [0.0], 0.90861275477: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.94674556213: [0.7692307692308, 0.2307692307692], 0.42735042735: [0.6666666666667, 0.3333333333333], 0.71137409599: 
[0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.28402366864: [0.8461538461538, 0.1538461538462], 0.22222222222: [0.6666666666667, 0.3333333333333], 0.74556213018: [0.8461538461538, 0.1538461538462], 0.30111768573: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.46153846154: [0.0], 0.41025641026: [0.0], 0.19460880999: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.11965811966: [0.6666666666667, 0.3333333333333], 0.81393819855: [0.8205128205128, 0.1794871794872, 0.4871794871795, 0.5128205128205], 0.33530571992: [0.8461538461538, 0.1538461538462], 0.99145299145: [0.3333333333333, 0.6666666666667], 0.84812623274: [0.8461538461538, 0.1538461538462], 0.08809993425: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.56147271532: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.83760683761: [0.6666666666667, 0.3333333333333], 0.44181459566: [0.3846153846154, 0.6153846153846], 0.59566074951: [0.3846153846154, 0.6153846153846], 0.04339250493: [0.5384615384615, 0.4615384615385], 0.3865877712: [0.8461538461538, 0.1538461538462], 0.97435897436: [0.0], 0.33136094675: [0.7692307692308, 0.2307692307692], 0.4036817883: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.66403681788: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.13017751479: [0.8461538461538, 0.1538461538462], 0.36817882972: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.43786982249: [0.8461538461538, 0.1538461538462], 0.00920447074: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.03681788297: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.45496383958: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.76660092045: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.62327416174: [0.6923076923077, 0.3076923076923], 0.93096646943: 
[0.6923076923077, 0.3076923076923], 0.07100591716: [0.0769230769231, 0.9230769230769], 0.99802761341: [0.7692307692308, 0.2307692307692], 0.12228796844: [0.0769230769231, 0.9230769230769], 0.6942800789: [0.8461538461538, 0.1538461538462], 0.69165023011: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.86916502301: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.18145956608: [0.8461538461538, 0.1538461538462], 0.9033530572: [0.3846153846154, 0.6153846153846], 0.92044707429: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.19855358317: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.25378040763: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.82840236686: [0.6923076923077, 0.3076923076923], 0.61538461539: [0.0], 0.85207100592: [0.3846153846154, 0.6153846153846], 0.28796844182: [0.3846153846154, 0.6153846153846], 0.89677843524: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.07626561473: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.23274161736: [0.8461538461538, 0.1538461538462], 0.3392504931: [0.3846153846154, 0.6153846153846], 0.24983563445: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.35634451019: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.73767258383: [0.0769230769231, 0.9230769230769], 0.9993425378: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.79684418146: [0.8461538461538, 0.1538461538462], 0.39053254438: [0.3846153846154, 0.6153846153846], 0.47863247863: [0.6666666666667, 0.3333333333333], 0.40762656147: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.9677843524: [0.8205128205128, 0.1794871794872, 0.4871794871795, 0.5128205128205], 0.26429980276: [0.6923076923077, 0.3076923076923], 0.28139381986: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.02761341223: [0.8461538461538, 
0.1538461538462], 0.91650230112: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.51019066404: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.45890861276: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.92307692308: [0.0], 0.07889546351: [0.8461538461538, 0.1538461538462], 0.33267587114: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.12623274162: [0.7692307692308, 0.2307692307692], 0.49309664694: [0.3846153846154, 0.6153846153846], 0.78895463511: [0.0769230769231, 0.9230769230769], 0.55621301775: [0.5384615384615, 0.4615384615385], 0.9835634451: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.36686390533: [0.6923076923077, 0.3076923076923], 0.09598948061: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.62458908613: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.65220249836: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.58908612755: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.35239973702: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.41814595661: [0.6923076923077, 0.3076923076923], 0.96646942801: [0.5384615384615, 0.4615384615385], 0.54963839579: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.4352399737: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.72715318869: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.1459566075: [0.5384615384615, 0.4615384615385], 0.76134122288: [0.5384615384615, 0.4615384615385], 0.46942800789: [0.6923076923077, 0.3076923076923], 0.04076265615: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.48652202498: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.82971729126: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.68639053254: [0.0769230769231, 
0.9230769230769], 0.16305062459: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.86390532544: [0.5384615384615, 0.4615384615385], 0.53648915187: [0.7692307692308, 0.2307692307692], 0.75476660092: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.93228139382: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.19723865878: [0.5384615384615, 0.4615384615385], 0.39973701512: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.50230111769: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.85733070349: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.13412228797: [0.3846153846154, 0.6153846153846], 0.21433267587: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.71531886917: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.76265614727: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.15121630506: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.37869822485: [0.0769230769231, 0.9230769230769], 0.63905325444: [0.7692307692308, 0.2307692307692], 0.95989480605: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.52991452992: [0.6666666666667, 0.3333333333333], 0.24852071006: [0.5384615384615, 0.4615384615385], 0.70742932281: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.5641025641: [0.0], 0.18540433925: [0.3846153846154, 0.6153846153846], 0.64694280079: [0.3846153846154, 0.6153846153846], 0.63247863248: [0.6666666666667, 0.3333333333333], 0.80999342538: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.20249835635: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.66666666667: [0.0], 0.82051282051: [0.0], 0.60486522025: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.01972386588: [0.0769230769231, 0.9230769230769], 0.2419460881: 
[0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.73504273504: [0.6666666666667, 0.3333333333333], 0.59171597633: [0.8461538461538, 0.1538461538462], 0.91255752794: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.76923076923: [0.0], 0.23668639053: [0.3846153846154, 0.6153846153846], 0.52071005917: [0.6923076923077, 0.3076923076923], 0.66009204471: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.11045364892: [0.6923076923077, 0.3076923076923], 0.17357001972: [0.0769230769231, 0.9230769230769], 0.8717948718: [0.0], 0.50493096647: [0.5384615384615, 0.4615384615385], 0.19066403682: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.94017094017: [0.6666666666667, 0.3333333333333], 0.69822485207: [0.3846153846154, 0.6153846153846], 0.12754766601: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.09467455621: [0.5384615384615, 0.4615384615385], 0.54437869823: [0.3846153846154, 0.6153846153846], 0.65614727153: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.86522024984: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.04470742932: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.22485207101: [0.0769230769231, 0.9230769230769], 0.51282051282: [0.0], 0.6127547666: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.91518737673: [0.5384615384615, 0.4615384615385], 0.1617357002: [0.6923076923077, 0.3076923076923], 0.06048652203: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.53780407627: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.31558185404: [0.6923076923077, 0.3076923076923], 0.17882971729: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.57199211045: [0.6923076923077, 0.3076923076923], 0.99408284024: [0.0769230769231, 0.9230769230769], 0.74950690335: [0.3846153846154, 0.6153846153846], 0.14727153189: 
[0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.01709401709: [0.6666666666667, 0.3333333333333], 0.55752794214: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.81788297173: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.67455621302: [0.6923076923077, 0.3076923076923], 0.26561472715: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.00788954635: [0.6923076923077, 0.3076923076923], 0.21301775148: [0.6923076923077, 0.3076923076923], 0.58777120316: [0.7692307692308, 0.2307692307692], 0.74293228139: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.07495069034: [0.7692307692308, 0.2307692307692], 0.23011176857: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.77712031558: [0.6923076923077, 0.3076923076923], 0.31689677844: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.5325443787: [0.0769230769231, 0.9230769230769], 0.95463510848: [0.3846153846154, 0.6153846153846], 0.52202498356: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.84549638396: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.35108481262: [0.5384615384615, 0.4615384615385], 0.87968441815: [0.6923076923077, 0.3076923076923], 0.09204470743: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.64036817883: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.84418145957: [0.7692307692308, 0.2307692307692], 0.741617357: [0.7692307692308, 0.2307692307692], 0.80078895464: [0.3846153846154, 0.6153846153846], 0.94806048652: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.40236686391: [0.5384615384615, 0.4615384615385], 0.98224852071: [0.6923076923077, 0.3076923076923], 0.419460881: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.77843523997: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.27613412229: 
[0.0769230769231, 0.9230769230769], 0.71794871795: [0.0], 0.29322813938: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.15384615385: [0.0], 0.45364891519: [0.5384615384615, 0.4615384615385], 0.48915187377: [0.8461538461538, 0.1538461538462], 0.47074293228: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.32741617357: [0.0769230769231, 0.9230769230769], 0.34451019066: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.88888888889: [0.6666666666667, 0.3333333333333], 0.79289940828: [0.7692307692308, 0.2307692307692], 0.44707429323: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.02366863905: [0.7692307692308, 0.2307692307692], 0.39579224195: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.0315581854: [0.3846153846154, 0.6153846153846], 0.42998027613: [0.0769230769231, 0.9230769230769], 0.50624589086: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.14332675871: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.38395792242: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.11176857331: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.60749506903: [0.5384615384615, 0.4615384615385]}
averages_odd={0.392504930966: [0.6923076923077, 0.3076923076923], 0.570019723866: [0.3846153846154, 0.6153846153846], 0.120315581854: [0.5384615384615, 0.4615384615385], 0.40959894806: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.168967784352: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.925049309665: [0.8461538461538, 0.1538461538462], 0.638395792242: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.443786982249: [0.6923076923077, 0.3076923076923], 0.67258382643: [0.3846153846154, 0.6153846153846], 0.993425378041: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.460880999343: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.563445101907: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.20315581854: [0.7692307692308, 0.2307692307692], 0.597633136095: [0.6923076923077, 0.3076923076923], 0.495069033531: [0.6923076923077, 0.3076923076923], 0.775147928994: [0.3846153846154, 0.6153846153846], 0.890861275477: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.220249835634: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.074293228139: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.057199211045: [0.3846153846154, 0.6153846153846], 0.84352399737: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.700197238659: [0.6923076923077, 0.3076923076923], 0.877712031558: [0.3846153846154, 0.6153846153846], 0.768573307035: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.677843523997: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.946088099934: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.802761341223: [0.6923076923077, 0.3076923076923], 0.980276134122: [0.3846153846154, 0.6153846153846], 0.897435897436: [0.0], 0.871137409599: [0.025641025641, 0.6410256410256, 0.3589743589744, 
0.974358974359], 0.882971729126: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.128205128205: [0.0], 0.905325443787: [0.6923076923077, 0.3076923076923], 0.108481262327: [0.3846153846154, 0.6153846153846], 0.145299145299: [0.3333333333333, 0.6666666666667], 0.634451019066: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.076923076923: [0.0], 0.042735042735: [0.3333333333333, 0.6666666666667], 0.179487179487: [0.0], 0.613412228797: [0.7692307692308, 0.2307692307692], 0.531886916502: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.094017094017: [0.3333333333333, 0.6666666666667], 0.976331360947: [0.8461538461538, 0.1538461538462], 0.196581196581: [0.3333333333333, 0.6666666666667], 0.025641025641: [0.0], 0.630506245891: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.01775147929: [0.5384615384615, 0.4615384615385], 0.53057199211: [0.5384615384615, 0.4615384615385], 0.230769230769: [0.0], 0.598948060487: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.247863247863: [0.3333333333333, 0.6666666666667], 0.835634451019: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.855358316897: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.633136094675: [0.5384615384615, 0.4615384615385], 0.780407626561: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.523997370151: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.096646942801: [0.0769230769231, 0.9230769230769], 0.701512163051: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.558185404339: [0.0769230769231, 0.9230769230769], 0.735700197239: [0.5384615384615, 0.4615384615385], 0.98553583169: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.626561472715: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.740959894806: [0.2820512820513, 0.9487179487179, 
0.7179487179487, 0.0512820512821], 0.804076265615: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.538461538462: [0.0], 0.113740959895: [0.2564102564103, 0.5897435897436, 0.7435897435897, 0.4102564102564], 0.838264299803: [0.5384615384615, 0.4615384615385], 0.155818540434: [0.8461538461538, 0.1538461538462], 0.729125575279: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.906640368179: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.763313609467: [0.0769230769231, 0.9230769230769], 0.172912557528: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.2741617357: [0.5384615384615, 0.4615384615385], 0.940828402367: [0.5384615384615, 0.4615384615385], 0.045364891519: [0.0769230769231, 0.9230769230769], 0.291255752794: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.831689677844: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.865877712032: [0.0769230769231, 0.9230769230769], 0.325443786982: [0.5384615384615, 0.4615384615385], 0.579224194609: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.207100591716: [0.8461538461538, 0.1538461538462], 0.342537804076: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.934253780408: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.733070348455: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.504273504274: [0.3333333333333, 0.6666666666667], 0.22419460881: [0.8205128205128, 0.1794871794872, 0.4871794871795, 0.5128205128205], 0.376725838264: [0.5384615384615, 0.4615384615385], 0.681788297173: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.019066403682: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.393819855358: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.250493096647: [0.0769230769231, 0.9230769230769], 0.794871794872: [0.0], 
0.084812623274: [0.6923076923077, 0.3076923076923], 0.606837606838: [0.3333333333333, 0.6666666666667], 0.062458908613: [0.2564102564103, 0.5897435897436, 0.7435897435897, 0.4102564102564], 0.428007889546: [0.5384615384615, 0.4615384615385], 0.641025641026: [0.0], 0.510848126233: [0.7692307692308, 0.2307692307692], 0.44510190664: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.267587113741: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.301775147929: [0.0769230769231, 0.9230769230769], 0.666009204471: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.709401709402: [0.6666666666667, 0.3333333333333], 0.318869165023: [0.2564102564103, 0.5897435897436, 0.7435897435897, 0.4102564102564], 0.56607495069: [0.8461538461538, 0.1538461538462], 0.479289940828: [0.5384615384615, 0.4615384615385], 0.101906640368: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.74358974359: [0.0], 0.496383957922: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.921104536489: [0.7692307692308, 0.2307692307692], 0.581854043393: [0.5384615384615, 0.4615384615385], 0.353057199211: [0.0769230769231, 0.9230769230769], 0.070348454964: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.811965811966: [0.3333333333333, 0.6666666666667], 0.370151216305: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.668639053254: [0.8461538461538, 0.1538461538462], 0.989480604865: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.846153846154: [0.0], 0.968441814596: [0.0769230769231, 0.9230769230769], 0.404339250493: [0.0769230769231, 0.9230769230769], 0.737015121631: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.421433267587: [0.2564102564103, 0.5897435897436, 0.7435897435897, 0.4102564102564], 0.771203155819: [0.8461538461538, 0.1538461538462], 0.948717948718: [0.0], 0.518737672584: [0.3846153846154, 
0.6153846153846], 0.455621301775: [0.0769230769231, 0.9230769230769], 0.818540433925: [0.7692307692308, 0.2307692307692], 0.472715318869: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.873767258383: [0.8461538461538, 0.1538461538462], 0.58711374096: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.104536489152: [0.8461538461538, 0.1538461538462], 0.621301775148: [0.3846153846154, 0.6153846153846], 0.137409598948: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.942143326759: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.715976331361: [0.7692307692308, 0.2307692307692], 0.894806048652: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.512163050625: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.689677843524: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.546351084813: [0.6923076923077, 0.3076923076923], 0.723865877712: [0.3846153846154, 0.6153846153846], 0.254437869822: [0.7692307692308, 0.2307692307692], 0.121630506246: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.171597633136: [0.5384615384615, 0.4615384615385], 0.614727153189: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.271531886917: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.792241946088: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.648915187377: [0.6923076923077, 0.3076923076923], 0.826429980276: [0.3846153846154, 0.6153846153846], 0.18869165023: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.305719921105: [0.7692307692308, 0.2307692307692], 0.92899408284: [0.3846153846154, 0.6153846153846], 0.049309664694: [0.7692307692308, 0.2307692307692], 0.717291255753: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.322813938199: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 
0.125575279421: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.889546351085: [0.5384615384615, 0.4615384615385], 0.751479289941: [0.6923076923077, 0.3076923076923], 0.886916502301: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.033530571992: [0.6923076923077, 0.3076923076923], 0.684418145957: [0.5384615384615, 0.4615384615385], 0.357001972387: [0.7692307692308, 0.2307692307692], 0.222879684418: [0.5384615384615, 0.4615384615385], 0.374095989481: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.997370151216: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.854043392505: [0.6923076923077, 0.3076923076923], 0.159763313609: [0.3846153846154, 0.6153846153846], 0.239973701512: [0.2051282051282, 0.7948717948718, 0.1282051282051, 0.8717948717949], 0.408284023669: [0.7692307692308, 0.2307692307692], 0.922419460881: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.425378040763: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.176857330703: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.282051282051: [0.0], 0.956607495069: [0.6923076923077, 0.3076923076923], 0.819855358317: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.299145299145: [0.3333333333333, 0.6666666666667], 0.617357001972: [0.8461538461538, 0.1538461538462], 0.459566074951: [0.7692307692308, 0.2307692307692], 0.476660092045: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.050624589086: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.333333333333: [0.0], 0.211045364892: [0.3846153846154, 0.6153846153846], 0.350427350427: [0.3333333333333, 0.6666666666667], 0.034845496384: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.147928994083: [0.0769230769231, 0.9230769230769], 0.228139381986: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 
0.384615384615: [0.0], 0.401709401709: [0.3333333333333, 0.6666666666667], 0.165023011177: [0.4102564102564, 0.2564102564103, 0.5897435897436, 0.7435897435897], 0.258382642998: [0.8461538461538, 0.1538461538462], 0.784352399737: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.275476660092: [0.8205128205128, 0.1794871794872, 0.4871794871795, 0.5128205128205], 0.435897435897: [0.0], 0.452991452991: [0.3333333333333, 0.6666666666667], 0.30966469428: [0.8461538461538, 0.1538461538462], 0.547666009204: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.199211045365: [0.0769230769231, 0.9230769230769], 0.326758711374: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.487179487179: [0.0], 0.786982248521: [0.5384615384615, 0.4615384615385], 0.136094674556: [0.6923076923077, 0.3076923076923], 0.216305062459: [0.2564102564103, 0.5897435897436, 0.7435897435897, 0.4102564102564], 0.360946745562: [0.8461538461538, 0.1538461538462], 0.650230111769: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.506903353057: [0.0769230769231, 0.9230769230769], 0.378040762656: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.15318869165: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.575279421433: [0.4102564102564, 0.5897435897436, 0.2564102564103, 0.7435897435897], 0.015121630506: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.412228796844: [0.8461538461538, 0.1538461538462], 0.752794214333: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.609467455621: [0.0769230769231, 0.9230769230769], 0.429322813938: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.011176857331: [0.2564102564103, 0.5897435897436, 0.7435897435897, 0.4102564102564], 0.660749506903: [0.0769230769231, 0.9230769230769], 0.187376725838: [0.6923076923077, 0.3076923076923], 0.463510848126: [0.8461538461538, 0.1538461538462], 
0.712031558185: [0.0769230769231, 0.9230769230769], 0.48060486522: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.204470742932: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.066403681788: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.05325443787: [0.8461538461538, 0.1538461538462], 0.957922419461: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.81459566075: [0.0769230769231, 0.9230769230769], 0.527942143327: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.992110453649: [0.5384615384615, 0.4615384615385], 0.562130177515: [0.7692307692308, 0.2307692307692], 0.23865877712: [0.6923076923077, 0.3076923076923], 0.262327416174: [0.3846153846154, 0.6153846153846], 0.917159763314: [0.0769230769231, 0.9230769230769], 0.279421433268: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.91452991453: [0.6666666666667, 0.3333333333333], 0.023011176857: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.664694280079: [0.7692307692308, 0.2307692307692], 0.973701512163: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.313609467456: [0.3846153846154, 0.6153846153846], 0.555555555556: [0.6666666666667, 0.3333333333333], 0.100591715976: [0.7692307692308, 0.2307692307692], 0.33070348455: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.589743589744: [0.0], 0.767258382643: [0.7692307692308, 0.2307692307692], 0.069033530572: [0.5384615384615, 0.4615384615385], 0.364891518738: [0.3846153846154, 0.6153846153846], 0.65811965812: [0.3333333333333, 0.6666666666667], 0.514792899408: [0.8461538461538, 0.1538461538462], 0.381985535832: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.692307692308: [0.0], 0.11768573307: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.869822485207: [0.7692307692308, 0.2307692307692], 0.583168967784: 
[0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.255752794214: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.41617357002: [0.3846153846154, 0.6153846153846], 0.760683760684: [0.6666666666667, 0.3333333333333], 0.086127547666: [0.2051282051282, 0.1282051282051, 0.7948717948718, 0.8717948717949], 0.965811965812: [0.6666666666667, 0.3333333333333], 0.938198553583: [0.5641025641026, 0.1025641025641, 0.8974358974359, 0.4358974358974], 0.433267587114: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.289940828402: [0.6923076923077, 0.3076923076923], 0.972386587771: [0.7692307692308, 0.2307692307692], 0.005917159763: [0.3846153846154, 0.6153846153846], 0.685733070348: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.307034845496: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.467455621302: [0.3846153846154, 0.6153846153846], 0.863247863248: [0.6666666666667, 0.3333333333333], 0.719921104536: [0.8461538461538, 0.1538461538462], 0.001972386588: [0.8461538461538, 0.1538461538462], 0.484549638396: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821], 0.341222879684: [0.6923076923077, 0.3076923076923], 0.788297172913: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.358316896778: [0.025641025641, 0.6410256410256, 0.3589743589744, 0.974358974359], 0.839579224195: [0.5128205128205, 0.1794871794872, 0.4871794871795, 0.8205128205128], 0.822485207101: [0.8461538461538, 0.1538461538462], 0.151873767258: [0.7692307692308, 0.2307692307692], 0.535831689678: [0.2820512820513, 0.9487179487179, 0.7179487179487, 0.0512820512821]}
| 9,422.6
| 17,605
| 0.79477
| 5,722
| 47,113
| 6.542992
| 0.14051
| 0.004487
| 0.003446
| 0.029167
| 0.563316
| 0.352627
| 0.352627
| 0.352627
| 0.351131
| 0
| 0
| 0.844683
| 0.060599
| 47,113
| 5
| 17,605
| 9,422.6
| 0.001243
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
70ec22d4a94458ff555cdee8db530ccdbc870881
| 45
|
py
|
Python
|
spimagine/models/__init__.py
|
funkelab/spimagine
|
d7fb0aac8986421df339486e1f0d33d0ba1c820c
|
[
"BSD-3-Clause"
] | 116
|
2015-05-06T16:13:55.000Z
|
2022-03-21T21:13:15.000Z
|
spimagine/models/__init__.py
|
funkelab/spimagine
|
d7fb0aac8986421df339486e1f0d33d0ba1c820c
|
[
"BSD-3-Clause"
] | 33
|
2015-06-24T09:32:24.000Z
|
2020-08-26T11:46:21.000Z
|
spimagine/models/__init__.py
|
funkelab/spimagine
|
d7fb0aac8986421df339486e1f0d33d0ba1c820c
|
[
"BSD-3-Clause"
] | 19
|
2015-06-24T12:19:22.000Z
|
2021-11-28T21:38:21.000Z
|
# from .keyframe_model import TransformData
| 15
| 43
| 0.822222
| 5
| 45
| 7.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 45
| 3
| 43
| 15
| 0.923077
| 0.911111
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
cb02efee2616abf46ddadb49e2778f6d6439cf40
| 184
|
py
|
Python
|
src/__init__.py
|
trueto/chnlp_bert
|
2cb9c34b394387081d1f018f5dd56af90c8a05b5
|
[
"Apache-2.0"
] | null | null | null |
src/__init__.py
|
trueto/chnlp_bert
|
2cb9c34b394387081d1f018f5dd56af90c8a05b5
|
[
"Apache-2.0"
] | null | null | null |
src/__init__.py
|
trueto/chnlp_bert
|
2cb9c34b394387081d1f018f5dd56af90c8a05b5
|
[
"Apache-2.0"
] | null | null | null |
from .configuration_chbert import CHBertConfig
from .tokenization_chbert import CHBertTokenizer,CHBertTokenizerFast
from .modeling_chbert import CHBertForPretraining, CHBertForMaskedLM
| 61.333333
| 68
| 0.907609
| 17
| 184
| 9.647059
| 0.647059
| 0.219512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 184
| 3
| 69
| 61.333333
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
cb0cd7e52d52acde72958a184323051d97db6d64
| 285
|
py
|
Python
|
venv/lib/python2.7/site-packages/newrelic-2.62.0.47/newrelic/extras/framework_django/templatetags/newrelic_tags.py
|
CharleyFarley/ovvio
|
81489ee64f91e4aab908731ce6ddf59edb9314bf
|
[
"MIT"
] | 1
|
2016-04-22T15:44:08.000Z
|
2016-04-22T15:44:08.000Z
|
lib/python2.7/site-packages/newrelic-2.12.0.10/newrelic/extras/framework_django/templatetags/newrelic_tags.py
|
Arable/evepod
|
85638e76cb8529e8ed38fa9849596374719bcb91
|
[
"Apache-2.0"
] | null | null | null |
lib/python2.7/site-packages/newrelic-2.12.0.10/newrelic/extras/framework_django/templatetags/newrelic_tags.py
|
Arable/evepod
|
85638e76cb8529e8ed38fa9849596374719bcb91
|
[
"Apache-2.0"
] | null | null | null |
import django.template
from newrelic.hooks.framework_django import (
newrelic_browser_timing_header, newrelic_browser_timing_footer)
register = django.template.Library()
register.simple_tag(newrelic_browser_timing_header)
register.simple_tag(newrelic_browser_timing_footer)
| 28.5
| 71
| 0.85614
| 35
| 285
| 6.542857
| 0.428571
| 0.262009
| 0.366812
| 0.235808
| 0.331878
| 0.331878
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084211
| 285
| 9
| 72
| 31.666667
| 0.877395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
cb3e84add35ebf80046d3c15e6381658fd251434
| 794
|
py
|
Python
|
images/splunk/deployment/init_cluster_master.py
|
halr9000/docker-splunk-cluster
|
eebf20721fd7b88b1b9d92f919594a79c6758b10
|
[
"OLDAP-2.6",
"OLDAP-2.4"
] | null | null | null |
images/splunk/deployment/init_cluster_master.py
|
halr9000/docker-splunk-cluster
|
eebf20721fd7b88b1b9d92f919594a79c6758b10
|
[
"OLDAP-2.6",
"OLDAP-2.4"
] | null | null | null |
images/splunk/deployment/init_cluster_master.py
|
halr9000/docker-splunk-cluster
|
eebf20721fd7b88b1b9d92f919594a79c6758b10
|
[
"OLDAP-2.6",
"OLDAP-2.4"
] | 1
|
2021-08-09T09:18:21.000Z
|
2021-08-09T09:18:21.000Z
|
import os
import init_helpers
def configurations():
return {
"components": {
"kvstore": False,
"web": False,
"indexing": False,
"dmc": False
}
}
def substitution():
return {
"@CLUSTERING_PASS_4_SYMM_KEY@": os.environ.get("INIT_CLUSTERING_PASS_4_SYMM_KEY", "clustering-changeme"),
"@CLUSTERING_REPLICATION_FACTOR@": os.environ.get("INIT_CLUSTERING_REPLICATION_FACTOR", "1"),
"@CLUSTERING_SEARCH_FACTOR@": os.environ.get("INIT_CLUSTERING_SEARCH_FACTOR", "1"),
"@CLUSTERING_CLUSTER_LABEL@": os.environ.get("INIT_CLUSTERING_CLUSTER_LABEL", "cluster"),
"@INDEX_DISCOVERY_PASS_4_SYMM_KEY@": os.environ.get("INIT_INDEX_DISCOVERY_PASS_4_SYMM_KEY", "indexdiscovery-changeme")
}
| 31.76
| 126
| 0.65869
| 87
| 794
| 5.597701
| 0.356322
| 0.092402
| 0.123203
| 0.164271
| 0.447639
| 0.328542
| 0.11499
| 0.11499
| 0
| 0
| 0
| 0.009539
| 0.207809
| 794
| 24
| 127
| 33.083333
| 0.764706
| 0
| 0
| 0.105263
| 0
| 0
| 0.484887
| 0.410579
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| true
| 0.105263
| 0.105263
| 0.105263
| 0.315789
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
|
0
| 5
|
cb93392e5526dc8ebea3e2b74189d54d57d61478
| 45
|
py
|
Python
|
lesion_tool/__init__.py
|
atsuch/nighres
|
eb6265befb0b65b99c858ecb1c328d4d63e5a293
|
[
"Apache-2.0"
] | null | null | null |
lesion_tool/__init__.py
|
atsuch/nighres
|
eb6265befb0b65b99c858ecb1c328d4d63e5a293
|
[
"Apache-2.0"
] | null | null | null |
lesion_tool/__init__.py
|
atsuch/nighres
|
eb6265befb0b65b99c858ecb1c328d4d63e5a293
|
[
"Apache-2.0"
] | 1
|
2019-01-21T10:53:38.000Z
|
2019-01-21T10:53:38.000Z
|
from lesion_pipeline import Lesion_extractor
| 22.5
| 44
| 0.911111
| 6
| 45
| 6.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 45
| 1
| 45
| 45
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
cbb367ea4dd17d626424ffbed69554f53dc78a43
| 229
|
py
|
Python
|
dvalib/analysis.py
|
pune-lug/DeepVideoAnalytics
|
2650037040dca49b0f537df576af123dae8cef97
|
[
"Apache-2.0"
] | null | null | null |
dvalib/analysis.py
|
pune-lug/DeepVideoAnalytics
|
2650037040dca49b0f537df576af123dae8cef97
|
[
"Apache-2.0"
] | null | null | null |
dvalib/analysis.py
|
pune-lug/DeepVideoAnalytics
|
2650037040dca49b0f537df576af123dae8cef97
|
[
"Apache-2.0"
] | null | null | null |
import cv2
import scenedetect
class FrameAnalysis(object):
def __init__(self,path):
pass
def blurryness(self):
return cv2.Laplacian(self.image, cv2.CV_64F).var()
class VideoAnalysis(object):
pass
| 15.266667
| 58
| 0.68559
| 28
| 229
| 5.428571
| 0.678571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027933
| 0.218341
| 229
| 14
| 59
| 16.357143
| 0.821229
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.222222
| 0.222222
| 0.111111
| 0.777778
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
cbc1a64b1c662a1eff39151a8d1322458bde121c
| 5,718
|
py
|
Python
|
UpdatedSyntheticDataset/SyntheticDataset2/modular_synthetic_dataset_maker.py
|
liyu711/SUAS
|
2f6592fc2ab316475eeabe2f4828e5ba5c1a4b0b
|
[
"MIT"
] | null | null | null |
UpdatedSyntheticDataset/SyntheticDataset2/modular_synthetic_dataset_maker.py
|
liyu711/SUAS
|
2f6592fc2ab316475eeabe2f4828e5ba5c1a4b0b
|
[
"MIT"
] | null | null | null |
UpdatedSyntheticDataset/SyntheticDataset2/modular_synthetic_dataset_maker.py
|
liyu711/SUAS
|
2f6592fc2ab316475eeabe2f4828e5ba5c1a4b0b
|
[
"MIT"
] | null | null | null |
import os, multiprocessing, timeit
from SyntheticDataset2.ImageCreator import *
from SyntheticDataset2.logger import Logger
class ModularSyntheticDatasetMaker(object):
@staticmethod
def run_modular_target_maps_generator(number_of_target_maps, number_of_targets_on_each_map, process_number):
for index in range(number_of_target_maps):
target_map = ModularTargetMap(number_of_targets_on_each_map)
target_map.create_modular_target_map()
Logger.log("Current process: " + str((process_number / number_of_target_maps) + 1) + "\n")
target_map.record_modular_target_map(process_number + index + 1)
@staticmethod
def create_modular_target_maps(number_of_target_maps, number_of_targets_on_each_map):
if (os.path.isdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY)):
pass
else:
os.mkdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY)
if (os.path.isdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_target_maps")):
raise Exception("Cannot create Modular Target Maps: Save directory already exists")
if (os.path.isdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_target_maps_answers")):
raise Exception("Cannot create Modular Target Maps Answers: Save directory already exists")
os.mkdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_target_maps")
os.mkdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_target_maps_answers")
cpu_count = multiprocessing.cpu_count()
pics_per_process = (number_of_target_maps / cpu_count) + 1
start_time = timeit.default_timer()
jobs = []
for index in range(cpu_count):
starting_index = index * int(pics_per_process)
image_generation_process = multiprocessing.Process(target=ModularSyntheticDatasetMaker.run_modular_target_maps_generator, args=(pics_per_process, number_of_targets_on_each_map, starting_index))
jobs.append(image_generation_process)
image_generation_process.start()
for job in jobs:
job.join()
Logger.log("Modular Target Maps saved at: " + Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_target_maps")
Logger.log("Modular Target Maps Answers saved at: " + Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_target_maps_answers\n")
print("====================================")
print("Total number of modular target maps generated:", len(os.listdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_target_maps")))
print("Total elapsed time (sec):", timeit.default_timer() - start_time)
print("====================================")
@staticmethod
def run_modular_single_targets_with_background_generator(number_of_single_targets_with_background, process_number):
for index in range(number_of_single_targets_with_background):
single_target = ModularTargetWithBackground()
single_target.create_modular_target_with_background()
Logger.log("Current process: " + str((process_number / number_of_single_targets_with_background) + 1) + "\n")
single_target.record_modular_target_with_background(process_number + index + 1)
@staticmethod
def create_modular_single_targets_with_background(number_of_single_targets_with_background):
if (os.path.isdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY)):
pass
else:
os.mkdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY)
if (os.path.isdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_single_targets_with_background")):
raise Exception("Cannot create Modular Single Targets With Background: Save directory already exists")
if (os.path.isdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_single_targets_with_background_answers")):
raise Exception("Cannot create Modular Single Targets With Background Answers: Save directory already exists")
os.mkdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_single_targets_with_background")
os.mkdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_single_targets_with_background_answers")
cpu_count = multiprocessing.cpu_count()
pics_per_process = (number_of_single_targets_with_background / cpu_count) + 1
start_time = timeit.default_timer()
jobs = []
for index in range(cpu_count):
starting_index = index * int(pics_per_process)
image_generation_process = multiprocessing.Process(target=ModularSyntheticDatasetMaker.run_modular_single_targets_with_background_generator, args=(pics_per_process, starting_index))
jobs.append(image_generation_process)
image_generation_process.start()
for job in jobs:
job.join()
Logger.log("Modular Single Targets With Background saved at: " + Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_single_targets_with_background")
Logger.log("Modular Single Targets With Background Answers saved at: " + Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_single_targets_with_background_answers\n")
print("====================================")
print("Total number of modular single targets with background generated:", len(os.listdir(Settings.SAVE_PATH + Settings.ANSWERS_DIRECTORY + "/modular_single_targets_with_background")))
print("Total elapsed time (sec):", timeit.default_timer() - start_time)
print("====================================")
| 60.189474
| 205
| 0.714586
| 662
| 5,718
| 5.806647
| 0.123867
| 0.080125
| 0.08845
| 0.140479
| 0.883975
| 0.845734
| 0.786941
| 0.725026
| 0.665713
| 0.60744
| 0
| 0.001705
| 0.179258
| 5,718
| 94
| 206
| 60.829787
| 0.817388
| 0
| 0
| 0.473684
| 0
| 0
| 0.225953
| 0.092515
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0.026316
| 0.039474
| 0
| 0.105263
| 0.105263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
cbcc6029fc1a2b4410e6cce08524f670be3adce9
| 67
|
py
|
Python
|
compiler-rt/test/sanitizer_common/ios_commands/iossim_prepare.py
|
rarutyun/llvm
|
76fa6b3bcade074bdedef740001c4528e1aa08a8
|
[
"Apache-2.0"
] | 305
|
2019-09-14T17:16:05.000Z
|
2022-03-31T15:05:20.000Z
|
compiler-rt/test/sanitizer_common/ios_commands/iossim_prepare.py
|
rarutyun/llvm
|
76fa6b3bcade074bdedef740001c4528e1aa08a8
|
[
"Apache-2.0"
] | 410
|
2019-06-06T20:52:32.000Z
|
2022-01-18T14:21:48.000Z
|
compiler-rt/test/sanitizer_common/ios_commands/iossim_prepare.py
|
rarutyun/llvm
|
76fa6b3bcade074bdedef740001c4528e1aa08a8
|
[
"Apache-2.0"
] | 50
|
2019-05-10T21:12:24.000Z
|
2022-01-21T06:39:47.000Z
|
#!/usr/bin/env python
import json
print(json.dumps({"env": {}}))
| 11.166667
| 30
| 0.626866
| 10
| 67
| 4.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119403
| 67
| 5
| 31
| 13.4
| 0.711864
| 0.298507
| 0
| 0
| 0
| 0
| 0.065217
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
1dcc0232d30c1b98a477811053ab7a44137102f7
| 222
|
py
|
Python
|
py/5.py
|
ProgrammerKid/euler
|
bdccf3dd127aa421033613e57ed323a6e67a64ae
|
[
"BSD-3-Clause"
] | null | null | null |
py/5.py
|
ProgrammerKid/euler
|
bdccf3dd127aa421033613e57ed323a6e67a64ae
|
[
"BSD-3-Clause"
] | null | null | null |
py/5.py
|
ProgrammerKid/euler
|
bdccf3dd127aa421033613e57ed323a6e67a64ae
|
[
"BSD-3-Clause"
] | null | null | null |
import lists, multiples
print "The LCM of all numbers below 10 is:\n" + str(1 * 2 * 2 * 2 * 3 * 3 * 5 * 7)
print "The LCM of all the numbers below 20 is:\n" + str(1 * 2 * 2 * 2 * 2 * 3 * 3 * 5 * 7 * 11 * 13 * 17 * 19)
| 37
| 110
| 0.540541
| 47
| 222
| 2.553191
| 0.510638
| 0.083333
| 0.075
| 0.216667
| 0.508333
| 0.241667
| 0.241667
| 0
| 0
| 0
| 0
| 0.188312
| 0.306306
| 222
| 5
| 111
| 44.4
| 0.590909
| 0
| 0
| 0
| 0
| 0
| 0.351351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0.666667
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
1df07f953398fc58b0494fbc53ac845983fd1a31
| 19
|
py
|
Python
|
posthog/version.py
|
csmatar/posthog
|
4587cfe18625f302726c531f06a32c18e9749e9d
|
[
"MIT"
] | 58
|
2020-08-26T16:26:18.000Z
|
2022-03-30T05:32:23.000Z
|
posthog/version.py
|
csmatar/posthog
|
4587cfe18625f302726c531f06a32c18e9749e9d
|
[
"MIT"
] | 15
|
2021-11-09T10:49:34.000Z
|
2021-11-09T16:11:01.000Z
|
posthog/version.py
|
csmatar/posthog
|
4587cfe18625f302726c531f06a32c18e9749e9d
|
[
"MIT"
] | 13
|
2020-09-08T13:27:07.000Z
|
2022-03-19T17:27:10.000Z
|
VERSION = "1.29.1"
| 9.5
| 18
| 0.578947
| 4
| 19
| 2.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.157895
| 19
| 1
| 19
| 19
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3811e01d5eb50e0e0dc5c38f510e963966ec55fc
| 54
|
py
|
Python
|
linkml/reporting/__init__.py
|
krishna-saravan/linkml
|
8c34844ebaf054f44ceb386e4d51ee4c95dbebe6
|
[
"CC0-1.0"
] | 83
|
2021-03-17T16:31:02.000Z
|
2022-03-13T23:17:02.000Z
|
linkml/reporting/__init__.py
|
krishna-saravan/linkml
|
8c34844ebaf054f44ceb386e4d51ee4c95dbebe6
|
[
"CC0-1.0"
] | 390
|
2021-03-18T18:44:11.000Z
|
2022-03-30T22:55:01.000Z
|
linkml/reporting/__init__.py
|
krishna-saravan/linkml
|
8c34844ebaf054f44ceb386e4d51ee4c95dbebe6
|
[
"CC0-1.0"
] | 20
|
2021-03-27T08:55:56.000Z
|
2022-02-24T15:25:57.000Z
|
from linkml.reporting.model import Report, CheckResult
| 54
| 54
| 0.87037
| 7
| 54
| 6.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 54
| 1
| 54
| 54
| 0.94
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3830922017380323512350bda9c296eee3666fd5
| 120
|
py
|
Python
|
day_3/lists_iterable.py
|
anishLearnsToCode/python-training-1
|
ef5d6b64f888e167faecd1410563173dcc27f319
|
[
"MIT"
] | 3
|
2021-01-05T18:00:14.000Z
|
2021-11-28T15:43:04.000Z
|
day_3/lists_iterable.py
|
anishLearnsToCode/python-training-1
|
ef5d6b64f888e167faecd1410563173dcc27f319
|
[
"MIT"
] | null | null | null |
day_3/lists_iterable.py
|
anishLearnsToCode/python-training-1
|
ef5d6b64f888e167faecd1410563173dcc27f319
|
[
"MIT"
] | null | null | null |
"""
Time Complexity: O(1)
Space Complexity: O(1)
"""
for number in [2, 3, 5, 7, 11, 13, 19, 23, 29]:
print(number)
| 15
| 47
| 0.575
| 22
| 120
| 3.136364
| 0.818182
| 0.318841
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 0.216667
| 120
| 7
| 48
| 17.142857
| 0.56383
| 0.366667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.