hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
15ed9202ca042f9e6fb5d67850a74b8dda5d3886
| 129
|
py
|
Python
|
routes/socketio.py
|
knowsWhereHisTowelIs/pi-pyth-serv-socketio
|
91f85439ac7a33dc723e0614d7ebdfd3c8260ad4
|
[
"MIT"
] | null | null | null |
routes/socketio.py
|
knowsWhereHisTowelIs/pi-pyth-serv-socketio
|
91f85439ac7a33dc723e0614d7ebdfd3c8260ad4
|
[
"MIT"
] | null | null | null |
routes/socketio.py
|
knowsWhereHisTowelIs/pi-pyth-serv-socketio
|
91f85439ac7a33dc723e0614d7ebdfd3c8260ad4
|
[
"MIT"
] | null | null | null |
import include.WebServer as WebServer
@WebServer.addRoute('/sio')
def socketio():
return WebServer.render('sio-test.html')
| 18.428571
| 44
| 0.744186
| 16
| 129
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 129
| 6
| 45
| 21.5
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0.131783
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
15f16e80187d53d23cebdaa3a32819e1d3d6fccb
| 48
|
py
|
Python
|
t4k/utils/__init__.py
|
Yoshiki-Takahashi/tools4kaggle
|
eb2779687867e876f6beec1351140cfec046b152
|
[
"MIT"
] | null | null | null |
t4k/utils/__init__.py
|
Yoshiki-Takahashi/tools4kaggle
|
eb2779687867e876f6beec1351140cfec046b152
|
[
"MIT"
] | null | null | null |
t4k/utils/__init__.py
|
Yoshiki-Takahashi/tools4kaggle
|
eb2779687867e876f6beec1351140cfec046b152
|
[
"MIT"
] | null | null | null |
from t4k.utils.mem_reduction import to_lowerbit
| 24
| 47
| 0.875
| 8
| 48
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022727
| 0.083333
| 48
| 1
| 48
| 48
| 0.886364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c6459eb87134062b5d42f41b462d35c26182e9e1
| 194
|
py
|
Python
|
x-frame-options/support/redirect.py
|
meyerweb/wpt
|
f04261533819893c71289614c03434c06856c13e
|
[
"BSD-3-Clause"
] | 14,668
|
2015-01-01T01:57:10.000Z
|
2022-03-31T23:33:32.000Z
|
x-frame-options/support/redirect.py
|
meyerweb/wpt
|
f04261533819893c71289614c03434c06856c13e
|
[
"BSD-3-Clause"
] | 7,642
|
2018-05-28T09:38:03.000Z
|
2022-03-31T20:55:48.000Z
|
x-frame-options/support/redirect.py
|
meyerweb/wpt
|
f04261533819893c71289614c03434c06856c13e
|
[
"BSD-3-Clause"
] | 5,941
|
2015-01-02T11:32:21.000Z
|
2022-03-31T16:35:46.000Z
|
def main(request, response):
response.status = 302
response.headers.set(b"X-Frame-Options", request.GET.first(b"value"))
response.headers.set(b"Location", request.GET.first(b"url"))
| 38.8
| 73
| 0.71134
| 29
| 194
| 4.758621
| 0.586207
| 0.217391
| 0.26087
| 0.275362
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017442
| 0.113402
| 194
| 4
| 74
| 48.5
| 0.784884
| 0
| 0
| 0
| 0
| 0
| 0.159794
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c655e5f115a927c6e006a3f9c3f56c17b8ca6033
| 131
|
py
|
Python
|
src/constants.py
|
rahmanmd86/mntn_challenge
|
3c909d2fbdb2c9304eda7e27198ffdd497f5e1f9
|
[
"Unlicense"
] | null | null | null |
src/constants.py
|
rahmanmd86/mntn_challenge
|
3c909d2fbdb2c9304eda7e27198ffdd497f5e1f9
|
[
"Unlicense"
] | null | null | null |
src/constants.py
|
rahmanmd86/mntn_challenge
|
3c909d2fbdb2c9304eda7e27198ffdd497f5e1f9
|
[
"Unlicense"
] | null | null | null |
GET_POSTS_RESPONSE_SCHEMA = 'get_posts_response_schema.json'
GET_POSTS_ALL_RESPONSE_SCHEMA = 'get_posts_all_response_schema.json'
| 32.75
| 68
| 0.885496
| 20
| 131
| 5.1
| 0.3
| 0.313725
| 0.313725
| 0.431373
| 0.490196
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053435
| 131
| 3
| 69
| 43.666667
| 0.822581
| 0
| 0
| 0
| 0
| 0
| 0.492308
| 0.492308
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c658bc31b2015b7f5456c0db3699c73a495fc15a
| 30
|
py
|
Python
|
Chapter 01/ch1_38.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
|
f6a4194684515495d00aa38347a725dd08f39a0c
|
[
"MIT"
] | null | null | null |
Chapter 01/ch1_38.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
|
f6a4194684515495d00aa38347a725dd08f39a0c
|
[
"MIT"
] | null | null | null |
Chapter 01/ch1_38.py
|
bpbpublications/TEST-YOUR-SKILLS-IN-PYTHON-LANGUAGE
|
f6a4194684515495d00aa38347a725dd08f39a0c
|
[
"MIT"
] | null | null | null |
import sys
print(sys.maxsize)
| 15
| 18
| 0.8
| 5
| 30
| 4.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 2
| 18
| 15
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
d6a28878f8ea9cfc33e41d034537fd617f42edc3
| 2,703
|
py
|
Python
|
tascii.py
|
VyomaanDave0711/tascii
|
0b6c81921f9b642f77af606e0406bcb0e18ba2ce
|
[
"Apache-2.0"
] | 1
|
2021-09-20T00:21:29.000Z
|
2021-09-20T00:21:29.000Z
|
tascii.py
|
VyomaanDave0711/tascii
|
0b6c81921f9b642f77af606e0406bcb0e18ba2ce
|
[
"Apache-2.0"
] | null | null | null |
tascii.py
|
VyomaanDave0711/tascii
|
0b6c81921f9b642f77af606e0406bcb0e18ba2ce
|
[
"Apache-2.0"
] | 1
|
2020-10-02T05:19:42.000Z
|
2020-10-02T05:19:42.000Z
|
# ! FUNCTIONS
# ! LETTERS
a = """
/\\
/ \\
/ /\\ \\
/ ____ \\
/_/ \\_\\ """
b = """
____
| _ \\
| |_) |
| _ <
| |_) |
|____/
"""
c = """
_____
/ ____|
| |
| |
| |____
\_____|
"""
d = """
_____
| __ \
| | | |
| | | |
| |__| |
|_____/
"""
e = """
______
| ____|
| |__
| __|
| |____
|______|
"""
f = """
______
| ____|
| |__
| __|
| |
|_|
"""
g = """
_____
/ ____|
| | __
| | |_ |
| |__| |
\_____|
"""
h = """
_ _
| | | |
| |__| |
| __ |
| | | |
|_| |_|
"""
i = """
_____
|_ _|
| |
| |
_| |_
|_____|
"""
j = """
_
| |
| |
_ | |
| |__| |
\____/
"""
k = """
_ __
| |/ /
| ' /
| <
| . \
|_|\_\
"""
l = """
_
| |
| |
| |
| |____
|______|
"""
m = """
__ __
| \/ |
| \ / |
| |\/| |
| | | |
|_| |_|
"""
n = """
_ _
| \ | |
| \| |
| . ` |
| |\ |
|_| \_|
"""
o = """
____
/ __ \
| | | |
| | | |
| |__| |
\____/
"""
p = """
_____
| __ \
| |__) |
| ___/
| |
|_|
"""
q = """
____
/ __ \
| | | |
| | | |
| |__| |
\___\_\
"""
r = """
_____
| __ \
| |__) |
| _ /
| | \ \
|_| \_\
"""
s = """
_____
/ ____|
| (___
\___ \
____) |
|_____/
"""
t = """
_______
|__ __|
| |
| |
| |
|_|
"""
u = """
_ _
| | | |
| | | |
| | | |
| |__| |
\____/
"""
v = """
__ __
\ \ / /
\ \ / /
\ \/ /
\ /
\/
"""
w = """
__ __
\ \ / /
\ \ /\ / /
\ \/ \/ /
\ /\ /
\/ \/
"""
x = """
__ __
\ \ / /
\ V /
> <
/ . \
/_/ \_\
"""
y = """
__ __
\ \ / /
\ \_/ /
\ /
| |
|_|
"""
z = """
______
|___ /
/ /
/ /
/ /__
/_____|
"""
# ! NUMEBRS
0 = """
___
/ _ \
| | | |
| | | |
| |_| |
\___/
"""
1 = """
__
/_ |
| |
| |
| |
|_|
"""
2 = """
___
|__ \
) |
/ /
/ /_
|____|
"""
3 = """
____
|___ \
__) |
|__ <
___) |
|____/
"""
4 = """
_ _
| || |
| || |_
|__ _|
| |
|_|
"""
5 = """
_____
| ____|
| |__
|___ \
___) |
|____/
"""
6 = """
__
/ /
/ /_
| '_ \
| (_) |
\___/
"""
7 = """
______
|____ |
/ /
/ /
/ /
/_/
"""
8 = """
___
/ _ \
| (_) |
> _ <
| (_) |
\___/
"""
9 = """
___
/ _ \
| (_) |
\__, |
/ /
/_/
"""
| 8.092814
| 17
| 0.169441
| 40
| 2,703
| 1.5
| 0.975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00821
| 0.54939
| 2,703
| 333
| 18
| 8.117117
| 0.041051
| 0.011839
| 0
| 0.723776
| 0
| 0
| 0.837899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d6b109f2f62dcac727616aaa0530b1d48c1acb17
| 222
|
py
|
Python
|
klap4/config/config.py
|
griffin962/backendbois
|
ae35fd772d096ce88a8eceb25de81e6bc4ef14e0
|
[
"MIT"
] | 1
|
2020-02-12T20:45:41.000Z
|
2020-02-12T20:45:41.000Z
|
klap4/config/config.py
|
griffin962/backendbois
|
ae35fd772d096ce88a8eceb25de81e6bc4ef14e0
|
[
"MIT"
] | null | null | null |
klap4/config/config.py
|
griffin962/backendbois
|
ae35fd772d096ce88a8eceb25de81e6bc4ef14e0
|
[
"MIT"
] | 1
|
2020-04-04T20:02:49.000Z
|
2020-04-04T20:02:49.000Z
|
from datetime import timedelta
def config():
return {
"clientOrigin": "http://localhost:8080",
"accessExpiration": timedelta(hours=6),
"refreshExpiration": timedelta(hours=6)
}
| 24.666667
| 48
| 0.603604
| 19
| 222
| 7.052632
| 0.789474
| 0.208955
| 0.223881
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.27027
| 222
| 9
| 49
| 24.666667
| 0.790123
| 0
| 0
| 0
| 0
| 0
| 0.295964
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| true
| 0
| 0.142857
| 0.142857
| 0.428571
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
d6c1f0dcf2c0a56724de4ab249133a50e75c33a8
| 46
|
py
|
Python
|
tests/test_hf.py
|
hongzhouye/sigma-SCF
|
62e2dce538d1e68c4dc3c72fdf27beb1911e544f
|
[
"BSD-3-Clause"
] | 4
|
2016-07-30T22:02:50.000Z
|
2018-08-02T23:46:15.000Z
|
tests/test_hf.py
|
hongzhouye/sigma-SCF
|
62e2dce538d1e68c4dc3c72fdf27beb1911e544f
|
[
"BSD-3-Clause"
] | 11
|
2017-08-04T20:34:04.000Z
|
2017-08-08T23:07:42.000Z
|
tests/test_hf.py
|
hongzhouye/sigma-SCF
|
62e2dce538d1e68c4dc3c72fdf27beb1911e544f
|
[
"BSD-3-Clause"
] | null | null | null |
"""
This is a test.
"""
def test():
pass
| 6.571429
| 15
| 0.478261
| 7
| 46
| 3.142857
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.304348
| 46
| 6
| 16
| 7.666667
| 0.6875
| 0.326087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
d6e952f0e218b750755111fc87bbaa5aed3e24e0
| 77
|
py
|
Python
|
priceprop/__init__.py
|
felixpatzelt/priceprop
|
038832b5e89b8559c6162e39f1b446f4446fe7f2
|
[
"MIT"
] | 17
|
2018-01-17T13:19:42.000Z
|
2022-01-25T14:02:10.000Z
|
priceprop/__init__.py
|
felixpatzelt/priceprop
|
038832b5e89b8559c6162e39f1b446f4446fe7f2
|
[
"MIT"
] | null | null | null |
priceprop/__init__.py
|
felixpatzelt/priceprop
|
038832b5e89b8559c6162e39f1b446f4446fe7f2
|
[
"MIT"
] | 7
|
2018-07-14T06:17:05.000Z
|
2021-05-16T13:59:47.000Z
|
from propagator import *
def __reload_submodules__():
reload(propagator)
| 19.25
| 28
| 0.779221
| 8
| 77
| 6.875
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 77
| 4
| 29
| 19.25
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bacccec48075da7d6861ec6e5cdb76ba39f09d57
| 8,695
|
py
|
Python
|
lang.py
|
rohan2546/TechArmy
|
c086157c7db8ec7eb137b81e7854e31f7dfb678b
|
[
"MIT"
] | 1
|
2020-03-12T06:21:09.000Z
|
2020-03-12T06:21:09.000Z
|
lang.py
|
rohan2546/TechArmy
|
c086157c7db8ec7eb137b81e7854e31f7dfb678b
|
[
"MIT"
] | null | null | null |
lang.py
|
rohan2546/TechArmy
|
c086157c7db8ec7eb137b81e7854e31f7dfb678b
|
[
"MIT"
] | null | null | null |
from subprocess import Popen, PIPE
import time
from multiprocessing.pool import ThreadPool
from multiprocessing import Pool
import os
import signal
# Getting JAVA class name
def get_class_name(program_path):
fptr = open(program_path+'.txt',"r")
contents = tuple(fptr)
fptr.close()
contents =[x.strip() for x in contents]
for lines in contents:
words = []
if 'class' in lines:
words = lines.split(' ')
for i in range(len(words)):
if words[i] == 'class':
return words[i+1]
break
class languages:
def __init__(self, student_id, problem_id, contest_id, time_out):
self.student_id = student_id
self.problem_id = problem_id
self.contest_id = contest_id
self.student_path = contest_id+"/temp_"+student_id
self.code_path = self.student_path+"/temp"
self.time_out = time_out
def processes_py(self, p):
code_path = self.code_path+".py"
pid = os.getpid()
fp = open("problems/"+self.problem_id+"/in"+str(p)+".txt", "r")
contents = fp.read()
fp.close()
def signal_handler(signum, frame):
raise Exception("Timed out!")
timeout = False
#signal.signal(signal.SIGALRM, signal_handler)
# signal.alarm(self.time_out) # timeout seconds
stdout = ''
stderr = 'e'
#t = 2
# try:
start_time = time.time()
op = Popen(["timeout", "2s", "python", code_path], stdin=PIPE,
stdout=PIPE, stderr=PIPE)
stdout, stderr = op.communicate(contents.encode("utf-8"))
t = (time.time() - start_time)
stdout = stdout.decode()
stderr = stderr.decode()
# try:
# os.kill(op.pid, signal.SIGKILL)
# except:
# pass
# except Exception as i:
# timeout = True
# return(stdout, stderr, t, pid)
# write code to compare output with test_case_op file and update value of status
# fp = open("problems/"+self.problem_id+"/op"+str(p)+".txt", "r")
# contents = fp.read()
# fp.close()
# status = (stdout == contents)
return(stdout, stderr, t, pid)
def get_number_of_testcases(self):
fp = open("problems/"+self.problem_id+"/number_cases.txt", "r")
contents = fp.read()
return (int(contents))
def py_lang(self):
code_path = self.code_path+".py"
# to check for compilation error; dont proceed into threading if compilation error
#op = Popen(["python", code_path], stdin=PIPE, stdout=PIPE, stderr=PIPE)
#stdout, stderr = op.communicate()
#stdout = stdout.decode()
#stderr = stderr.decode()
stderr = ''
try:
os.kill(op.pid, signal.SIGKILL)
except:
pass
if(stderr == ''):
testcases = self.get_number_of_testcases()
#p = Pool(processes=testcases)
p = ThreadPool()
results = p.map(self.processes_py, list(range(testcases)))
p.close()
# for stdout, stderr, t, pid in results:
# try:
# os.kill(op.pid, signal.SIGKILL)
# except:
# pass
# p.terminate()
# p.join()
return results, 1
else:
return stderr, 0
def processes_C(self, p):
fp = open("problems/"+self.problem_id+"/in"+str(p)+".txt", "r")
contents = fp.read()
fp.close()
# def signal_handler(signum, frame):
# raise Exception("Timed out!")
# timeout = False
# signal.signal(signal.SIGALRM, signal_handler)
# signal.alarm(self.time_out) # timeout seconds
# try:
start_time = time.time()
op = Popen(["timeout","2s",self.student_path+"/./a.out"],
stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = op.communicate(contents.encode("utf-8"))
t = (time.time() - start_time)
stdout = stdout.decode()
stderr = stderr.decode()
# except Exception as i:
# timeout = True
# return(timeout)
# write code to compare output with test_case_op file and update value of status
fp = open("problems/"+self.problem_id+"/op"+str(p)+".txt", "r")
contents = fp.read()
fp.close()
status = (stdout == contents)
return(stdout, stderr, status, t)
def C_lang(self):
code_path = self.code_path+".c"
def get_number_of_testcases():
fp = open("problems/"+self.problem_id+"/number_cases.txt", "r")
contents = fp.read()
return (int(contents))
op = Popen(["gcc", "-w", code_path],
stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = op.communicate()
stdout = stdout.decode()
stderr = stderr.decode()
if(stderr == ''):
testcases = self.get_number_of_testcases()
p = ThreadPool()
results = p.map(self.processes_C, list(range(testcases)))
p.close()
return results, 1
else:
return stderr, 0
def processes_cpp(self, p):
fp = open("problems/"+self.problem_id+"/in"+str(p)+".txt", "r")
contents = fp.read()
fp.close()
# def signal_handler(signum, frame):
# raise Exception("Timed out!")
# timeout = False
# signal.signal(signal.SIGALRM, signal_handler)
# signal.alarm(self.time_out) # timeout seconds
# try:
start_time = time.time()
op = Popen(["timeout","2s",self.student_path+"/./a.out"],
stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = op.communicate(contents.encode("utf-8"))
t = (time.time() - start_time)
stdout = stdout.decode()
stderr = stderr.decode()
# except Exception as i:
# timeout = True
# return(timeout)
# write code to compare output with test_case_op file and update value of status
fp = open("problems/"+self.problem_id+"/op"+str(p)+".txt", "r")
contents = fp.read()
fp.close()
status = (stdout == contents)
return(stdout, stderr, status, t)
def Cpp_lang(self):
code_path = self.code_path+".cpp"
op = Popen(["g++", "-w", code_path],
stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = op.communicate()
stdout = stdout.decode()
stderr = stderr.decode()
if(stderr == ''):
testcases = self.get_number_of_testcases()
p = ThreadPool()
results = p.map(self.processes_cpp, list(range(testcases)))
p.close()
return results, 1
else:
return stderr, 0
def processes_java(self, p):
fp = open("problems/"+self.problem_id+"/in"+str(p)+".txt", "r")
contents = fp.read()
fp.close()
def signal_handler(signum, frame):
raise Exception("Timed out!")
timeout = False
signal.signal(signal.SIGALRM, signal_handler)
signal.alarm(self.time_out) # timeout seconds
# try:
start_time = time.time()
op = Popen(["timeout","2s","java", self.student_path+"/temp"],
stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = op.communicate(contents.encode("utf-8"))
t = (time.time() - start_time)
stdout = stdout.decode()
stderr = stderr.decode()
# except Exception as i:
# timeout = True
# return(timeout)
# write code to compare output with test_case_op file and update value of status
fp = open("problems/"+self.problem_id+"/op"+str(p)+".txt", "r")
contents = fp.read()
fp.close()
status = (stdout == contents)
return(stdout, stderr, status, t)
def java_lang(self):
new_code_path = get_class_name(self.code_path)
code_path = new_code_path+".java"
op = Popen(["javac", code_path],
stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = op.communicate()
stdout = stdout.decode()
stderr = stderr.decode()
if(stderr != ''):
testcases = get_number_of_testcases()
p = ThreadPool()
results = p.map(self.processes_java, list(range(testcases)))
p.close()
return results, 1
else:
return stderr, 0
| 29.982759
| 90
| 0.544911
| 1,006
| 8,695
| 4.590457
| 0.131213
| 0.031182
| 0.028584
| 0.038978
| 0.804461
| 0.784972
| 0.779775
| 0.754006
| 0.744478
| 0.69424
| 0
| 0.003065
| 0.324554
| 8,695
| 289
| 91
| 30.086505
| 0.783245
| 0.191949
| 0
| 0.619048
| 0
| 0
| 0.048823
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0.005952
| 0.035714
| 0
| 0.190476
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
245f45eea7e727d461ade2565ffdf4c4f6d4c9eb
| 44
|
py
|
Python
|
utokenize/testdata/basic.py
|
MaxTurchin/pycopy-lib
|
d7a69fc2a28031e2ca475c29239f715c1809d8cc
|
[
"PSF-2.0"
] | 126
|
2019-07-19T14:42:41.000Z
|
2022-03-21T22:22:19.000Z
|
utokenize/testdata/basic.py
|
MaxTurchin/pycopy-lib
|
d7a69fc2a28031e2ca475c29239f715c1809d8cc
|
[
"PSF-2.0"
] | 38
|
2019-08-28T01:46:31.000Z
|
2022-03-17T05:46:51.000Z
|
utokenize/testdata/basic.py
|
MaxTurchin/pycopy-lib
|
d7a69fc2a28031e2ca475c29239f715c1809d8cc
|
[
"PSF-2.0"
] | 55
|
2019-08-02T09:32:33.000Z
|
2021-12-22T11:25:51.000Z
|
def foo():
print(1)
print(2)
foo()
| 7.333333
| 12
| 0.477273
| 7
| 44
| 3
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0.318182
| 44
| 5
| 13
| 8.8
| 0.633333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0
| 0.25
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
79f25d7d028cf2830459e871ed5384adb03e14b0
| 36
|
py
|
Python
|
main.py
|
Itsmemythic/Web-dev
|
82ad67ea157b42f17dad1ae6403b85bfeeb15c35
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
Itsmemythic/Web-dev
|
82ad67ea157b42f17dad1ae6403b85bfeeb15c35
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
Itsmemythic/Web-dev
|
82ad67ea157b42f17dad1ae6403b85bfeeb15c35
|
[
"Apache-2.0"
] | null | null | null |
import time
import os
import random
| 9
| 13
| 0.833333
| 6
| 36
| 5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 36
| 3
| 14
| 12
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
031efcd9cfcaabc5f4c77c86e3aa024a7760d477
| 47
|
py
|
Python
|
conan/tools/intel/__init__.py
|
Wonders11/conan
|
28ec09f6cbf1d7e27ec27393fd7bbc74891e74a8
|
[
"MIT"
] | 6,205
|
2015-12-01T13:40:05.000Z
|
2022-03-31T07:30:25.000Z
|
conan/tools/intel/__init__.py
|
Wonders11/conan
|
28ec09f6cbf1d7e27ec27393fd7bbc74891e74a8
|
[
"MIT"
] | 8,747
|
2015-12-01T16:28:48.000Z
|
2022-03-31T23:34:53.000Z
|
conan/tools/intel/__init__.py
|
Mattlk13/conan
|
005fc53485557b0a570bb71670f2ca9c66082165
|
[
"MIT"
] | 961
|
2015-12-01T16:56:43.000Z
|
2022-03-31T13:50:52.000Z
|
from conan.tools.intel.intel_cc import IntelCC
| 23.5
| 46
| 0.851064
| 8
| 47
| 4.875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0325162ac3f7f5c6b275a36da04091463fc4598c
| 31
|
py
|
Python
|
commands/__init__.py
|
izhx/allennlpadd
|
0907f1285121c6d02f5aacb326870ddb90342d31
|
[
"Apache-2.0"
] | null | null | null |
commands/__init__.py
|
izhx/allennlpadd
|
0907f1285121c6d02f5aacb326870ddb90342d31
|
[
"Apache-2.0"
] | null | null | null |
commands/__init__.py
|
izhx/allennlpadd
|
0907f1285121c6d02f5aacb326870ddb90342d31
|
[
"Apache-2.0"
] | null | null | null |
from .tune import tune # noqa
| 15.5
| 30
| 0.709677
| 5
| 31
| 4.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225806
| 31
| 1
| 31
| 31
| 0.916667
| 0.129032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
03322a70f32ff499835d1fb9c63bb83b8ea295ce
| 47
|
py
|
Python
|
app.py
|
kabutokungzz/Smartcam
|
8a031206ac7decc00554ba1f53c2dd2dbbaa0118
|
[
"MIT"
] | null | null | null |
app.py
|
kabutokungzz/Smartcam
|
8a031206ac7decc00554ba1f53c2dd2dbbaa0118
|
[
"MIT"
] | null | null | null |
app.py
|
kabutokungzz/Smartcam
|
8a031206ac7decc00554ba1f53c2dd2dbbaa0118
|
[
"MIT"
] | null | null | null |
import datetime
import main as app
app.main()
| 9.4
| 18
| 0.765957
| 8
| 47
| 4.5
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 4
| 19
| 11.75
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
03519f277b323b1bb86047784d46b5f5c705217c
| 219
|
py
|
Python
|
api/__init__.py
|
PeterYang21/plastering
|
7c7a21b2f18df78a9d8ec29f3d1d9f47d82c658f
|
[
"MIT"
] | null | null | null |
api/__init__.py
|
PeterYang21/plastering
|
7c7a21b2f18df78a9d8ec29f3d1d9f47d82c658f
|
[
"MIT"
] | null | null | null |
api/__init__.py
|
PeterYang21/plastering
|
7c7a21b2f18df78a9d8ec29f3d1d9f47d82c658f
|
[
"MIT"
] | null | null | null |
from flask import Flask
def create_app():
app = Flask(__name__)
register_blueprints(app)
return app
def register_blueprints(app):
from api import api_blueprint
app.register_blueprint(api_blueprint)
| 21.9
| 41
| 0.753425
| 29
| 219
| 5.344828
| 0.413793
| 0.232258
| 0.270968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.182648
| 219
| 10
| 41
| 21.9
| 0.865922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.625
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
|
0
| 6
|
035b6530b460a3398ba474362472f30bf635c091
| 76
|
py
|
Python
|
tests/test_attribute/output.py
|
waadnakhleh/pythonformatter
|
5f622986aa4e2fcdf03e49041a7ddc14e66d1a2f
|
[
"MIT"
] | null | null | null |
tests/test_attribute/output.py
|
waadnakhleh/pythonformatter
|
5f622986aa4e2fcdf03e49041a7ddc14e66d1a2f
|
[
"MIT"
] | 19
|
2020-12-28T17:17:12.000Z
|
2021-12-22T20:44:42.000Z
|
tests/test_attribute/output.py
|
waadnakhleh/pythonformatter
|
5f622986aa4e2fcdf03e49041a7ddc14e66d1a2f
|
[
"MIT"
] | 1
|
2021-03-20T17:41:14.000Z
|
2021-03-20T17:41:14.000Z
|
def foo():
print("this is foo")
print(foo.__name__, foo.__qualname__)
| 12.666667
| 37
| 0.671053
| 11
| 76
| 3.909091
| 0.636364
| 0.372093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171053
| 76
| 5
| 38
| 15.2
| 0.68254
| 0
| 0
| 0
| 0
| 0
| 0.144737
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
0373d15edcc321e87f9a20ab0d46f30fdc174fc6
| 81
|
py
|
Python
|
haversine_distance/__init__.py
|
dkerrgis/HaverRaster
|
2f4edb6e59ca78526104873074fe1b6dbea58d8e
|
[
"MIT"
] | null | null | null |
haversine_distance/__init__.py
|
dkerrgis/HaverRaster
|
2f4edb6e59ca78526104873074fe1b6dbea58d8e
|
[
"MIT"
] | null | null | null |
haversine_distance/__init__.py
|
dkerrgis/HaverRaster
|
2f4edb6e59ca78526104873074fe1b6dbea58d8e
|
[
"MIT"
] | null | null | null |
from .haver_raster import *
from .utils import *
from .distance_to_edge import *
| 20.25
| 31
| 0.777778
| 12
| 81
| 5
| 0.666667
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 81
| 3
| 32
| 27
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3028e60860c90fc588ec0c543d15fe3a93db3120
| 32,528
|
py
|
Python
|
tests/test_data_patterns.py
|
DeNederlandscheBank/data-patterns
|
bfef347c7580764eb0f11e5592e5f13343df5c4a
|
[
"MIT"
] | 7
|
2019-11-08T20:35:12.000Z
|
2022-02-01T18:53:47.000Z
|
tests/test_data_patterns.py
|
DeNederlandscheBank/data-patterns
|
bfef347c7580764eb0f11e5592e5f13343df5c4a
|
[
"MIT"
] | 1
|
2021-01-08T16:26:22.000Z
|
2021-01-17T16:05:10.000Z
|
tests/test_data_patterns.py
|
DeNederlandscheBank/data-patterns
|
bfef347c7580764eb0f11e5592e5f13343df5c4a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `data_patterns` package."""
import unittest
import os
from data_patterns import data_patterns
import pandas as pd
class TestData_patterns(unittest.TestCase):
    """Tests for the `data_patterns` package (PatternMiner.find and helpers)."""

    # Column layout of the miner's result slice compared in most tests.
    RESULT_COLUMNS = ['index', 'pattern_id', 'cluster', 'pattern_def',
                      'support', 'exceptions', 'confidence']

    @staticmethod
    def _insurer_df():
        """Return the standard 10-insurer input DataFrame shared by tests 1-9."""
        df = pd.DataFrame(
            columns=['Name', 'Type', 'Assets', 'TV-life', 'TV-nonlife',
                     'Own funds', 'Excess'],
            data=[['Insurer 1', 'life insurer', 1000, 800, 0, 200, 200],
                  ['Insurer 2', 'non-life insurer', 4000, 0, 3200, 800, 800],
                  ['Insurer 3', 'non-life insurer', 800, 0, 700, 100, 100],
                  ['Insurer 4', 'life insurer', 2500, 1800, 0, 700, 700],
                  ['Insurer 5', 'non-life insurer', 2100, 0, 2200, 200, 200],
                  ['Insurer 6', 'life insurer', 9000, 8800, 0, 200, 200],
                  ['Insurer 7', 'life insurer', 9000, 8800, 0, 200, 200],
                  ['Insurer 8', 'life insurer', 9000, 8800, 0, 200, 200],
                  ['Insurer 9', 'non-life insurer', 9000, 8800, 0, 200, 200],
                  ['Insurer 10', 'non-life insurer', 9000, 0, 8800, 200, 199.99]])
        df.set_index('Name', inplace=True)
        return df

    @classmethod
    def _expected_patterns(cls, rows):
        """Build the expected PatternDataFrame from result rows (index value first)."""
        expected = pd.DataFrame(columns=cls.RESULT_COLUMNS, data=rows)
        expected.set_index('index', inplace=True)
        return data_patterns.PatternDataFrame(expected)

    @staticmethod
    def _mine(df, pattern):
        """Run the pattern miner on *df* and return the comparable result slice."""
        miner = data_patterns.PatternMiner(df)
        found = miner.find(pattern)
        return data_patterns.PatternDataFrame(
            found.loc[:, 'pattern_id':'confidence'])

    def test_pattern1(self):
        """Conditional (-->) pattern with encoded Q columns."""
        df = self._insurer_df()
        pattern = {'name': 'Pattern 1',
                   'pattern': '-->',
                   'P_columns': ['Type'],
                   'Q_columns': ['Assets', 'TV-life', 'TV-nonlife', 'Own funds'],
                   'encode': {'Assets': 'reported',
                              'TV-life': 'reported',
                              'TV-nonlife': 'reported',
                              'Own funds': 'reported'}}
        expected = self._expected_patterns(
            [[0, 'Pattern 1', 0,
              'IF ({"Type"} = "life insurer") THEN ({"Assets"} = "reported") & ({"TV-life"} = "reported") & ({"TV-nonlife"} = "not reported") & ({"Own funds"} = "reported")',
              5, 0, 1],
             [1, 'Pattern 1', 0,
              'IF ({"Type"} = "non-life insurer") THEN ({"Assets"} = "reported") & ({"TV-life"} = "not reported") & ({"TV-nonlife"} = "reported") & ({"Own funds"} = "reported")',
              4, 1, 0.8]])
        actual = self._mine(df, pattern)
        self.assertEqual(type(actual), type(expected),
                         "Pattern test 1: types do not match")
        pd.testing.assert_frame_equal(actual, expected)

    def test_pattern2(self):
        """Conditional pattern with explicit P/Q values, operators and OR logic."""
        df = self._insurer_df()
        pattern = {'name': 'Pattern 1',
                   'pattern': '-->',
                   'P_columns': ['TV-life', 'Assets'],
                   'P_values': [100, 0],
                   'Q_values': [0, 0],
                   'Q_columns': ['TV-nonlife', 'Own funds'],
                   'parameters': {"min_confidence": 0, "min_support": 1,
                                  'Q_operators': ['>', '>'],
                                  'P_operators': ['<', '>'],
                                  'Q_logics': ['|'],
                                  'both_ways': False}}
        expected = self._expected_patterns(
            [[0, 'Pattern 1', 0,
              'IF ({"TV-life"} < 100) & ({"Assets"} > 0) THEN ({"TV-nonlife"} > 0) | ({"Own funds"} > 0)',
              4, 0, 1.0]])
        actual = self._mine(df, pattern)
        self.assertEqual(type(actual), type(expected),
                         "Pattern test 2: types do not match")
        pd.testing.assert_frame_equal(actual, expected)

    def test_pattern3(self):
        """Equality pattern against a constant value with support/confidence cutoffs."""
        df = self._insurer_df()
        pattern = {'name': 'equal values',
                   'pattern': '=',
                   'value': 0,
                   'parameters': {"min_confidence": 0.5,
                                  "min_support": 2}}
        expected = self._expected_patterns(
            [[0, 'equal values', 0, '({"TV-nonlife"} = 0)',
              6, 4, .6]])
        actual = self._mine(df, pattern)
        self.assertEqual(type(actual), type(expected),
                         "Pattern test 3: types do not match")
        pd.testing.assert_frame_equal(actual, expected)

    def test_pattern4(self):
        """Conditional pattern evaluated both ways (IF ... THEN ... AND IF ~ ...)."""
        df = self._insurer_df()
        pattern = {'name': 'Pattern 1',
                   'pattern': '-->',
                   'P_columns': ['TV-life'],
                   'P_values': [0],
                   'Q_columns': ['TV-nonlife'],
                   'Q_values': [8800],
                   'parameters': {"min_confidence": 0, "min_support": 1,
                                  'both_ways': True}}
        expected = self._expected_patterns(
            [[0, 'Pattern 1', 0,
              'IF ({"TV-life"} = 0) THEN ({"TV-nonlife"} = 8800) AND IF ~({"TV-life"} = 0) THEN ~({"TV-nonlife"} = 8800)',
              7, 3, 0.7]])
        actual = self._mine(df, pattern)
        self.assertEqual(type(actual), type(expected),
                         "Pattern test 4: types do not match")
        pd.testing.assert_frame_equal(actual, expected)

    def test_pattern5(self):
        """Sum pattern ({a} + {b} = {c}) with the nonzero parameter."""
        df = self._insurer_df()
        pattern = {'name': 'sum pattern',
                   'pattern': 'sum',
                   'parameters': {"min_confidence": 0.5,
                                  "min_support": 1,
                                  "nonzero": True}}
        expected = self._expected_patterns(
            [[0, 'sum pattern', 0, '({"TV-life"} + {"Own funds"} = {"Assets"})',
              6, 0, 1.0],
             [1, 'sum pattern', 0, '({"TV-life"} + {"Excess"} = {"Assets"})',
              6, 0, 1.0],
             [2, 'sum pattern', 0, '({"TV-nonlife"} + {"Own funds"} = {"Assets"})',
              3, 1, 0.75],
             [3, 'sum pattern', 0, '({"TV-nonlife"} + {"Excess"} = {"Assets"})',
              3, 1, 0.75]])
        actual = self._mine(df, pattern)
        self.assertEqual(type(actual), type(expected),
                         "Pattern test 5: types do not match")
        pd.testing.assert_frame_equal(actual, expected)

    def test_pattern6(self):
        """Pattern supplied as a free-form regex expression (both-ways form)."""
        df = self._insurer_df()
        parameters = {'min_confidence': 0.5, 'min_support': 2}
        p2 = {'name': 'Pattern 1',
              'expression': 'IF ({.*TV-life.*} = 0) THEN ({.*TV-nonlife.*} = 8800) AND IF ~({.*TV-life.*} = 0) THEN ~({.*TV-nonlife.*} = 8800)',
              'parameters': parameters}
        expected = self._expected_patterns(
            [[0, 'Pattern 1', 0,
              'IF ({"TV-life"} = 0) THEN ({"TV-nonlife"} = 8800) AND IF ~({"TV-life"} = 0) THEN ~({"TV-nonlife"} = 8800)',
              7, 3, 0.7]])
        actual = self._mine(df, p2)
        # Fixed assertion message: previously said "Pattern test 4" (copy-paste).
        self.assertEqual(type(actual), type(expected),
                         "Pattern test 6: types do not match")
        pd.testing.assert_frame_equal(actual, expected)

    def test_pattern7(self):
        """Expression with [@] wildcards expanding to concrete column values."""
        df = self._insurer_df()
        p2 = {'name': 'Pattern 1',
              'expression': 'IF ({.*Ty.*} = [@]) THEN ({.*.*} = [@])'}
        expected = self._expected_patterns(
            [[0, 'Pattern 1', 0,
              'IF ({"Type"} = "non-life insurer") THEN ({"TV-life"} = 0)',
              4, 1, 0.8],
             [1, 'Pattern 1', 0,
              'IF ({"Type"} = "life insurer") THEN ({"TV-nonlife"} = 0)',
              5, 0, 1.0],
             [2, 'Pattern 1', 0,
              'IF ({"Type"} = "life insurer") THEN ({"Own funds"} = 200)',
              4, 1, 0.8],
             [3, 'Pattern 1', 0,
              'IF ({"Type"} = "life insurer") THEN ({"Excess"} = 200.0)',
              4, 1, 0.8]])
        actual = self._mine(df, p2)
        self.assertEqual(type(actual), type(expected),
                         "Pattern test 7: types do not match")
        pd.testing.assert_frame_equal(actual, expected)

    def test_pattern8(self):
        """Percentile pattern producing range conditions per column."""
        df = self._insurer_df()
        parameters = {'min_confidence': 0.3, 'min_support': 1, 'percentile': 90}
        p2 = {'name': 'Pattern 1',
              'pattern': 'percentile',
              'columns': ['TV-nonlife', 'Own funds'],
              'parameters': parameters}
        expected = self._expected_patterns(
            [[0, 'Pattern 1', 0,
              '({"TV-nonlife"} >= 0.0) & ({"TV-nonlife"} <= 6280.0)',
              9, 1, 0.9],
             [1, 'Pattern 1', 0,
              '({"Own funds"} >= 145.0) & ({"Own funds"} <= 755.0)',
              8, 2, 0.8]])
        actual = self._mine(df, p2)
        self.assertEqual(type(actual), type(expected),
                         "Pattern test 8: types do not match")
        pd.testing.assert_frame_equal(actual, expected)

    def test_pattern9(self):
        """Equality pattern clustered by the 'Type' column."""
        df = self._insurer_df()
        p2 = {'name': 'Pattern 1', 'cluster': 'Type',
              'pattern': '='}
        expected = self._expected_patterns(
            [[0, 'Pattern 1', 'life insurer', '({"Own funds"} = {"Excess"})',
              5, 0, 1.0],
             [1, 'Pattern 1', 'non-life insurer', '({"Own funds"} = {"Excess"})',
              5, 0, 1.0]])
        actual = self._mine(df, p2)
        self.assertEqual(type(actual), type(expected),
                         "Pattern test 9: types do not match")
        pd.testing.assert_frame_equal(actual, expected)

    def test_pattern10(self):
        """correct_data() repairs the 'non-life insurerx' typo found via a pattern."""
        # Input: like the standard frame, but Insurer 2 has a typo in its Type
        # and Insurer 9 is a life insurer.
        df = pd.DataFrame(
            columns=['Name', 'Type', 'Assets', 'TV-life', 'TV-nonlife',
                     'Own funds', 'Excess'],
            data=[['Insurer 1', 'life insurer', 1000, 800, 0, 200, 200],
                  ['Insurer 2', 'non-life insurerx', 4000, 0, 3200, 800, 800],
                  ['Insurer 3', 'non-life insurer', 800, 0, 700, 100, 100],
                  ['Insurer 4', 'life insurer', 2500, 1800, 0, 700, 700],
                  ['Insurer 5', 'non-life insurer', 2100, 0, 2200, 200, 200],
                  ['Insurer 6', 'life insurer', 9000, 8800, 0, 200, 200],
                  ['Insurer 7', 'life insurer', 9000, 8800, 0, 200, 200],
                  ['Insurer 8', 'life insurer', 9000, 8800, 0, 200, 200],
                  ['Insurer 9', 'life insurer', 9000, 8800, 0, 200, 200],
                  ['Insurer 10', 'non-life insurer', 9000, 0, 8800, 200, 199.99]])
        df.set_index('Name', inplace=True)
        p2 = {'name': 'Pattern 1',
              'expression': 'IF {.*TV-l.*} =[@] THEN {.*Typ.*}= [@]'}
        # Expected output: same frame with the typo corrected.
        expected = pd.DataFrame(
            columns=['Name', 'Type', 'Assets', 'TV-life', 'TV-nonlife',
                     'Own funds', 'Excess'],
            data=[['Insurer 1', 'life insurer', 1000, 800, 0, 200, 200],
                  ['Insurer 2', 'non-life insurer', 4000, 0, 3200, 800, 800],
                  ['Insurer 3', 'non-life insurer', 800, 0, 700, 100, 100],
                  ['Insurer 4', 'life insurer', 2500, 1800, 0, 700, 700],
                  ['Insurer 5', 'non-life insurer', 2100, 0, 2200, 200, 200],
                  ['Insurer 6', 'life insurer', 9000, 8800, 0, 200, 200],
                  ['Insurer 7', 'life insurer', 9000, 8800, 0, 200, 200],
                  ['Insurer 8', 'life insurer', 9000, 8800, 0, 200, 200],
                  ['Insurer 9', 'life insurer', 9000, 8800, 0, 200, 200],
                  ['Insurer 10', 'non-life insurer', 9000, 0, 8800, 200, 199.99]])
        expected.set_index('Name', inplace=True)
        # Actual output: analyze() must run before correct_data().
        p = data_patterns.PatternMiner(df)
        p.find(p2)
        p.analyze()
        actual = p.correct_data()
        self.assertEqual(type(actual[0]), type(expected),
                         "Pattern test 10: types do not match")
        pd.testing.assert_frame_equal(actual[0], expected)

    def test_pattern11(self):
        """convert_columns_to_time pivots periods into columns per datapoint."""
        df = pd.DataFrame(columns=['Name', 'periode', 'Assets'],
                          data=[['Insurer 1', 2018, 1000],
                                ['Insurer 2', 2018, 4000],
                                ['Insurer 1', 2019, 800],
                                ['Insurer 2', 2019, 2500]])
        miner = data_patterns.PatternMiner(df)
        df_patterns = miner.convert_columns_to_time('Name', 'periode')
        actual = df_patterns.reset_index()
        expected = pd.DataFrame(columns=['Name', 'Datapoint', '2018', '2019'],
                                data=[['Insurer 1', 'Assets', 1000, 800],
                                      ['Insurer 2', 'Assets', 4000, 2500]])
        self.assertEqual(type(actual), type(expected),
                         "Pattern test 11: types do not match")
        pd.testing.assert_frame_equal(actual, expected)

    def test_pattern12(self):
        """convert_to_time pairs consecutive periods into (t-1)/(t) columns."""
        df = pd.DataFrame(
            columns=['Name', 'periode', 'Assets', 'TV-life', 'TV-nonlife',
                     'Own funds', 'Excess'],
            data=[['Insurer 1', 2018, 1000, 800, 0, 200, 200],
                  ['Insurer 2', 2018, 4000, 0, 3200, 800, 800],
                  ['Insurer 1', 2019, 800, 0, 700, 100, 100],
                  ['Insurer 2', 2019, 2500, 1800, 0, 700, 700]])
        df['periode'] = pd.to_datetime(df['periode'], format='%Y')
        miner = data_patterns.PatternMiner(df)
        df_patterns = miner.convert_to_time(['Name'], 'periode')
        actual = df_patterns.reset_index()
        expected = pd.DataFrame(
            columns=['periode', 'Name', 'Assets (t-1)', 'TV-life (t-1)',
                     'TV-nonlife (t-1)', 'Own funds (t-1)', 'Excess (t-1)',
                     'Assets (t)', 'TV-life (t)', 'TV-nonlife (t)',
                     'Own funds (t)', 'Excess (t)'],
            data=[['2018 - 2019', 'Insurer 1', 1000, 800, 0, 200, 200,
                   800, 0, 700, 100, 100],
                  ['2018 - 2019', 'Insurer 2', 4000, 0, 3200, 800, 800,
                   2500, 1800, 0, 700, 700]])
        self.assertEqual(type(actual), type(expected),
                         "Pattern test 12: types do not match")
        pd.testing.assert_frame_equal(actual, expected)

    def test_pattern13(self):
        """analyze() returns a ResultDataFrame with per-row pattern evaluation."""
        df = pd.DataFrame(columns=['Name', 'periode', 'Assets'],
                          data=[['Insurer 1', 2018, 0],
                                ['Insurer 2', 2018, 10],
                                ['Insurer 1', 2019, 0],
                                ['Insurer 2', 2019, 10]])
        p2 = {'name': 'Pattern 1',
              'expression': 'IF {.*Name.*} =[@] THEN {.*As.*}= [@]'}
        expected = pd.DataFrame(
            columns=['index', 'result_type', 'pattern_id', 'cluster', 'support',
                     'exceptions', 'confidence', 'pattern_def', 'P values',
                     'Q values'],
            data=[[0, True, 'Pattern 1', 0, 2, 0, 1.0,
                   'IF {"Name"} ="Insurer 1" THEN {"Assets"}= 0', 'Insurer 1', 0],
                  [1, True, 'Pattern 1', 0, 2, 0, 1.0,
                   'IF {"Name"} ="Insurer 2" THEN {"Assets"}= 10', 'Insurer 2', 10],
                  [2, True, 'Pattern 1', 0, 2, 0, 1.0,
                   'IF {"Name"} ="Insurer 1" THEN {"Assets"}= 0', 'Insurer 1', 0],
                  [3, True, 'Pattern 1', 0, 2, 0, 1.0,
                   'IF {"Name"} ="Insurer 2" THEN {"Assets"}= 10', 'Insurer 2', 10]])
        expected.set_index('index', inplace=True)
        expected = data_patterns.ResultDataFrame(expected)
        p = data_patterns.PatternMiner(df)
        p.find(p2)
        actual = p.analyze()
        # Fixed assertion message: previously said "Pattern test 9" (copy-paste).
        self.assertEqual(type(actual), type(expected),
                         "Pattern test 13: types do not match")
        pd.testing.assert_frame_equal(actual, expected)
| 67.908142
| 223
| 0.414043
| 3,062
| 32,528
| 4.34324
| 0.0516
| 0.096774
| 0.065494
| 0.058952
| 0.902023
| 0.878036
| 0.869163
| 0.838183
| 0.826679
| 0.798707
| 0
| 0.127468
| 0.440943
| 32,528
| 478
| 224
| 68.050209
| 0.603849
| 0.034555
| 0
| 0.66129
| 0
| 0.018817
| 0.244724
| 0
| 0
| 0
| 0
| 0
| 0.069892
| 1
| 0.034946
| false
| 0
| 0.010753
| 0
| 0.048387
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3075c21684cca8ce2353c39521986d6f6a14ff36
| 5,197
|
py
|
Python
|
opendis/misc.py
|
DMOC-C/DIS-PDU
|
af5c93b2081298e0c453592f62c8cc9484e3ded0
|
[
"BSD-2-Clause"
] | null | null | null |
opendis/misc.py
|
DMOC-C/DIS-PDU
|
af5c93b2081298e0c453592f62c8cc9484e3ded0
|
[
"BSD-2-Clause"
] | null | null | null |
opendis/misc.py
|
DMOC-C/DIS-PDU
|
af5c93b2081298e0c453592f62c8cc9484e3ded0
|
[
"BSD-2-Clause"
] | null | null | null |
# Maybe remove these. Not really used anymore.
class PduContainer(object):
    """XML-compatibility container holding a flat list of PDUs."""

    def __init__(self):
        """Initializer for PduContainer."""
        # Count of PDUs; only refreshed by parse(), not kept in sync with pdus.
        self.numberOfPdus = 0
        # Record sets (the contained PDU objects).
        self.pdus = []

    def serialize(self, outputStream):
        """Write the PDU count followed by each contained PDU."""
        outputStream.write_int(len(self.pdus))
        for pdu in self.pdus:
            pdu.serialize(outputStream)

    def parse(self, inputStream):
        """Parse a message. This may recursively call embedded objects."""
        self.numberOfPdus = inputStream.read_int()
        for _ in range(self.numberOfPdus):
            # NOTE(review): `null` is not defined in Python (looks like an
            # artifact of Java-to-Python code generation); this raises
            # NameError whenever numberOfPdus > 0 — TODO confirm intended
            # PDU factory.
            pdu = null()
            pdu.parse(inputStream)
            self.pdus.append(pdu)
class PduStream(object):
    """Non-DIS class, used to describe streams of PDUs when logging data to a
    SQL database. This is not in the DIS standard but can be helpful when
    logging to a Hibernate SQL database.
    """

    # Fixed on-wire byte lengths of the two text fields. These are the
    # lengths serialize()/parse() loop over; the original code spelled the
    # initial values as literal lists of hundreds of zeros, which was
    # unreadable and risked drifting out of sync with the loop bounds.
    DESCRIPTION_LENGTH = 512
    NAME_LENGTH = 256

    def __init__(self):
        """Initializer for PduStream."""
        # Longish description of this PDU stream (fixed 512-byte field).
        self.description = [0] * self.DESCRIPTION_LENGTH
        # Short description of this PDU stream (fixed 256-byte field).
        self.name = [0] * self.NAME_LENGTH
        # Start time of recording, in Unix time (seconds since epoch).
        self.startTime = 0
        # Stop time of recording, in Unix time (seconds since epoch).
        self.stopTime = 0
        # How many PDUs in this stream (refreshed by parse()).
        self.pduCount = 0
        # Variable length list of PDUs.
        self.pdusInStream = []

    def serialize(self, outputStream):
        """Serialize the stream: both text fields, times, count, then PDUs."""
        for idx in range(self.DESCRIPTION_LENGTH):
            outputStream.write_byte(self.description[idx])
        for idx in range(self.NAME_LENGTH):
            outputStream.write_byte(self.name[idx])
        outputStream.write_long(self.startTime)
        outputStream.write_long(self.stopTime)
        # Count is taken from the actual list, not the pduCount attribute.
        outputStream.write_unsigned_int(len(self.pdusInStream))
        for pdu in self.pdusInStream:
            pdu.serialize(outputStream)

    def parse(self, inputStream):
        """Parse a message. This may recursively call embedded objects."""
        self.description = [0] * self.DESCRIPTION_LENGTH
        for idx in range(self.DESCRIPTION_LENGTH):
            self.description[idx] = inputStream.read_byte()
        self.name = [0] * self.NAME_LENGTH
        for idx in range(self.NAME_LENGTH):
            self.name[idx] = inputStream.read_byte()
        self.startTime = inputStream.read_long()
        self.stopTime = inputStream.read_long()
        self.pduCount = inputStream.read_unsigned_int()
        # NOTE(review): appends without clearing pdusInStream, so repeated
        # parse() calls accumulate PDUs — preserved as-is.
        for _ in range(self.pduCount):
            # NOTE(review): `null` is not defined in Python (generated-code
            # artifact); raises NameError whenever pduCount > 0 — TODO
            # confirm intended PDU factory.
            element = null()
            element.parse(inputStream)
            self.pdusInStream.append(element)
| 57.744444
| 1,565
| 0.482971
| 1,100
| 5,197
| 2.261818
| 0.089091
| 0.615756
| 0.921222
| 1.22508
| 0.604502
| 0.563505
| 0.481511
| 0.428457
| 0.428457
| 0.428457
| 0
| 0.22247
| 0.309794
| 5,197
| 89
| 1,566
| 58.393258
| 0.471146
| 0.096017
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
062a241ff9c87897b86745d5dfb1554ce9ff90d3
| 115
|
py
|
Python
|
pesto/backend/utils/utils.py
|
saromanov/pesto
|
b14e92ac8295400fb56d84d7d05d5450e7dc7d61
|
[
"MIT"
] | null | null | null |
pesto/backend/utils/utils.py
|
saromanov/pesto
|
b14e92ac8295400fb56d84d7d05d5450e7dc7d61
|
[
"MIT"
] | 15
|
2021-01-09T18:54:03.000Z
|
2022-03-12T00:21:09.000Z
|
pesto/backend/utils/utils.py
|
saromanov/pesto
|
b14e92ac8295400fb56d84d7d05d5450e7dc7d61
|
[
"MIT"
] | null | null | null |
import datetime
def time_now_formatted(title):
    """Return *title* followed by the current local time as ':YYYY:MM:DD:HH'."""
    timestamp = datetime.datetime.now()
    return timestamp.strftime(f'{title}:%Y:%m:%d:%H')
| 28.75
| 67
| 0.721739
| 18
| 115
| 4.5
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 115
| 4
| 67
| 28.75
| 0.771429
| 0
| 0
| 0
| 0
| 0
| 0.163793
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
ebec2f6ce6373eb48b860c020ed743f8f695a6f3
| 31
|
py
|
Python
|
models/__init__.py
|
alarca94/senti-transfer
|
da83a072c8d471bc74aa25b237b5e301502db869
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
alarca94/senti-transfer
|
da83a072c8d471bc74aa25b237b5e301502db869
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
alarca94/senti-transfer
|
da83a072c8d471bc74aa25b237b5e301502db869
|
[
"MIT"
] | null | null | null |
from .transformers import BETO
| 15.5
| 30
| 0.83871
| 4
| 31
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
230fff0372659e7e279ca5a94a8f9d12b57998c6
| 85
|
py
|
Python
|
warehouse_management/warehouse_management/doctype/warehouse_receipt/warehouse_receipt.py
|
mohsinalimat/warehouse_management
|
691e9e465a75cd06f551d802e5c20a8b6b332df4
|
[
"MIT"
] | 2
|
2021-08-04T07:31:27.000Z
|
2021-12-27T21:59:50.000Z
|
warehouse_management/warehouse_management/doctype/warehouse_receipt/warehouse_receipt.py
|
mohsinalimat/warehouse_management
|
691e9e465a75cd06f551d802e5c20a8b6b332df4
|
[
"MIT"
] | null | null | null |
warehouse_management/warehouse_management/doctype/warehouse_receipt/warehouse_receipt.py
|
mohsinalimat/warehouse_management
|
691e9e465a75cd06f551d802e5c20a8b6b332df4
|
[
"MIT"
] | 3
|
2021-08-04T07:31:28.000Z
|
2021-11-03T13:41:49.000Z
|
from frappe.model.document import Document
class WarehouseReceipt(Document):
    """Frappe controller for the Warehouse Receipt DocType.

    Defines no custom behavior; everything is inherited from Document.
    """
| 14.166667
| 42
| 0.823529
| 10
| 85
| 7
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 85
| 5
| 43
| 17
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
235c17dc3c7fd7cced9e87a94f4721f8b8ec9ac7
| 134
|
py
|
Python
|
AdvancedSimulator/__init__.py
|
dan1510123/stock-history-simulator
|
a970531f650513a4c76c250796aeecc4d7e4c39b
|
[
"MIT"
] | 1
|
2021-12-25T21:06:50.000Z
|
2021-12-25T21:06:50.000Z
|
AdvancedSimulator/__init__.py
|
dan1510123/stock-history-simulator
|
a970531f650513a4c76c250796aeecc4d7e4c39b
|
[
"MIT"
] | null | null | null |
AdvancedSimulator/__init__.py
|
dan1510123/stock-history-simulator
|
a970531f650513a4c76c250796aeecc4d7e4c39b
|
[
"MIT"
] | null | null | null |
# Debug aid: announce at import time which package __init__ is running.
print(f'Invoking __init__.py for {__name__}')
# Eagerly import the GUI submodules so they are available as attributes of
# the AdvancedSimulator package after `import AdvancedSimulator`.
import AdvancedSimulator.SimulatorSetupGUI
import AdvancedSimulator.AdvancedSimulatorGUI
| 44.666667
| 45
| 0.880597
| 13
| 134
| 8.461538
| 0.846154
| 0.418182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052239
| 134
| 3
| 46
| 44.666667
| 0.866142
| 0
| 0
| 0
| 0
| 0
| 0.259259
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
88c710e8ffab8672cb0227fd295be88493198e6e
| 146
|
py
|
Python
|
dvats/all.py
|
vrodriguezf/deepvats
|
56f95b7d05e014ac0aefc87fc16627a38d1ebbf9
|
[
"Apache-2.0"
] | 2
|
2022-02-07T17:48:55.000Z
|
2022-02-07T17:48:57.000Z
|
dvats/all.py
|
pacmel/timecluster_hub
|
252cc8ef28af50501c6eba2d2c26dd5e8235bed6
|
[
"Apache-2.0"
] | 38
|
2021-09-24T08:53:58.000Z
|
2021-11-24T09:54:49.000Z
|
dvats/all.py
|
pacmel/timecluster_hub
|
252cc8ef28af50501c6eba2d2c26dd5e8235bed6
|
[
"Apache-2.0"
] | null | null | null |
import dvats
from .imports import *
from .load import *
from .utils import *
from .dr import *
from .encoder import *
from .visualization import *
| 20.857143
| 28
| 0.746575
| 20
| 146
| 5.45
| 0.45
| 0.458716
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171233
| 146
| 7
| 28
| 20.857143
| 0.900826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
88ee5cda71b3b5b1bad30ead900aaa4aa14bf8dd
| 23
|
py
|
Python
|
vae/src/models/__init__.py
|
ioangatop/GenerativeModels
|
c6924e91de475be36253f9f20b687d1e1c8b0dde
|
[
"MIT"
] | 4
|
2019-12-04T06:10:23.000Z
|
2021-09-14T06:17:24.000Z
|
vae/src/models/__init__.py
|
ioangatop/GenerativeModels
|
c6924e91de475be36253f9f20b687d1e1c8b0dde
|
[
"MIT"
] | null | null | null |
vae/src/models/__init__.py
|
ioangatop/GenerativeModels
|
c6924e91de475be36253f9f20b687d1e1c8b0dde
|
[
"MIT"
] | 1
|
2021-09-16T21:10:12.000Z
|
2021-09-16T21:10:12.000Z
|
from .model import VAE
| 11.5
| 22
| 0.782609
| 4
| 23
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 23
| 1
| 23
| 23
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
88f3dc4813af93dd0504013c0f3a4cadbb02bda2
| 24
|
py
|
Python
|
hylium/__init__.py
|
kpavao84/hylium
|
8778091d5b8fc0d7819e2237c2bb2f324e3911ec
|
[
"MIT"
] | 1
|
2021-06-08T22:15:53.000Z
|
2021-06-08T22:15:53.000Z
|
hylium/__init__.py
|
kpavao84/hylium
|
8778091d5b8fc0d7819e2237c2bb2f324e3911ec
|
[
"MIT"
] | 1
|
2021-06-01T22:46:31.000Z
|
2021-06-01T22:46:31.000Z
|
hylium/__init__.py
|
kwpav/hylium
|
8778091d5b8fc0d7819e2237c2bb2f324e3911ec
|
[
"MIT"
] | null | null | null |
import hy
import hylium
| 8
| 13
| 0.833333
| 4
| 24
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 2
| 14
| 12
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
88ffbbd758a3d3a7dc02c68869744837506941dc
| 20
|
py
|
Python
|
tests/test_cache.py
|
groutr/conda-tools
|
f421fd324f306a713c0cb1a3845306758ff981f4
|
[
"BSD-3-Clause"
] | 11
|
2016-07-07T00:27:01.000Z
|
2019-12-02T08:47:16.000Z
|
tests/test_cache.py
|
groutr/conda-tools
|
f421fd324f306a713c0cb1a3845306758ff981f4
|
[
"BSD-3-Clause"
] | 8
|
2016-07-15T14:59:27.000Z
|
2019-07-03T18:05:34.000Z
|
tests/test_cache.py
|
groutr/conda-tools
|
f421fd324f306a713c0cb1a3845306758ff981f4
|
[
"BSD-3-Clause"
] | 2
|
2016-07-13T22:24:51.000Z
|
2016-11-16T18:03:46.000Z
|
from .. import cache
| 20
| 20
| 0.75
| 3
| 20
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 20
| 1
| 20
| 20
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cc35208e2e1aed04cfc61463ff4a100140dc637a
| 136
|
py
|
Python
|
src/python/packages/wgne/__init__.py
|
susburrows/uvcmetrics
|
5a3c1266f3e5e97398a7671b01fa2816fb307c38
|
[
"X11",
"MIT"
] | null | null | null |
src/python/packages/wgne/__init__.py
|
susburrows/uvcmetrics
|
5a3c1266f3e5e97398a7671b01fa2816fb307c38
|
[
"X11",
"MIT"
] | null | null | null |
src/python/packages/wgne/__init__.py
|
susburrows/uvcmetrics
|
5a3c1266f3e5e97398a7671b01fa2816fb307c38
|
[
"X11",
"MIT"
] | null | null | null |
import io
from mean_climate_metrics_calculations import compute_metrics
import rms_xyt,cor_xyt,bias,rms_xy,annual_mean
import git
pass
| 19.428571
| 61
| 0.882353
| 23
| 136
| 4.869565
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 136
| 6
| 62
| 22.666667
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
cc5f65d19406e9854c53137ff9185eb7ea70ba1c
| 80,804
|
py
|
Python
|
pyinjective/proto/exchange/injective_spot_exchange_rpc_pb2.py
|
CtheSky/sdk-python
|
c1b1ae931f4970832466a004eb193027bdc1dea5
|
[
"Apache-2.0"
] | 10
|
2021-09-07T08:03:52.000Z
|
2022-03-08T08:39:30.000Z
|
pyinjective/proto/exchange/injective_spot_exchange_rpc_pb2.py
|
CtheSky/sdk-python
|
c1b1ae931f4970832466a004eb193027bdc1dea5
|
[
"Apache-2.0"
] | 39
|
2021-08-19T20:09:35.000Z
|
2022-03-22T19:51:59.000Z
|
pyinjective/proto/exchange/injective_spot_exchange_rpc_pb2.py
|
CtheSky/sdk-python
|
c1b1ae931f4970832466a004eb193027bdc1dea5
|
[
"Apache-2.0"
] | 5
|
2021-11-02T16:23:48.000Z
|
2022-01-20T22:30:05.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: exchange/injective_spot_exchange_rpc.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='exchange/injective_spot_exchange_rpc.proto',
package='injective_spot_exchange_rpc',
syntax='proto3',
serialized_options=b'Z\036/injective_spot_exchange_rpcpb',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n*exchange/injective_spot_exchange_rpc.proto\x12\x1binjective_spot_exchange_rpc\"P\n\x0eMarketsRequest\x12\x15\n\rmarket_status\x18\x01 \x01(\t\x12\x12\n\nbase_denom\x18\x02 \x01(\t\x12\x13\n\x0bquote_denom\x18\x03 \x01(\t\"O\n\x0fMarketsResponse\x12<\n\x07markets\x18\x01 \x03(\x0b\x32+.injective_spot_exchange_rpc.SpotMarketInfo\"\x81\x03\n\x0eSpotMarketInfo\x12\x11\n\tmarket_id\x18\x01 \x01(\t\x12\x15\n\rmarket_status\x18\x02 \x01(\t\x12\x0e\n\x06ticker\x18\x03 \x01(\t\x12\x12\n\nbase_denom\x18\x04 \x01(\t\x12?\n\x0f\x62\x61se_token_meta\x18\x05 \x01(\x0b\x32&.injective_spot_exchange_rpc.TokenMeta\x12\x13\n\x0bquote_denom\x18\x06 \x01(\t\x12@\n\x10quote_token_meta\x18\x07 \x01(\x0b\x32&.injective_spot_exchange_rpc.TokenMeta\x12\x16\n\x0emaker_fee_rate\x18\x08 \x01(\t\x12\x16\n\x0etaker_fee_rate\x18\t \x01(\t\x12\x1c\n\x14service_provider_fee\x18\n \x01(\t\x12\x1b\n\x13min_price_tick_size\x18\x0b \x01(\t\x12\x1e\n\x16min_quantity_tick_size\x18\x0c \x01(\t\"n\n\tTokenMeta\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x64ress\x18\x02 \x01(\t\x12\x0e\n\x06symbol\x18\x03 \x01(\t\x12\x0c\n\x04logo\x18\x04 \x01(\t\x12\x10\n\x08\x64\x65\x63imals\x18\x05 \x01(\x11\x12\x12\n\nupdated_at\x18\x06 \x01(\x12\"\"\n\rMarketRequest\x12\x11\n\tmarket_id\x18\x01 \x01(\t\"M\n\x0eMarketResponse\x12;\n\x06market\x18\x01 \x01(\x0b\x32+.injective_spot_exchange_rpc.SpotMarketInfo\"*\n\x14StreamMarketsRequest\x12\x12\n\nmarket_ids\x18\x01 \x03(\t\"\x7f\n\x15StreamMarketsResponse\x12;\n\x06market\x18\x01 \x01(\x0b\x32+.injective_spot_exchange_rpc.SpotMarketInfo\x12\x16\n\x0eoperation_type\x18\x02 \x01(\t\x12\x11\n\ttimestamp\x18\x03 \x01(\x12\"%\n\x10OrderbookRequest\x12\x11\n\tmarket_id\x18\x01 \x01(\t\"W\n\x11OrderbookResponse\x12\x42\n\torderbook\x18\x01 \x01(\x0b\x32/.injective_spot_exchange_rpc.SpotLimitOrderbook\"\x83\x01\n\x12SpotLimitOrderbook\x12\x35\n\x04\x62uys\x18\x01 \x03(\x0b\x32\'.injective_spot_exchange_rpc.PriceLevel\x12\x36\n\x05sells\x18\x02 
\x03(\x0b\x32\'.injective_spot_exchange_rpc.PriceLevel\"@\n\nPriceLevel\x12\r\n\x05price\x18\x01 \x01(\t\x12\x10\n\x08quantity\x18\x02 \x01(\t\x12\x11\n\ttimestamp\x18\x03 \x01(\x12\",\n\x16StreamOrderbookRequest\x12\x12\n\nmarket_ids\x18\x01 \x03(\t\"\x9b\x01\n\x17StreamOrderbookResponse\x12\x42\n\torderbook\x18\x01 \x01(\x0b\x32/.injective_spot_exchange_rpc.SpotLimitOrderbook\x12\x16\n\x0eoperation_type\x18\x02 \x01(\t\x12\x11\n\ttimestamp\x18\x03 \x01(\x12\x12\x11\n\tmarket_id\x18\x04 \x01(\t\"M\n\rOrdersRequest\x12\x11\n\tmarket_id\x18\x01 \x01(\t\x12\x12\n\norder_side\x18\x02 \x01(\t\x12\x15\n\rsubaccount_id\x18\x03 \x01(\t\"M\n\x0eOrdersResponse\x12;\n\x06orders\x18\x01 \x03(\x0b\x32+.injective_spot_exchange_rpc.SpotLimitOrder\"\x83\x02\n\x0eSpotLimitOrder\x12\x12\n\norder_hash\x18\x01 \x01(\t\x12\x12\n\norder_side\x18\x02 \x01(\t\x12\x11\n\tmarket_id\x18\x03 \x01(\t\x12\x15\n\rsubaccount_id\x18\x04 \x01(\t\x12\r\n\x05price\x18\x05 \x01(\t\x12\x10\n\x08quantity\x18\x06 \x01(\t\x12\x19\n\x11unfilled_quantity\x18\x07 \x01(\t\x12\x15\n\rtrigger_price\x18\x08 \x01(\t\x12\x15\n\rfee_recipient\x18\t \x01(\t\x12\r\n\x05state\x18\n \x01(\t\x12\x12\n\ncreated_at\x18\x0b \x01(\x12\x12\x12\n\nupdated_at\x18\x0c \x01(\x12\"S\n\x13StreamOrdersRequest\x12\x11\n\tmarket_id\x18\x01 \x01(\t\x12\x12\n\norder_side\x18\x02 \x01(\t\x12\x15\n\rsubaccount_id\x18\x03 \x01(\t\"}\n\x14StreamOrdersResponse\x12:\n\x05order\x18\x01 \x01(\x0b\x32+.injective_spot_exchange_rpc.SpotLimitOrder\x12\x16\n\x0eoperation_type\x18\x02 \x01(\t\x12\x11\n\ttimestamp\x18\x03 \x01(\x12\"\x81\x01\n\rTradesRequest\x12\x11\n\tmarket_id\x18\x01 \x01(\t\x12\x16\n\x0e\x65xecution_side\x18\x02 \x01(\t\x12\x11\n\tdirection\x18\x03 \x01(\t\x12\x15\n\rsubaccount_id\x18\x04 \x01(\t\x12\x0c\n\x04skip\x18\x05 \x01(\x04\x12\r\n\x05limit\x18\x06 \x01(\x11\"H\n\x0eTradesResponse\x12\x36\n\x06trades\x18\x01 \x03(\x0b\x32&.injective_spot_exchange_rpc.SpotTrade\"\xf1\x01\n\tSpotTrade\x12\x12\n\norder_hash\x18\x01 
\x01(\t\x12\x15\n\rsubaccount_id\x18\x02 \x01(\t\x12\x11\n\tmarket_id\x18\x03 \x01(\t\x12\x1c\n\x14trade_execution_type\x18\x04 \x01(\t\x12\x17\n\x0ftrade_direction\x18\x05 \x01(\t\x12\x36\n\x05price\x18\x06 \x01(\x0b\x32\'.injective_spot_exchange_rpc.PriceLevel\x12\x0b\n\x03\x66\x65\x65\x18\x07 \x01(\t\x12\x13\n\x0b\x65xecuted_at\x18\x08 \x01(\x12\x12\x15\n\rfee_recipient\x18\t \x01(\t\"\x87\x01\n\x13StreamTradesRequest\x12\x11\n\tmarket_id\x18\x01 \x01(\t\x12\x16\n\x0e\x65xecution_side\x18\x02 \x01(\t\x12\x11\n\tdirection\x18\x03 \x01(\t\x12\x15\n\rsubaccount_id\x18\x04 \x01(\t\x12\x0c\n\x04skip\x18\x05 \x01(\x04\x12\r\n\x05limit\x18\x06 \x01(\x11\"x\n\x14StreamTradesResponse\x12\x35\n\x05trade\x18\x01 \x01(\x0b\x32&.injective_spot_exchange_rpc.SpotTrade\x12\x16\n\x0eoperation_type\x18\x02 \x01(\t\x12\x11\n\ttimestamp\x18\x03 \x01(\x12\"G\n\x1bSubaccountOrdersListRequest\x12\x15\n\rsubaccount_id\x18\x01 \x01(\t\x12\x11\n\tmarket_id\x18\x02 \x01(\t\"[\n\x1cSubaccountOrdersListResponse\x12;\n\x06orders\x18\x01 \x03(\x0b\x32+.injective_spot_exchange_rpc.SpotLimitOrder\"r\n\x1bSubaccountTradesListRequest\x12\x15\n\rsubaccount_id\x18\x01 \x01(\t\x12\x11\n\tmarket_id\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_type\x18\x03 \x01(\t\x12\x11\n\tdirection\x18\x04 \x01(\t\"V\n\x1cSubaccountTradesListResponse\x12\x36\n\x06trades\x18\x01 
\x03(\x0b\x32&.injective_spot_exchange_rpc.SpotTrade2\x99\n\n\x18InjectiveSpotExchangeRPC\x12\x64\n\x07Markets\x12+.injective_spot_exchange_rpc.MarketsRequest\x1a,.injective_spot_exchange_rpc.MarketsResponse\x12\x61\n\x06Market\x12*.injective_spot_exchange_rpc.MarketRequest\x1a+.injective_spot_exchange_rpc.MarketResponse\x12x\n\rStreamMarkets\x12\x31.injective_spot_exchange_rpc.StreamMarketsRequest\x1a\x32.injective_spot_exchange_rpc.StreamMarketsResponse0\x01\x12j\n\tOrderbook\x12-.injective_spot_exchange_rpc.OrderbookRequest\x1a..injective_spot_exchange_rpc.OrderbookResponse\x12~\n\x0fStreamOrderbook\x12\x33.injective_spot_exchange_rpc.StreamOrderbookRequest\x1a\x34.injective_spot_exchange_rpc.StreamOrderbookResponse0\x01\x12\x61\n\x06Orders\x12*.injective_spot_exchange_rpc.OrdersRequest\x1a+.injective_spot_exchange_rpc.OrdersResponse\x12u\n\x0cStreamOrders\x12\x30.injective_spot_exchange_rpc.StreamOrdersRequest\x1a\x31.injective_spot_exchange_rpc.StreamOrdersResponse0\x01\x12\x61\n\x06Trades\x12*.injective_spot_exchange_rpc.TradesRequest\x1a+.injective_spot_exchange_rpc.TradesResponse\x12u\n\x0cStreamTrades\x12\x30.injective_spot_exchange_rpc.StreamTradesRequest\x1a\x31.injective_spot_exchange_rpc.StreamTradesResponse0\x01\x12\x8b\x01\n\x14SubaccountOrdersList\x12\x38.injective_spot_exchange_rpc.SubaccountOrdersListRequest\x1a\x39.injective_spot_exchange_rpc.SubaccountOrdersListResponse\x12\x8b\x01\n\x14SubaccountTradesList\x12\x38.injective_spot_exchange_rpc.SubaccountTradesListRequest\x1a\x39.injective_spot_exchange_rpc.SubaccountTradesListResponseB Z\x1e/injective_spot_exchange_rpcpbb\x06proto3'
)
_MARKETSREQUEST = _descriptor.Descriptor(
name='MarketsRequest',
full_name='injective_spot_exchange_rpc.MarketsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='market_status', full_name='injective_spot_exchange_rpc.MarketsRequest.market_status', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='base_denom', full_name='injective_spot_exchange_rpc.MarketsRequest.base_denom', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='quote_denom', full_name='injective_spot_exchange_rpc.MarketsRequest.quote_denom', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=75,
serialized_end=155,
)
_MARKETSRESPONSE = _descriptor.Descriptor(
name='MarketsResponse',
full_name='injective_spot_exchange_rpc.MarketsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='markets', full_name='injective_spot_exchange_rpc.MarketsResponse.markets', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=157,
serialized_end=236,
)
_SPOTMARKETINFO = _descriptor.Descriptor(
name='SpotMarketInfo',
full_name='injective_spot_exchange_rpc.SpotMarketInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='market_id', full_name='injective_spot_exchange_rpc.SpotMarketInfo.market_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='market_status', full_name='injective_spot_exchange_rpc.SpotMarketInfo.market_status', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ticker', full_name='injective_spot_exchange_rpc.SpotMarketInfo.ticker', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='base_denom', full_name='injective_spot_exchange_rpc.SpotMarketInfo.base_denom', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='base_token_meta', full_name='injective_spot_exchange_rpc.SpotMarketInfo.base_token_meta', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='quote_denom', full_name='injective_spot_exchange_rpc.SpotMarketInfo.quote_denom', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='quote_token_meta', full_name='injective_spot_exchange_rpc.SpotMarketInfo.quote_token_meta', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='maker_fee_rate', full_name='injective_spot_exchange_rpc.SpotMarketInfo.maker_fee_rate', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='taker_fee_rate', full_name='injective_spot_exchange_rpc.SpotMarketInfo.taker_fee_rate', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='service_provider_fee', full_name='injective_spot_exchange_rpc.SpotMarketInfo.service_provider_fee', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_price_tick_size', full_name='injective_spot_exchange_rpc.SpotMarketInfo.min_price_tick_size', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_quantity_tick_size', full_name='injective_spot_exchange_rpc.SpotMarketInfo.min_quantity_tick_size', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=239,
serialized_end=624,
)
_TOKENMETA = _descriptor.Descriptor(
name='TokenMeta',
full_name='injective_spot_exchange_rpc.TokenMeta',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='injective_spot_exchange_rpc.TokenMeta.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='address', full_name='injective_spot_exchange_rpc.TokenMeta.address', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='symbol', full_name='injective_spot_exchange_rpc.TokenMeta.symbol', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='logo', full_name='injective_spot_exchange_rpc.TokenMeta.logo', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='decimals', full_name='injective_spot_exchange_rpc.TokenMeta.decimals', index=4,
number=5, type=17, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='updated_at', full_name='injective_spot_exchange_rpc.TokenMeta.updated_at', index=5,
number=6, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=626,
serialized_end=736,
)
_MARKETREQUEST = _descriptor.Descriptor(
name='MarketRequest',
full_name='injective_spot_exchange_rpc.MarketRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='market_id', full_name='injective_spot_exchange_rpc.MarketRequest.market_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=738,
serialized_end=772,
)
_MARKETRESPONSE = _descriptor.Descriptor(
name='MarketResponse',
full_name='injective_spot_exchange_rpc.MarketResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='market', full_name='injective_spot_exchange_rpc.MarketResponse.market', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=774,
serialized_end=851,
)
_STREAMMARKETSREQUEST = _descriptor.Descriptor(
name='StreamMarketsRequest',
full_name='injective_spot_exchange_rpc.StreamMarketsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='market_ids', full_name='injective_spot_exchange_rpc.StreamMarketsRequest.market_ids', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=853,
serialized_end=895,
)
_STREAMMARKETSRESPONSE = _descriptor.Descriptor(
name='StreamMarketsResponse',
full_name='injective_spot_exchange_rpc.StreamMarketsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='market', full_name='injective_spot_exchange_rpc.StreamMarketsResponse.market', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='operation_type', full_name='injective_spot_exchange_rpc.StreamMarketsResponse.operation_type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timestamp', full_name='injective_spot_exchange_rpc.StreamMarketsResponse.timestamp', index=2,
number=3, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=897,
serialized_end=1024,
)
_ORDERBOOKREQUEST = _descriptor.Descriptor(
name='OrderbookRequest',
full_name='injective_spot_exchange_rpc.OrderbookRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='market_id', full_name='injective_spot_exchange_rpc.OrderbookRequest.market_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1026,
serialized_end=1063,
)
_ORDERBOOKRESPONSE = _descriptor.Descriptor(
name='OrderbookResponse',
full_name='injective_spot_exchange_rpc.OrderbookResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='orderbook', full_name='injective_spot_exchange_rpc.OrderbookResponse.orderbook', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1065,
serialized_end=1152,
)
_SPOTLIMITORDERBOOK = _descriptor.Descriptor(
name='SpotLimitOrderbook',
full_name='injective_spot_exchange_rpc.SpotLimitOrderbook',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='buys', full_name='injective_spot_exchange_rpc.SpotLimitOrderbook.buys', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sells', full_name='injective_spot_exchange_rpc.SpotLimitOrderbook.sells', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1155,
serialized_end=1286,
)
_PRICELEVEL = _descriptor.Descriptor(
name='PriceLevel',
full_name='injective_spot_exchange_rpc.PriceLevel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='price', full_name='injective_spot_exchange_rpc.PriceLevel.price', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='quantity', full_name='injective_spot_exchange_rpc.PriceLevel.quantity', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timestamp', full_name='injective_spot_exchange_rpc.PriceLevel.timestamp', index=2,
number=3, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1288,
serialized_end=1352,
)
_STREAMORDERBOOKREQUEST = _descriptor.Descriptor(
name='StreamOrderbookRequest',
full_name='injective_spot_exchange_rpc.StreamOrderbookRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='market_ids', full_name='injective_spot_exchange_rpc.StreamOrderbookRequest.market_ids', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1354,
serialized_end=1398,
)
_STREAMORDERBOOKRESPONSE = _descriptor.Descriptor(
name='StreamOrderbookResponse',
full_name='injective_spot_exchange_rpc.StreamOrderbookResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='orderbook', full_name='injective_spot_exchange_rpc.StreamOrderbookResponse.orderbook', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='operation_type', full_name='injective_spot_exchange_rpc.StreamOrderbookResponse.operation_type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timestamp', full_name='injective_spot_exchange_rpc.StreamOrderbookResponse.timestamp', index=2,
number=3, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='market_id', full_name='injective_spot_exchange_rpc.StreamOrderbookResponse.market_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1401,
serialized_end=1556,
)
_ORDERSREQUEST = _descriptor.Descriptor(
name='OrdersRequest',
full_name='injective_spot_exchange_rpc.OrdersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='market_id', full_name='injective_spot_exchange_rpc.OrdersRequest.market_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='order_side', full_name='injective_spot_exchange_rpc.OrdersRequest.order_side', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subaccount_id', full_name='injective_spot_exchange_rpc.OrdersRequest.subaccount_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1558,
serialized_end=1635,
)
_ORDERSRESPONSE = _descriptor.Descriptor(
name='OrdersResponse',
full_name='injective_spot_exchange_rpc.OrdersResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='orders', full_name='injective_spot_exchange_rpc.OrdersResponse.orders', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1637,
serialized_end=1714,
)
_SPOTLIMITORDER = _descriptor.Descriptor(
name='SpotLimitOrder',
full_name='injective_spot_exchange_rpc.SpotLimitOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='order_hash', full_name='injective_spot_exchange_rpc.SpotLimitOrder.order_hash', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='order_side', full_name='injective_spot_exchange_rpc.SpotLimitOrder.order_side', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='market_id', full_name='injective_spot_exchange_rpc.SpotLimitOrder.market_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subaccount_id', full_name='injective_spot_exchange_rpc.SpotLimitOrder.subaccount_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='price', full_name='injective_spot_exchange_rpc.SpotLimitOrder.price', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='quantity', full_name='injective_spot_exchange_rpc.SpotLimitOrder.quantity', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unfilled_quantity', full_name='injective_spot_exchange_rpc.SpotLimitOrder.unfilled_quantity', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='trigger_price', full_name='injective_spot_exchange_rpc.SpotLimitOrder.trigger_price', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fee_recipient', full_name='injective_spot_exchange_rpc.SpotLimitOrder.fee_recipient', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='state', full_name='injective_spot_exchange_rpc.SpotLimitOrder.state', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_at', full_name='injective_spot_exchange_rpc.SpotLimitOrder.created_at', index=10,
number=11, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='updated_at', full_name='injective_spot_exchange_rpc.SpotLimitOrder.updated_at', index=11,
number=12, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1717,
serialized_end=1976,
)
_STREAMORDERSREQUEST = _descriptor.Descriptor(
name='StreamOrdersRequest',
full_name='injective_spot_exchange_rpc.StreamOrdersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='market_id', full_name='injective_spot_exchange_rpc.StreamOrdersRequest.market_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='order_side', full_name='injective_spot_exchange_rpc.StreamOrdersRequest.order_side', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subaccount_id', full_name='injective_spot_exchange_rpc.StreamOrdersRequest.subaccount_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1978,
serialized_end=2061,
)
_STREAMORDERSRESPONSE = _descriptor.Descriptor(
name='StreamOrdersResponse',
full_name='injective_spot_exchange_rpc.StreamOrdersResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='order', full_name='injective_spot_exchange_rpc.StreamOrdersResponse.order', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='operation_type', full_name='injective_spot_exchange_rpc.StreamOrdersResponse.operation_type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timestamp', full_name='injective_spot_exchange_rpc.StreamOrdersResponse.timestamp', index=2,
number=3, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2063,
serialized_end=2188,
)
_TRADESREQUEST = _descriptor.Descriptor(
name='TradesRequest',
full_name='injective_spot_exchange_rpc.TradesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='market_id', full_name='injective_spot_exchange_rpc.TradesRequest.market_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='execution_side', full_name='injective_spot_exchange_rpc.TradesRequest.execution_side', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='direction', full_name='injective_spot_exchange_rpc.TradesRequest.direction', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subaccount_id', full_name='injective_spot_exchange_rpc.TradesRequest.subaccount_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='skip', full_name='injective_spot_exchange_rpc.TradesRequest.skip', index=4,
number=5, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='limit', full_name='injective_spot_exchange_rpc.TradesRequest.limit', index=5,
number=6, type=17, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2191,
serialized_end=2320,
)
_TRADESRESPONSE = _descriptor.Descriptor(
name='TradesResponse',
full_name='injective_spot_exchange_rpc.TradesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='trades', full_name='injective_spot_exchange_rpc.TradesResponse.trades', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2322,
serialized_end=2394,
)
_SPOTTRADE = _descriptor.Descriptor(
name='SpotTrade',
full_name='injective_spot_exchange_rpc.SpotTrade',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='order_hash', full_name='injective_spot_exchange_rpc.SpotTrade.order_hash', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subaccount_id', full_name='injective_spot_exchange_rpc.SpotTrade.subaccount_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='market_id', full_name='injective_spot_exchange_rpc.SpotTrade.market_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='trade_execution_type', full_name='injective_spot_exchange_rpc.SpotTrade.trade_execution_type', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='trade_direction', full_name='injective_spot_exchange_rpc.SpotTrade.trade_direction', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='price', full_name='injective_spot_exchange_rpc.SpotTrade.price', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fee', full_name='injective_spot_exchange_rpc.SpotTrade.fee', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='executed_at', full_name='injective_spot_exchange_rpc.SpotTrade.executed_at', index=7,
number=8, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fee_recipient', full_name='injective_spot_exchange_rpc.SpotTrade.fee_recipient', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2397,
serialized_end=2638,
)
_STREAMTRADESREQUEST = _descriptor.Descriptor(
name='StreamTradesRequest',
full_name='injective_spot_exchange_rpc.StreamTradesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='market_id', full_name='injective_spot_exchange_rpc.StreamTradesRequest.market_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='execution_side', full_name='injective_spot_exchange_rpc.StreamTradesRequest.execution_side', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='direction', full_name='injective_spot_exchange_rpc.StreamTradesRequest.direction', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subaccount_id', full_name='injective_spot_exchange_rpc.StreamTradesRequest.subaccount_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='skip', full_name='injective_spot_exchange_rpc.StreamTradesRequest.skip', index=4,
number=5, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='limit', full_name='injective_spot_exchange_rpc.StreamTradesRequest.limit', index=5,
number=6, type=17, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2641,
serialized_end=2776,
)
_STREAMTRADESRESPONSE = _descriptor.Descriptor(
name='StreamTradesResponse',
full_name='injective_spot_exchange_rpc.StreamTradesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='trade', full_name='injective_spot_exchange_rpc.StreamTradesResponse.trade', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='operation_type', full_name='injective_spot_exchange_rpc.StreamTradesResponse.operation_type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timestamp', full_name='injective_spot_exchange_rpc.StreamTradesResponse.timestamp', index=2,
number=3, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2778,
serialized_end=2898,
)
_SUBACCOUNTORDERSLISTREQUEST = _descriptor.Descriptor(
name='SubaccountOrdersListRequest',
full_name='injective_spot_exchange_rpc.SubaccountOrdersListRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='subaccount_id', full_name='injective_spot_exchange_rpc.SubaccountOrdersListRequest.subaccount_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='market_id', full_name='injective_spot_exchange_rpc.SubaccountOrdersListRequest.market_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2900,
serialized_end=2971,
)
_SUBACCOUNTORDERSLISTRESPONSE = _descriptor.Descriptor(
name='SubaccountOrdersListResponse',
full_name='injective_spot_exchange_rpc.SubaccountOrdersListResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='orders', full_name='injective_spot_exchange_rpc.SubaccountOrdersListResponse.orders', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2973,
serialized_end=3064,
)
_SUBACCOUNTTRADESLISTREQUEST = _descriptor.Descriptor(
name='SubaccountTradesListRequest',
full_name='injective_spot_exchange_rpc.SubaccountTradesListRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='subaccount_id', full_name='injective_spot_exchange_rpc.SubaccountTradesListRequest.subaccount_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='market_id', full_name='injective_spot_exchange_rpc.SubaccountTradesListRequest.market_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='execution_type', full_name='injective_spot_exchange_rpc.SubaccountTradesListRequest.execution_type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='direction', full_name='injective_spot_exchange_rpc.SubaccountTradesListRequest.direction', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3066,
serialized_end=3180,
)
_SUBACCOUNTTRADESLISTRESPONSE = _descriptor.Descriptor(
name='SubaccountTradesListResponse',
full_name='injective_spot_exchange_rpc.SubaccountTradesListResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='trades', full_name='injective_spot_exchange_rpc.SubaccountTradesListResponse.trades', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3182,
serialized_end=3268,
)
# Generated wiring: resolve message-typed fields (type=11) to their target
# descriptors, which could not be set while the descriptors were being built.
_MARKETSRESPONSE.fields_by_name['markets'].message_type = _SPOTMARKETINFO
_SPOTMARKETINFO.fields_by_name['base_token_meta'].message_type = _TOKENMETA
_SPOTMARKETINFO.fields_by_name['quote_token_meta'].message_type = _TOKENMETA
_MARKETRESPONSE.fields_by_name['market'].message_type = _SPOTMARKETINFO
_STREAMMARKETSRESPONSE.fields_by_name['market'].message_type = _SPOTMARKETINFO
_ORDERBOOKRESPONSE.fields_by_name['orderbook'].message_type = _SPOTLIMITORDERBOOK
_SPOTLIMITORDERBOOK.fields_by_name['buys'].message_type = _PRICELEVEL
_SPOTLIMITORDERBOOK.fields_by_name['sells'].message_type = _PRICELEVEL
_STREAMORDERBOOKRESPONSE.fields_by_name['orderbook'].message_type = _SPOTLIMITORDERBOOK
_ORDERSRESPONSE.fields_by_name['orders'].message_type = _SPOTLIMITORDER
_STREAMORDERSRESPONSE.fields_by_name['order'].message_type = _SPOTLIMITORDER
_TRADESRESPONSE.fields_by_name['trades'].message_type = _SPOTTRADE
_SPOTTRADE.fields_by_name['price'].message_type = _PRICELEVEL
_STREAMTRADESRESPONSE.fields_by_name['trade'].message_type = _SPOTTRADE
_SUBACCOUNTORDERSLISTRESPONSE.fields_by_name['orders'].message_type = _SPOTLIMITORDER
_SUBACCOUNTTRADESLISTRESPONSE.fields_by_name['trades'].message_type = _SPOTTRADE
DESCRIPTOR.message_types_by_name['MarketsRequest'] = _MARKETSREQUEST
DESCRIPTOR.message_types_by_name['MarketsResponse'] = _MARKETSRESPONSE
DESCRIPTOR.message_types_by_name['SpotMarketInfo'] = _SPOTMARKETINFO
DESCRIPTOR.message_types_by_name['TokenMeta'] = _TOKENMETA
DESCRIPTOR.message_types_by_name['MarketRequest'] = _MARKETREQUEST
DESCRIPTOR.message_types_by_name['MarketResponse'] = _MARKETRESPONSE
DESCRIPTOR.message_types_by_name['StreamMarketsRequest'] = _STREAMMARKETSREQUEST
DESCRIPTOR.message_types_by_name['StreamMarketsResponse'] = _STREAMMARKETSRESPONSE
DESCRIPTOR.message_types_by_name['OrderbookRequest'] = _ORDERBOOKREQUEST
DESCRIPTOR.message_types_by_name['OrderbookResponse'] = _ORDERBOOKRESPONSE
DESCRIPTOR.message_types_by_name['SpotLimitOrderbook'] = _SPOTLIMITORDERBOOK
DESCRIPTOR.message_types_by_name['PriceLevel'] = _PRICELEVEL
DESCRIPTOR.message_types_by_name['StreamOrderbookRequest'] = _STREAMORDERBOOKREQUEST
DESCRIPTOR.message_types_by_name['StreamOrderbookResponse'] = _STREAMORDERBOOKRESPONSE
DESCRIPTOR.message_types_by_name['OrdersRequest'] = _ORDERSREQUEST
DESCRIPTOR.message_types_by_name['OrdersResponse'] = _ORDERSRESPONSE
DESCRIPTOR.message_types_by_name['SpotLimitOrder'] = _SPOTLIMITORDER
DESCRIPTOR.message_types_by_name['StreamOrdersRequest'] = _STREAMORDERSREQUEST
DESCRIPTOR.message_types_by_name['StreamOrdersResponse'] = _STREAMORDERSRESPONSE
DESCRIPTOR.message_types_by_name['TradesRequest'] = _TRADESREQUEST
DESCRIPTOR.message_types_by_name['TradesResponse'] = _TRADESRESPONSE
DESCRIPTOR.message_types_by_name['SpotTrade'] = _SPOTTRADE
DESCRIPTOR.message_types_by_name['StreamTradesRequest'] = _STREAMTRADESREQUEST
DESCRIPTOR.message_types_by_name['StreamTradesResponse'] = _STREAMTRADESRESPONSE
DESCRIPTOR.message_types_by_name['SubaccountOrdersListRequest'] = _SUBACCOUNTORDERSLISTREQUEST
DESCRIPTOR.message_types_by_name['SubaccountOrdersListResponse'] = _SUBACCOUNTORDERSLISTRESPONSE
DESCRIPTOR.message_types_by_name['SubaccountTradesListRequest'] = _SUBACCOUNTTRADESLISTREQUEST
DESCRIPTOR.message_types_by_name['SubaccountTradesListResponse'] = _SUBACCOUNTTRADESLISTRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
MarketsRequest = _reflection.GeneratedProtocolMessageType('MarketsRequest', (_message.Message,), {
'DESCRIPTOR' : _MARKETSREQUEST,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.MarketsRequest)
})
_sym_db.RegisterMessage(MarketsRequest)
MarketsResponse = _reflection.GeneratedProtocolMessageType('MarketsResponse', (_message.Message,), {
'DESCRIPTOR' : _MARKETSRESPONSE,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.MarketsResponse)
})
_sym_db.RegisterMessage(MarketsResponse)
SpotMarketInfo = _reflection.GeneratedProtocolMessageType('SpotMarketInfo', (_message.Message,), {
'DESCRIPTOR' : _SPOTMARKETINFO,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.SpotMarketInfo)
})
_sym_db.RegisterMessage(SpotMarketInfo)
TokenMeta = _reflection.GeneratedProtocolMessageType('TokenMeta', (_message.Message,), {
'DESCRIPTOR' : _TOKENMETA,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.TokenMeta)
})
_sym_db.RegisterMessage(TokenMeta)
MarketRequest = _reflection.GeneratedProtocolMessageType('MarketRequest', (_message.Message,), {
'DESCRIPTOR' : _MARKETREQUEST,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.MarketRequest)
})
_sym_db.RegisterMessage(MarketRequest)
MarketResponse = _reflection.GeneratedProtocolMessageType('MarketResponse', (_message.Message,), {
'DESCRIPTOR' : _MARKETRESPONSE,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.MarketResponse)
})
_sym_db.RegisterMessage(MarketResponse)
StreamMarketsRequest = _reflection.GeneratedProtocolMessageType('StreamMarketsRequest', (_message.Message,), {
'DESCRIPTOR' : _STREAMMARKETSREQUEST,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.StreamMarketsRequest)
})
_sym_db.RegisterMessage(StreamMarketsRequest)
StreamMarketsResponse = _reflection.GeneratedProtocolMessageType('StreamMarketsResponse', (_message.Message,), {
'DESCRIPTOR' : _STREAMMARKETSRESPONSE,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.StreamMarketsResponse)
})
_sym_db.RegisterMessage(StreamMarketsResponse)
OrderbookRequest = _reflection.GeneratedProtocolMessageType('OrderbookRequest', (_message.Message,), {
'DESCRIPTOR' : _ORDERBOOKREQUEST,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.OrderbookRequest)
})
_sym_db.RegisterMessage(OrderbookRequest)
OrderbookResponse = _reflection.GeneratedProtocolMessageType('OrderbookResponse', (_message.Message,), {
'DESCRIPTOR' : _ORDERBOOKRESPONSE,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.OrderbookResponse)
})
_sym_db.RegisterMessage(OrderbookResponse)
SpotLimitOrderbook = _reflection.GeneratedProtocolMessageType('SpotLimitOrderbook', (_message.Message,), {
'DESCRIPTOR' : _SPOTLIMITORDERBOOK,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.SpotLimitOrderbook)
})
_sym_db.RegisterMessage(SpotLimitOrderbook)
PriceLevel = _reflection.GeneratedProtocolMessageType('PriceLevel', (_message.Message,), {
'DESCRIPTOR' : _PRICELEVEL,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.PriceLevel)
})
_sym_db.RegisterMessage(PriceLevel)
StreamOrderbookRequest = _reflection.GeneratedProtocolMessageType('StreamOrderbookRequest', (_message.Message,), {
'DESCRIPTOR' : _STREAMORDERBOOKREQUEST,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.StreamOrderbookRequest)
})
_sym_db.RegisterMessage(StreamOrderbookRequest)
StreamOrderbookResponse = _reflection.GeneratedProtocolMessageType('StreamOrderbookResponse', (_message.Message,), {
'DESCRIPTOR' : _STREAMORDERBOOKRESPONSE,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.StreamOrderbookResponse)
})
_sym_db.RegisterMessage(StreamOrderbookResponse)
OrdersRequest = _reflection.GeneratedProtocolMessageType('OrdersRequest', (_message.Message,), {
'DESCRIPTOR' : _ORDERSREQUEST,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.OrdersRequest)
})
_sym_db.RegisterMessage(OrdersRequest)
OrdersResponse = _reflection.GeneratedProtocolMessageType('OrdersResponse', (_message.Message,), {
'DESCRIPTOR' : _ORDERSRESPONSE,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.OrdersResponse)
})
_sym_db.RegisterMessage(OrdersResponse)
SpotLimitOrder = _reflection.GeneratedProtocolMessageType('SpotLimitOrder', (_message.Message,), {
'DESCRIPTOR' : _SPOTLIMITORDER,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.SpotLimitOrder)
})
_sym_db.RegisterMessage(SpotLimitOrder)
StreamOrdersRequest = _reflection.GeneratedProtocolMessageType('StreamOrdersRequest', (_message.Message,), {
'DESCRIPTOR' : _STREAMORDERSREQUEST,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.StreamOrdersRequest)
})
_sym_db.RegisterMessage(StreamOrdersRequest)
StreamOrdersResponse = _reflection.GeneratedProtocolMessageType('StreamOrdersResponse', (_message.Message,), {
'DESCRIPTOR' : _STREAMORDERSRESPONSE,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.StreamOrdersResponse)
})
_sym_db.RegisterMessage(StreamOrdersResponse)
TradesRequest = _reflection.GeneratedProtocolMessageType('TradesRequest', (_message.Message,), {
'DESCRIPTOR' : _TRADESREQUEST,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.TradesRequest)
})
_sym_db.RegisterMessage(TradesRequest)
TradesResponse = _reflection.GeneratedProtocolMessageType('TradesResponse', (_message.Message,), {
'DESCRIPTOR' : _TRADESRESPONSE,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.TradesResponse)
})
_sym_db.RegisterMessage(TradesResponse)
SpotTrade = _reflection.GeneratedProtocolMessageType('SpotTrade', (_message.Message,), {
'DESCRIPTOR' : _SPOTTRADE,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.SpotTrade)
})
_sym_db.RegisterMessage(SpotTrade)
StreamTradesRequest = _reflection.GeneratedProtocolMessageType('StreamTradesRequest', (_message.Message,), {
'DESCRIPTOR' : _STREAMTRADESREQUEST,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.StreamTradesRequest)
})
_sym_db.RegisterMessage(StreamTradesRequest)
StreamTradesResponse = _reflection.GeneratedProtocolMessageType('StreamTradesResponse', (_message.Message,), {
'DESCRIPTOR' : _STREAMTRADESRESPONSE,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.StreamTradesResponse)
})
_sym_db.RegisterMessage(StreamTradesResponse)
SubaccountOrdersListRequest = _reflection.GeneratedProtocolMessageType('SubaccountOrdersListRequest', (_message.Message,), {
'DESCRIPTOR' : _SUBACCOUNTORDERSLISTREQUEST,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.SubaccountOrdersListRequest)
})
_sym_db.RegisterMessage(SubaccountOrdersListRequest)
SubaccountOrdersListResponse = _reflection.GeneratedProtocolMessageType('SubaccountOrdersListResponse', (_message.Message,), {
'DESCRIPTOR' : _SUBACCOUNTORDERSLISTRESPONSE,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.SubaccountOrdersListResponse)
})
_sym_db.RegisterMessage(SubaccountOrdersListResponse)
SubaccountTradesListRequest = _reflection.GeneratedProtocolMessageType('SubaccountTradesListRequest', (_message.Message,), {
'DESCRIPTOR' : _SUBACCOUNTTRADESLISTREQUEST,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.SubaccountTradesListRequest)
})
_sym_db.RegisterMessage(SubaccountTradesListRequest)
SubaccountTradesListResponse = _reflection.GeneratedProtocolMessageType('SubaccountTradesListResponse', (_message.Message,), {
'DESCRIPTOR' : _SUBACCOUNTTRADESLISTRESPONSE,
'__module__' : 'exchange.injective_spot_exchange_rpc_pb2'
# @@protoc_insertion_point(class_scope:injective_spot_exchange_rpc.SubaccountTradesListResponse)
})
_sym_db.RegisterMessage(SubaccountTradesListResponse)
DESCRIPTOR._options = None
_INJECTIVESPOTEXCHANGERPC = _descriptor.ServiceDescriptor(
name='InjectiveSpotExchangeRPC',
full_name='injective_spot_exchange_rpc.InjectiveSpotExchangeRPC',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=3271,
serialized_end=4576,
methods=[
_descriptor.MethodDescriptor(
name='Markets',
full_name='injective_spot_exchange_rpc.InjectiveSpotExchangeRPC.Markets',
index=0,
containing_service=None,
input_type=_MARKETSREQUEST,
output_type=_MARKETSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Market',
full_name='injective_spot_exchange_rpc.InjectiveSpotExchangeRPC.Market',
index=1,
containing_service=None,
input_type=_MARKETREQUEST,
output_type=_MARKETRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='StreamMarkets',
full_name='injective_spot_exchange_rpc.InjectiveSpotExchangeRPC.StreamMarkets',
index=2,
containing_service=None,
input_type=_STREAMMARKETSREQUEST,
output_type=_STREAMMARKETSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Orderbook',
full_name='injective_spot_exchange_rpc.InjectiveSpotExchangeRPC.Orderbook',
index=3,
containing_service=None,
input_type=_ORDERBOOKREQUEST,
output_type=_ORDERBOOKRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='StreamOrderbook',
full_name='injective_spot_exchange_rpc.InjectiveSpotExchangeRPC.StreamOrderbook',
index=4,
containing_service=None,
input_type=_STREAMORDERBOOKREQUEST,
output_type=_STREAMORDERBOOKRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Orders',
full_name='injective_spot_exchange_rpc.InjectiveSpotExchangeRPC.Orders',
index=5,
containing_service=None,
input_type=_ORDERSREQUEST,
output_type=_ORDERSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='StreamOrders',
full_name='injective_spot_exchange_rpc.InjectiveSpotExchangeRPC.StreamOrders',
index=6,
containing_service=None,
input_type=_STREAMORDERSREQUEST,
output_type=_STREAMORDERSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Trades',
full_name='injective_spot_exchange_rpc.InjectiveSpotExchangeRPC.Trades',
index=7,
containing_service=None,
input_type=_TRADESREQUEST,
output_type=_TRADESRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='StreamTrades',
full_name='injective_spot_exchange_rpc.InjectiveSpotExchangeRPC.StreamTrades',
index=8,
containing_service=None,
input_type=_STREAMTRADESREQUEST,
output_type=_STREAMTRADESRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SubaccountOrdersList',
full_name='injective_spot_exchange_rpc.InjectiveSpotExchangeRPC.SubaccountOrdersList',
index=9,
containing_service=None,
input_type=_SUBACCOUNTORDERSLISTREQUEST,
output_type=_SUBACCOUNTORDERSLISTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SubaccountTradesList',
full_name='injective_spot_exchange_rpc.InjectiveSpotExchangeRPC.SubaccountTradesList',
index=10,
containing_service=None,
input_type=_SUBACCOUNTTRADESLISTREQUEST,
output_type=_SUBACCOUNTTRADESLISTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_INJECTIVESPOTEXCHANGERPC)
DESCRIPTOR.services_by_name['InjectiveSpotExchangeRPC'] = _INJECTIVESPOTEXCHANGERPC
# @@protoc_insertion_point(module_scope)
| 45.833239
| 6,964
| 0.774021
| 9,848
| 80,804
| 5.973396
| 0.040719
| 0.042566
| 0.083891
| 0.09506
| 0.798677
| 0.753017
| 0.743651
| 0.678629
| 0.658093
| 0.645548
| 0
| 0.030335
| 0.112259
| 80,804
| 1,762
| 6,965
| 45.859251
| 0.789734
| 0.032201
| 0
| 0.706928
| 1
| 0.003066
| 0.216302
| 0.174108
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.002452
| 0
| 0.002452
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
cc853d2b679d302ecfa29b743b6bf2b8b6578b25
| 50
|
py
|
Python
|
ExerciciosPython/ex001.py
|
LucasBalbinoSS/Exercicios-Python
|
2e9d3a8ec4ab24a2732c461a84f51bde54902a24
|
[
"MIT"
] | null | null | null |
ExerciciosPython/ex001.py
|
LucasBalbinoSS/Exercicios-Python
|
2e9d3a8ec4ab24a2732c461a84f51bde54902a24
|
[
"MIT"
] | null | null | null |
ExerciciosPython/ex001.py
|
LucasBalbinoSS/Exercicios-Python
|
2e9d3a8ec4ab24a2732c461a84f51bde54902a24
|
[
"MIT"
] | null | null | null |
msg = '\033[1;31mOlá Mundo\033[1;32m!'
print(msg)
| 16.666667
| 38
| 0.66
| 10
| 50
| 3.3
| 0.7
| 0.242424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.266667
| 0.1
| 50
| 2
| 39
| 25
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
ccd153ed89bef21c57f482532bdb27dbfd333b9b
| 86
|
py
|
Python
|
solver/__init__.py
|
SeongSuKim95/Re-ID-baseline
|
b145bba712492f7a93cd3771e007fa694b1c44b6
|
[
"MIT"
] | 297
|
2021-03-26T14:29:47.000Z
|
2021-09-10T11:33:56.000Z
|
PASS_transreid/solver/__init__.py
|
CASIA-IVA-Lab/PASS-reID
|
46dc6d25f4396e35ac1a766ad2dcaa580beccf15
|
[
"Apache-2.0"
] | 31
|
2019-06-13T02:03:22.000Z
|
2021-12-30T03:55:46.000Z
|
PASS_transreid/solver/__init__.py
|
CASIA-IVA-Lab/PASS-reID
|
46dc6d25f4396e35ac1a766ad2dcaa580beccf15
|
[
"Apache-2.0"
] | 71
|
2019-06-17T01:10:08.000Z
|
2022-03-03T06:51:48.000Z
|
from .lr_scheduler import WarmupMultiStepLR
from .make_optimizer import make_optimizer
| 43
| 43
| 0.895349
| 11
| 86
| 6.727273
| 0.636364
| 0.351351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081395
| 86
| 2
| 44
| 43
| 0.936709
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
aeb3b066939678ceaaedbef801437ee31c23e283
| 2,783
|
py
|
Python
|
epytope/Data/pssms/smmpmbec/mat/A_68_02_11.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 7
|
2021-02-01T18:11:28.000Z
|
2022-01-31T19:14:07.000Z
|
epytope/Data/pssms/smmpmbec/mat/A_68_02_11.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 22
|
2021-01-02T15:25:23.000Z
|
2022-03-14T11:32:53.000Z
|
epytope/Data/pssms/smmpmbec/mat/A_68_02_11.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 4
|
2021-05-28T08:50:38.000Z
|
2022-03-14T11:45:32.000Z
|
A_68_02_11 = {0: {'A': 0.114, 'C': 0.028, 'E': -0.075, 'D': -0.01, 'G': 0.003, 'F': 0.041, 'I': 0.176, 'H': -0.078, 'K': -0.107, 'M': -0.028, 'L': -0.108, 'N': -0.021, 'Q': -0.12, 'P': 0.149, 'S': -0.061, 'R': -0.029, 'T': -0.017, 'W': 0.021, 'V': 0.112, 'Y': 0.009}, 1: {'A': -0.018, 'C': 0.039, 'E': 0.112, 'D': 0.111, 'G': 0.046, 'F': -0.114, 'I': -0.103, 'H': 0.055, 'K': -0.006, 'M': 0.063, 'L': -0.105, 'N': 0.081, 'Q': 0.212, 'P': -0.169, 'S': 0.093, 'R': 0.092, 'T': -0.05, 'W': 0.031, 'V': -0.269, 'Y': -0.101}, 2: {'A': -0.007, 'C': -0.001, 'E': -0.001, 'D': 0.0, 'G': -0.001, 'F': -0.003, 'I': -0.003, 'H': 0.004, 'K': 0.005, 'M': -0.001, 'L': -0.001, 'N': 0.002, 'Q': 0.0, 'P': -0.001, 'S': 0.0, 'R': 0.006, 'T': -0.001, 'W': 0.001, 'V': -0.003, 'Y': 0.002}, 3: {'A': 0.0, 'C': -0.001, 'E': -0.003, 'D': -0.004, 'G': 0.001, 'F': -0.003, 'I': -0.001, 'H': 0.001, 'K': 0.003, 'M': 0.0, 'L': -0.001, 'N': 0.0, 'Q': 0.002, 'P': -0.002, 'S': 0.002, 'R': 0.004, 'T': 0.002, 'W': -0.001, 'V': 0.001, 'Y': -0.0}, 4: {'A': 0.014, 'C': -0.007, 'E': 0.01, 'D': 0.005, 'G': 0.003, 'F': -0.055, 'I': -0.0, 'H': 0.001, 'K': 0.025, 'M': 0.006, 'L': 0.01, 'N': -0.005, 'Q': 0.034, 'P': 0.069, 'S': -0.011, 'R': 0.005, 'T': -0.016, 'W': -0.034, 'V': -0.024, 'Y': -0.03}, 5: {'A': -0.003, 'C': -0.001, 'E': -0.006, 'D': -0.005, 'G': -0.002, 'F': -0.002, 'I': -0.007, 'H': 0.007, 'K': 0.012, 'M': -0.004, 'L': -0.01, 'N': -0.0, 'Q': 0.001, 'P': 0.001, 'S': 0.005, 'R': 0.018, 'T': 0.0, 'W': 0.0, 'V': -0.006, 'Y': 0.002}, 6: {'A': 0.005, 'C': 0.002, 'E': 0.001, 'D': 0.005, 'G': 0.002, 'F': -0.001, 'I': 0.002, 'H': -0.004, 'K': -0.001, 'M': -0.0, 'L': -0.0, 'N': -0.0, 'Q': -0.001, 'P': 0.013, 'S': -0.006, 'R': -0.002, 'T': -0.002, 'W': -0.003, 'V': 0.001, 'Y': -0.009}, 7: {'A': -0.003, 'C': 0.0, 'E': 0.001, 'D': 0.001, 'G': -0.0, 'F': -0.0, 'I': -0.001, 'H': 0.0, 'K': -0.003, 'M': -0.001, 'L': -0.002, 'N': 0.001, 'Q': 0.001, 'P': 0.0, 'S': 0.002, 'R': 0.001, 'T': 0.001, 'W': 0.002, 'V': -0.002, 
'Y': 0.0}, 8: {'A': 0.009, 'C': -0.007, 'E': 0.006, 'D': 0.017, 'G': -0.009, 'F': -0.041, 'I': -0.007, 'H': 0.003, 'K': 0.032, 'M': -0.014, 'L': -0.015, 'N': -0.015, 'Q': 0.01, 'P': 0.032, 'S': 0.003, 'R': 0.039, 'T': 0.001, 'W': -0.019, 'V': -0.012, 'Y': -0.013}, 9: {'A': 0.179, 'C': -0.096, 'E': -0.148, 'D': -0.038, 'G': -0.138, 'F': -0.186, 'I': 0.261, 'H': 0.052, 'K': 0.23, 'M': 0.078, 'L': 0.014, 'N': 0.093, 'Q': -0.149, 'P': -1.165, 'S': 0.112, 'R': 0.067, 'T': 0.35, 'W': 0.121, 'V': 0.331, 'Y': 0.03}, 10: {'A': -0.129, 'C': 0.027, 'E': 0.017, 'D': 0.069, 'G': 0.028, 'F': -0.054, 'I': -0.46, 'H': 0.231, 'K': 0.195, 'M': -0.108, 'L': -0.251, 'N': 0.095, 'Q': 0.096, 'P': 0.044, 'S': 0.077, 'R': 0.379, 'T': -0.028, 'W': 0.091, 'V': -0.394, 'Y': 0.076}, -1: {'con': 4.32633}}
| 2,783
| 2,783
| 0.38807
| 679
| 2,783
| 1.586156
| 0.173785
| 0.122563
| 0.013928
| 0.016713
| 0.339833
| 0.063138
| 0.063138
| 0.063138
| 0
| 0
| 0
| 0.366409
| 0.163493
| 2,783
| 1
| 2,783
| 2,783
| 0.09622
| 0
| 0
| 0
| 0
| 0
| 0.080101
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
aef551682eafbd0ebd3da47804c4eed0165e8cea
| 281
|
py
|
Python
|
deformable_convolution/modules/__init__.py
|
Andyflying/LightNet-plusplus
|
24a76792ab4c1bf8f94fb1457539ded86ed2112e
|
[
"Apache-2.0"
] | null | null | null |
deformable_convolution/modules/__init__.py
|
Andyflying/LightNet-plusplus
|
24a76792ab4c1bf8f94fb1457539ded86ed2112e
|
[
"Apache-2.0"
] | null | null | null |
deformable_convolution/modules/__init__.py
|
Andyflying/LightNet-plusplus
|
24a76792ab4c1bf8f94fb1457539ded86ed2112e
|
[
"Apache-2.0"
] | null | null | null |
from .deform_conv import DeformConv, _DeformConv, DeformConvPack
from .modulated_deform_conv import ModulatedDeformConv, _ModulatedDeformConv, ModulatedDeformConvPack, ModulatedDeformConvTM
from .deform_psroi_pooling import DeformRoIPooling, _DeformRoIPooling, DeformRoIPoolingPack
| 93.666667
| 124
| 0.900356
| 24
| 281
| 10.208333
| 0.583333
| 0.081633
| 0.130612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064057
| 281
| 3
| 125
| 93.666667
| 0.931559
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4e0edc1db2e148fc3539ad6974367bc52803bc95
| 925
|
py
|
Python
|
people/regex.py
|
ChrisWaites/human
|
cf7085ade64aed33ee942b96a5f4d0a6977ba187
|
[
"MIT"
] | 1
|
2020-03-30T10:05:58.000Z
|
2020-03-30T10:05:58.000Z
|
people/regex.py
|
ChrisWaites/human
|
cf7085ade64aed33ee942b96a5f4d0a6977ba187
|
[
"MIT"
] | 1
|
2018-04-26T04:41:22.000Z
|
2018-04-26T04:41:22.000Z
|
people/regex.py
|
ChrisWaites/people
|
cf7085ade64aed33ee942b96a5f4d0a6977ba187
|
[
"MIT"
] | null | null | null |
any = r'.*'
nonneg_int = r'\d+'
neg_int = r'-\d+'
int = r'-?\d+'
nonneg_float = r'\d*\.?\d+'
neg_float = r'-\d*\.?\d+'
float = r'-?\d*\.?\d+'
float_zero_to_one = r'0(\.\d+)?|1\.0'
url = r'((https?|ftp|file):\/\/)?([\da-z\.-]+)\.([a-z\.]{2,6})([\/\w \.-]*)*\/?'
email = r'.+@.+'
phone = r'\+?(\d.*){3,}'
date = r'(0?[1-9]|[12][0-9]|3[01])([ \/\-])(0?[1-9]|1[012])\2([0-9][0-9][0-9][0-9])(([ -])([0-1]?[0-9]|2[0-3]):[0-5]?[0-9]:[0-5]?[0-9])?'
time = r'([01]?[0-9]|2[0-3]):[0-5][0-9]'
iso8601 = r'(?![+-]?\d{4,5}-?(?:\d{2}|W\d{2})T)(?:|(\d{4}|[+-]\d{5})-?(?:|(0\d|1[0-2])(?:|-?([0-2]\d|3[0-1]))|([0-2]\d{2}|3[0-5]\d|36[0-6])|W([0-4]\d|5[0-3])(?:|-?([1-7])))(?:(?!\d)|T(?=\d)))(?:|([01]\d|2[0-4])(?:|:?([0-5]\d)(?:|:?([0-5]\d)(?:|\.(\d{3})))(?:|[zZ]|([+-](?:[01]\d|2[0-4]))(?:|:?([0-5]\d)))))'
def union(*choices):
return r'({})'.format('|'.join(choices))
def csv(*choices):
return r'{}'.format(' *, *'.join(choices))
| 42.045455
| 307
| 0.371892
| 192
| 925
| 1.755208
| 0.234375
| 0.059347
| 0.04451
| 0.071217
| 0.379822
| 0.317507
| 0.133531
| 0.106825
| 0.053412
| 0
| 0
| 0.12922
| 0.071351
| 925
| 21
| 308
| 44.047619
| 0.263097
| 0
| 0
| 0
| 0
| 0.222222
| 0.660173
| 0.550866
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0.111111
| 0.222222
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
9d818c19e44f542f0a3a24081f66f939c5318c4b
| 5,345
|
py
|
Python
|
sdk/python/pulumi_azure_native/resources/__init__.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/resources/__init__.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/resources/__init__.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from .. import _utilities
import typing
# Export this package's modules as members:
from ._enums import *
from .azure_cli_script import *
from .azure_power_shell_script import *
from .deployment import *
from .deployment_at_management_group_scope import *
from .deployment_at_scope import *
from .deployment_at_subscription_scope import *
from .deployment_at_tenant_scope import *
from .deployment_script import *
from .get_azure_cli_script import *
from .get_azure_power_shell_script import *
from .get_deployment import *
from .get_deployment_at_management_group_scope import *
from .get_deployment_at_scope import *
from .get_deployment_at_subscription_scope import *
from .get_deployment_at_tenant_scope import *
from .get_deployment_script import *
from .get_resource import *
from .get_resource_group import *
from .get_tag_at_scope import *
from .get_template_spec import *
from .get_template_spec_version import *
from .resource import *
from .resource_group import *
from .tag_at_scope import *
from .template_spec import *
from .template_spec_version import *
from ._inputs import *
from . import outputs
# Make subpackages available:
if typing.TYPE_CHECKING:
import pulumi_azure_native.resources.v20151101 as __v20151101
v20151101 = __v20151101
import pulumi_azure_native.resources.v20160201 as __v20160201
v20160201 = __v20160201
import pulumi_azure_native.resources.v20160701 as __v20160701
v20160701 = __v20160701
import pulumi_azure_native.resources.v20160901 as __v20160901
v20160901 = __v20160901
import pulumi_azure_native.resources.v20170510 as __v20170510
v20170510 = __v20170510
import pulumi_azure_native.resources.v20180201 as __v20180201
v20180201 = __v20180201
import pulumi_azure_native.resources.v20180501 as __v20180501
v20180501 = __v20180501
import pulumi_azure_native.resources.v20190301 as __v20190301
v20190301 = __v20190301
import pulumi_azure_native.resources.v20190501 as __v20190501
v20190501 = __v20190501
import pulumi_azure_native.resources.v20190510 as __v20190510
v20190510 = __v20190510
import pulumi_azure_native.resources.v20190601preview as __v20190601preview
v20190601preview = __v20190601preview
import pulumi_azure_native.resources.v20190701 as __v20190701
v20190701 = __v20190701
import pulumi_azure_native.resources.v20190801 as __v20190801
v20190801 = __v20190801
import pulumi_azure_native.resources.v20191001 as __v20191001
v20191001 = __v20191001
import pulumi_azure_native.resources.v20191001preview as __v20191001preview
v20191001preview = __v20191001preview
import pulumi_azure_native.resources.v20200601 as __v20200601
v20200601 = __v20200601
import pulumi_azure_native.resources.v20200801 as __v20200801
v20200801 = __v20200801
import pulumi_azure_native.resources.v20201001 as __v20201001
v20201001 = __v20201001
import pulumi_azure_native.resources.v20210101 as __v20210101
v20210101 = __v20210101
import pulumi_azure_native.resources.v20210301preview as __v20210301preview
v20210301preview = __v20210301preview
import pulumi_azure_native.resources.v20210401 as __v20210401
v20210401 = __v20210401
import pulumi_azure_native.resources.v20210501 as __v20210501
v20210501 = __v20210501
else:
v20151101 = _utilities.lazy_import('pulumi_azure_native.resources.v20151101')
v20160201 = _utilities.lazy_import('pulumi_azure_native.resources.v20160201')
v20160701 = _utilities.lazy_import('pulumi_azure_native.resources.v20160701')
v20160901 = _utilities.lazy_import('pulumi_azure_native.resources.v20160901')
v20170510 = _utilities.lazy_import('pulumi_azure_native.resources.v20170510')
v20180201 = _utilities.lazy_import('pulumi_azure_native.resources.v20180201')
v20180501 = _utilities.lazy_import('pulumi_azure_native.resources.v20180501')
v20190301 = _utilities.lazy_import('pulumi_azure_native.resources.v20190301')
v20190501 = _utilities.lazy_import('pulumi_azure_native.resources.v20190501')
v20190510 = _utilities.lazy_import('pulumi_azure_native.resources.v20190510')
v20190601preview = _utilities.lazy_import('pulumi_azure_native.resources.v20190601preview')
v20190701 = _utilities.lazy_import('pulumi_azure_native.resources.v20190701')
v20190801 = _utilities.lazy_import('pulumi_azure_native.resources.v20190801')
v20191001 = _utilities.lazy_import('pulumi_azure_native.resources.v20191001')
v20191001preview = _utilities.lazy_import('pulumi_azure_native.resources.v20191001preview')
v20200601 = _utilities.lazy_import('pulumi_azure_native.resources.v20200601')
v20200801 = _utilities.lazy_import('pulumi_azure_native.resources.v20200801')
v20201001 = _utilities.lazy_import('pulumi_azure_native.resources.v20201001')
v20210101 = _utilities.lazy_import('pulumi_azure_native.resources.v20210101')
v20210301preview = _utilities.lazy_import('pulumi_azure_native.resources.v20210301preview')
v20210401 = _utilities.lazy_import('pulumi_azure_native.resources.v20210401')
v20210501 = _utilities.lazy_import('pulumi_azure_native.resources.v20210501')
| 49.490741
| 95
| 0.815716
| 614
| 5,345
| 6.636808
| 0.143322
| 0.129571
| 0.183558
| 0.248344
| 0.671411
| 0.60319
| 0.263558
| 0
| 0
| 0
| 0
| 0.225373
| 0.122544
| 5,345
| 107
| 96
| 49.953271
| 0.643497
| 0.043218
| 0
| 0
| 1
| 0
| 0.172117
| 0.172117
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.757576
| 0
| 0.757576
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9dd5b07afcda55724558ef9e31ee76303b61e3fe
| 209
|
py
|
Python
|
RecoBTag/Skimming/python/btagElecInJet_SkimPaths_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
RecoBTag/Skimming/python/btagElecInJet_SkimPaths_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
RecoBTag/Skimming/python/btagElecInJet_SkimPaths_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
from RecoBTag.Skimming.btagElecInJet_HLT_cfi import *
from RecoBTag.Skimming.btagElecInJet_cfi import *
btagElecInJetPath = cms.Path(btagElecInJet_HLT*btagElecInJet)
| 29.857143
| 61
| 0.856459
| 25
| 209
| 7
| 0.56
| 0.137143
| 0.228571
| 0.377143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08134
| 209
| 6
| 62
| 34.833333
| 0.911458
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d1c0bb2dc6786531d6dcb602157eff0e14d5c858
| 29
|
py
|
Python
|
vnpy/api/femas/__init__.py
|
xiumingxu/vnpy-xx
|
8b2d9ecdabcb7931d46fd92fad2d3701b7e66975
|
[
"MIT"
] | null | null | null |
vnpy/api/femas/__init__.py
|
xiumingxu/vnpy-xx
|
8b2d9ecdabcb7931d46fd92fad2d3701b7e66975
|
[
"MIT"
] | null | null | null |
vnpy/api/femas/__init__.py
|
xiumingxu/vnpy-xx
|
8b2d9ecdabcb7931d46fd92fad2d3701b7e66975
|
[
"MIT"
] | null | null | null |
from .femas_constant import *
| 29
| 29
| 0.827586
| 4
| 29
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d1c3b7893325a2330caef82e57fc6e99aaa855f1
| 120
|
py
|
Python
|
src/models/logistic_test_model.py
|
pkiage/credit-risk-modelling-tool
|
74f6cdb27c8333c9cae5b08b91b5521af8e444e0
|
[
"MIT"
] | 1
|
2022-03-03T10:27:23.000Z
|
2022-03-03T10:27:23.000Z
|
src/models/logistic_test_model.py
|
pkiage/credit-risk-modelling-tool
|
74f6cdb27c8333c9cae5b08b91b5521af8e444e0
|
[
"MIT"
] | null | null | null |
src/models/logistic_test_model.py
|
pkiage/credit-risk-modelling-tool
|
74f6cdb27c8333c9cae5b08b91b5521af8e444e0
|
[
"MIT"
] | 1
|
2022-03-29T14:40:20.000Z
|
2022-03-29T14:40:20.000Z
|
from models.util_test import make_tests_view
logistic_test_model = make_tests_view(
"Logistic", "Logistic Model")
| 24
| 45
| 0.791667
| 17
| 120
| 5.176471
| 0.588235
| 0.204545
| 0.295455
| 0.477273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 120
| 4
| 46
| 30
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0.183333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
ae2c0d7ddbca17e29543aebb7881d212e0d13199
| 46
|
py
|
Python
|
src/visualization/__init__.py
|
blotspot/expanse-book-analysis
|
03288e34d01eb9465205c764b8ba5d7062ddd5ab
|
[
"MIT"
] | null | null | null |
src/visualization/__init__.py
|
blotspot/expanse-book-analysis
|
03288e34d01eb9465205c764b8ba5d7062ddd5ab
|
[
"MIT"
] | null | null | null |
src/visualization/__init__.py
|
blotspot/expanse-book-analysis
|
03288e34d01eb9465205c764b8ba5d7062ddd5ab
|
[
"MIT"
] | null | null | null |
from .image import *
from .visualize import *
| 15.333333
| 24
| 0.73913
| 6
| 46
| 5.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 2
| 25
| 23
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ae4575a8354cb779ebe9eed7c8ed676549952c32
| 4,556
|
py
|
Python
|
eop/special_numeric.py
|
redhog/EmbarrassmentOfPandas
|
e0c5c38bfaa79c04424f0d8ecde2c83b7da64908
|
[
"MIT"
] | null | null | null |
eop/special_numeric.py
|
redhog/EmbarrassmentOfPandas
|
e0c5c38bfaa79c04424f0d8ecde2c83b7da64908
|
[
"MIT"
] | null | null | null |
eop/special_numeric.py
|
redhog/EmbarrassmentOfPandas
|
e0c5c38bfaa79c04424f0d8ecde2c83b7da64908
|
[
"MIT"
] | 1
|
2021-04-28T22:03:09.000Z
|
2021-04-28T22:03:09.000Z
|
class SpecialNumeric(object):
def __add__(self, *arg, **kw): return self.__getattr__("__add__")(*arg, **kw)
def __sub__(self, *arg, **kw): return self.__getattr__("__sub__")(*arg, **kw)
def __mul__(self, *arg, **kw): return self.__getattr__("__mul__")(*arg, **kw)
def __floordiv__(self, *arg, **kw): return self.__getattr__("__floordiv__")(*arg, **kw)
def __mod__(self, *arg, **kw): return self.__getattr__("__mod__")(*arg, **kw)
def __divmod__(self, *arg, **kw): return self.__getattr__("__divmod__")(*arg, **kw)
def __pow__(self, *arg, **kw): return self.__getattr__("__pow__")(*arg, **kw)
def __lshift__(self, *arg, **kw): return self.__getattr__("__lshift__")(*arg, **kw)
def __rshift__(self, *arg, **kw): return self.__getattr__("__rshift__")(*arg, **kw)
def __and__(self, *arg, **kw): return self.__getattr__("__and__")(*arg, **kw)
def __xor__(self, *arg, **kw): return self.__getattr__("__xor__")(*arg, **kw)
def __or__(self, *arg, **kw): return self.__getattr__("__or__")(*arg, **kw)
def __div__(self, *arg, **kw): return self.__getattr__("__div__")(*arg, **kw)
def __truediv__(self, *arg, **kw): return self.__getattr__("__truediv__")(*arg, **kw)
def __radd__(self, *arg, **kw): return self.__getattr__("__radd__")(*arg, **kw)
def __rsub__(self, *arg, **kw): return self.__getattr__("__rsub__")(*arg, **kw)
def __rmul__(self, *arg, **kw): return self.__getattr__("__rmul__")(*arg, **kw)
def __rdiv__(self, *arg, **kw): return self.__getattr__("__rdiv__")(*arg, **kw)
def __rtruediv__(self, *arg, **kw): return self.__getattr__("__rtruediv__")(*arg, **kw)
def __rfloordiv__(self, *arg, **kw): return self.__getattr__("__rfloordiv__")(*arg, **kw)
def __rmod__(self, *arg, **kw): return self.__getattr__("__rmod__")(*arg, **kw)
def __rdivmod__(self, *arg, **kw): return self.__getattr__("__rdivmod__")(*arg, **kw)
def __rpow__(self, *arg, **kw): return self.__getattr__("__rpow__")(*arg, **kw)
def __rlshift__(self, *arg, **kw): return self.__getattr__("__rlshift__")(*arg, **kw)
def __rrshift__(self, *arg, **kw): return self.__getattr__("__rrshift__")(*arg, **kw)
def __rand__(self, *arg, **kw): return self.__getattr__("__rand__")(*arg, **kw)
def __rxor__(self, *arg, **kw): return self.__getattr__("__rxor__")(*arg, **kw)
def __ror__(self, *arg, **kw): return self.__getattr__("__ror__")(*arg, **kw)
def __iadd__(self, *arg, **kw): return self.__getattr__("__iadd__")(*arg, **kw)
def __isub__(self, *arg, **kw): return self.__getattr__("__isub__")(*arg, **kw)
def __imul__(self, *arg, **kw): return self.__getattr__("__imul__")(*arg, **kw)
def __idiv__(self, *arg, **kw): return self.__getattr__("__idiv__")(*arg, **kw)
def __itruediv__(self, *arg, **kw): return self.__getattr__("__itruediv__")(*arg, **kw)
def __ifloordiv__(self, *arg, **kw): return self.__getattr__("__ifloordiv__")(*arg, **kw)
def __imod__(self, *arg, **kw): return self.__getattr__("__imod__")(*arg, **kw)
def __ipow__(self, *arg, **kw): return self.__getattr__("__ipow__")(*arg, **kw)
def __ilshift__(self, *arg, **kw): return self.__getattr__("__ilshift__")(*arg, **kw)
def __irshift__(self, *arg, **kw): return self.__getattr__("__irshift__")(*arg, **kw)
def __iand__(self, *arg, **kw): return self.__getattr__("__iand__")(*arg, **kw)
def __ixor__(self, *arg, **kw): return self.__getattr__("__ixor__")(*arg, **kw)
def __ior__(self, *arg, **kw): return self.__getattr__("__ior__")(*arg, **kw)
def __neg__(self, *arg, **kw): return self.__getattr__("__neg__")(*arg, **kw)
def __pos__(self, *arg, **kw): return self.__getattr__("__pos__")(*arg, **kw)
def __abs__(self, *arg, **kw): return self.__getattr__("__abs__")(*arg, **kw)
def __invert__(self, *arg, **kw): return self.__getattr__("__invert__")(*arg, **kw)
def __complex__(self, *arg, **kw): return self.__getattr__("__complex__")(*arg, **kw)
def __int__(self, *arg, **kw): return self.__getattr__("__int__")(*arg, **kw)
def __long__(self, *arg, **kw): return self.__getattr__("__long__")(*arg, **kw)
def __float__(self, *arg, **kw): return self.__getattr__("__float__")(*arg, **kw)
def __oct__(self, *arg, **kw): return self.__getattr__("__oct__")(*arg, **kw)
def __hex__(self, *arg, **kw): return self.__getattr__("__hex__")(*arg, **kw)
def __index__(self, *arg, **kw): return self.__getattr__("__index__")(*arg, **kw)
def __coerce__(self, *arg, **kw): return self.__getattr__("__coerce__")(*arg, **kw)
| 82.836364
| 93
| 0.649912
| 586
| 4,556
| 3.967577
| 0.105802
| 0.227957
| 0.205161
| 0.341935
| 0.592688
| 0.592688
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128402
| 4,556
| 54
| 94
| 84.37037
| 0.585495
| 0
| 0
| 0
| 0
| 0
| 0.101185
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.981481
| false
| 0
| 0
| 0.981481
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
ae5b09be59ffe0a325947d3778ffaacb6b7bec38
| 22
|
py
|
Python
|
Blup.py
|
janaobsteter/Genotype_CODES
|
8adf70660ebff4dd106c666db02cdba8b8ce4f97
|
[
"Apache-2.0"
] | 1
|
2021-10-07T18:55:03.000Z
|
2021-10-07T18:55:03.000Z
|
Blup.py
|
janaobsteter/Genotype_CODES
|
8adf70660ebff4dd106c666db02cdba8b8ce4f97
|
[
"Apache-2.0"
] | null | null | null |
Blup.py
|
janaobsteter/Genotype_CODES
|
8adf70660ebff4dd106c666db02cdba8b8ce4f97
|
[
"Apache-2.0"
] | 1
|
2017-04-13T09:07:41.000Z
|
2017-04-13T09:07:41.000Z
|
from PyPedal import *
| 11
| 21
| 0.772727
| 3
| 22
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 22
| 1
| 22
| 22
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ae625190cb1bcf2697aa3b2d14b4f4406dc644bb
| 167
|
py
|
Python
|
inclearn/lib/losses/__init__.py
|
sajjadahmadish/incremental_learning.pytorch
|
f01c1cf9cbafc930687a89dbdf7c1937d1ca2749
|
[
"MIT"
] | null | null | null |
inclearn/lib/losses/__init__.py
|
sajjadahmadish/incremental_learning.pytorch
|
f01c1cf9cbafc930687a89dbdf7c1937d1ca2749
|
[
"MIT"
] | null | null | null |
inclearn/lib/losses/__init__.py
|
sajjadahmadish/incremental_learning.pytorch
|
f01c1cf9cbafc930687a89dbdf7c1937d1ca2749
|
[
"MIT"
] | null | null | null |
# flake8: noqa
from .base import *
from .distillation import *
from .metrics import *
from .regularizations import *
from .unsupervised import *
from .losses import *
| 20.875
| 30
| 0.754491
| 20
| 167
| 6.3
| 0.5
| 0.396825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007143
| 0.161677
| 167
| 7
| 31
| 23.857143
| 0.892857
| 0.071856
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
884466422d02e33a14b7a4d1fd0b18ccc89c7331
| 49
|
py
|
Python
|
TOPSIS_Aneesh_101853025/__init__.py
|
aj8101/TOPSIS-Aneesh-101853025
|
153921e4bc88d16ec0d5ed7108b2ec71934ebfc2
|
[
"MIT"
] | null | null | null |
TOPSIS_Aneesh_101853025/__init__.py
|
aj8101/TOPSIS-Aneesh-101853025
|
153921e4bc88d16ec0d5ed7108b2ec71934ebfc2
|
[
"MIT"
] | null | null | null |
TOPSIS_Aneesh_101853025/__init__.py
|
aj8101/TOPSIS-Aneesh-101853025
|
153921e4bc88d16ec0d5ed7108b2ec71934ebfc2
|
[
"MIT"
] | null | null | null |
from TOPSIS_Aneesh_101853025.topsis import topsis
| 49
| 49
| 0.918367
| 7
| 49
| 6.142857
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195652
| 0.061224
| 49
| 1
| 49
| 49
| 0.73913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
88689311e5b0b853e499d326f2d99a42e06a76de
| 128
|
py
|
Python
|
poroto/none/c_wrapper.py
|
TANGO-Project/poroto
|
380c0ab9f33bead70ed71c78493e682924d7f997
|
[
"BSD-3-Clause"
] | 1
|
2018-05-22T22:53:31.000Z
|
2018-05-22T22:53:31.000Z
|
poroto/none/c_wrapper.py
|
TANGO-Project/poroto
|
380c0ab9f33bead70ed71c78493e682924d7f997
|
[
"BSD-3-Clause"
] | null | null | null |
poroto/none/c_wrapper.py
|
TANGO-Project/poroto
|
380c0ab9f33bead70ed71c78493e682924d7f997
|
[
"BSD-3-Clause"
] | null | null | null |
class CWrapper:
def __init__(self, functions, mmap, streams_map, debug):
pass
def generate(self):
pass
| 18.285714
| 60
| 0.625
| 15
| 128
| 5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.289063
| 128
| 6
| 61
| 21.333333
| 0.824176
| 0
| 0
| 0.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.4
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
88b820401e89d270297e8cfe3beb4bd1dbbcc427
| 167
|
py
|
Python
|
app/screens/mobility.py
|
zshanahmed/mobileinsight-mobile
|
ae466c72d17655609539dfec2318b2de5c6786a8
|
[
"Apache-2.0"
] | 63
|
2017-06-30T15:04:15.000Z
|
2021-11-15T09:58:45.000Z
|
app/screens/mobility.py
|
zshanahmed/mobileinsight-mobile
|
ae466c72d17655609539dfec2318b2de5c6786a8
|
[
"Apache-2.0"
] | 28
|
2017-07-24T15:51:50.000Z
|
2022-03-13T21:13:09.000Z
|
app/screens/mobility.py
|
zshanahmed/mobileinsight-mobile
|
ae466c72d17655609539dfec2318b2de5c6786a8
|
[
"Apache-2.0"
] | 45
|
2017-07-02T13:16:37.000Z
|
2022-03-22T07:26:13.000Z
|
from . import MobileInsightScreenBase
from kivy.lang import Builder
Builder.load_file('screens/mobility.kv')
class MobilityScreen(MobileInsightScreenBase):
pass
| 20.875
| 46
| 0.820359
| 18
| 167
| 7.555556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107784
| 167
| 7
| 47
| 23.857143
| 0.912752
| 0
| 0
| 0
| 0
| 0
| 0.113772
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
ee5bfef06a448fdfb9a09c1d6aba79220bc5f783
| 539
|
py
|
Python
|
src/keras/keras/applications/vgg16.py
|
lu791019/iii_HA_Image_Recognition_DL
|
d5f56d62af6d3aac1c216ca4ff309db08a8c9072
|
[
"Apache-2.0"
] | null | null | null |
src/keras/keras/applications/vgg16.py
|
lu791019/iii_HA_Image_Recognition_DL
|
d5f56d62af6d3aac1c216ca4ff309db08a8c9072
|
[
"Apache-2.0"
] | null | null | null |
src/keras/keras/applications/vgg16.py
|
lu791019/iii_HA_Image_Recognition_DL
|
d5f56d62af6d3aac1c216ca4ff309db08a8c9072
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras_applications import vgg16
from . import keras_modules_injection
@keras_modules_injection
def VGG16(*args, **kwargs):
return vgg16.VGG16(*args, **kwargs)
@keras_modules_injection
def decode_predictions(*args, **kwargs):
return vgg16.decode_predictions(*args, **kwargs)
@keras_modules_injection
def preprocess_input(*args, **kwargs):
return vgg16.preprocess_input(*args, **kwargs)
| 24.5
| 53
| 0.764378
| 64
| 539
| 6.015625
| 0.3125
| 0.155844
| 0.218182
| 0.187013
| 0.176623
| 0.176623
| 0
| 0
| 0
| 0
| 0
| 0.026201
| 0.150278
| 539
| 21
| 54
| 25.666667
| 0.81441
| 0
| 0
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| true
| 0
| 0.357143
| 0.214286
| 0.785714
| 0.071429
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 6
|
ee5f24272f22941cf917702694ba932755cc640a
| 24
|
py
|
Python
|
apps/DuelingBanditsPureExploration/algs/BR_Random/__init__.py
|
sumeetsk/NEXT-1
|
c42badbcaeb0ab79ab1f74b6303ecc3864b1c7ee
|
[
"Apache-2.0"
] | null | null | null |
apps/DuelingBanditsPureExploration/algs/BR_Random/__init__.py
|
sumeetsk/NEXT-1
|
c42badbcaeb0ab79ab1f74b6303ecc3864b1c7ee
|
[
"Apache-2.0"
] | null | null | null |
apps/DuelingBanditsPureExploration/algs/BR_Random/__init__.py
|
sumeetsk/NEXT-1
|
c42badbcaeb0ab79ab1f74b6303ecc3864b1c7ee
|
[
"Apache-2.0"
] | null | null | null |
from .BR_Random import *
| 24
| 24
| 0.791667
| 4
| 24
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 24
| 1
| 24
| 24
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c99479f3007824b49a0ee91c88b7841a29e1b837
| 17
|
py
|
Python
|
algorithms/dvr.py
|
alv16106/RoutingLab
|
20e49d98290ef36dab78baeef7bc99fa4d36af4d
|
[
"MIT"
] | null | null | null |
algorithms/dvr.py
|
alv16106/RoutingLab
|
20e49d98290ef36dab78baeef7bc99fa4d36af4d
|
[
"MIT"
] | null | null | null |
algorithms/dvr.py
|
alv16106/RoutingLab
|
20e49d98290ef36dab78baeef7bc99fa4d36af4d
|
[
"MIT"
] | null | null | null |
def dvr():
pass
| 8.5
| 10
| 0.588235
| 3
| 17
| 3.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 17
| 2
| 11
| 8.5
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
4e5f0ba7fb5a685dc84790ed098cc4eb30d50712
| 57
|
py
|
Python
|
li_hang/knn/__init__.py
|
LucienShui/HelloMachineLearning
|
b00a4b3791808ace3b1e45112350c2b3c539995e
|
[
"Apache-2.0"
] | 2
|
2019-07-28T08:25:40.000Z
|
2019-07-29T05:29:10.000Z
|
li_hang/knn/__init__.py
|
LucienShui/HelloMachineLearning
|
b00a4b3791808ace3b1e45112350c2b3c539995e
|
[
"Apache-2.0"
] | null | null | null |
li_hang/knn/__init__.py
|
LucienShui/HelloMachineLearning
|
b00a4b3791808ace3b1e45112350c2b3c539995e
|
[
"Apache-2.0"
] | null | null | null |
from knn.base_knn import BaseKNN
from knn.knn import KNN
| 19
| 32
| 0.824561
| 11
| 57
| 4.181818
| 0.454545
| 0.304348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 57
| 2
| 33
| 28.5
| 0.938776
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4e6b445586321bcc44b39541ac44d8498d7d10eb
| 4,847
|
py
|
Python
|
tests/test_get_super_records_from_interval_tree.py
|
leoisl/vcf_consensus_builder
|
1e2f0312810b183edf368b33086475318a779b87
|
[
"MIT"
] | null | null | null |
tests/test_get_super_records_from_interval_tree.py
|
leoisl/vcf_consensus_builder
|
1e2f0312810b183edf368b33086475318a779b87
|
[
"MIT"
] | null | null | null |
tests/test_get_super_records_from_interval_tree.py
|
leoisl/vcf_consensus_builder
|
1e2f0312810b183edf368b33086475318a779b87
|
[
"MIT"
] | null | null | null |
import unittest
from io import StringIO
from vcf_consensus_builder.vcf_io import read_vcf
from vcf_consensus_builder.vcf_consensus_builder_core import get_super_records_from_interval_tree, InconsistentVCFException
from intervaltree import Interval, IntervalTree
class Test_get_gt_from_sample_info(unittest.TestCase):
def test_get_super_records_from_interval_tree___no_overlaps___everything_is_a_super_record(self):
interval_tree = IntervalTree([Interval(2, 3, ('G', 'A')), Interval(3, 8, ('ACCGT', 'CCCC')), Interval(10, 13, ('GGA', 'TTT'))])
expected = get_super_records_from_interval_tree(interval_tree)
actual = interval_tree
self.assertEqual(actual, expected)
def test_get_super_records_from_interval_tree___several_overlaps(self):
interval_tree = IntervalTree([Interval(2, 3, ('G', 'A')),
Interval(3, 4, ('A', 'C')),
Interval(3, 5, ('AC', 'CC')),
Interval(3, 6, ('ACC', 'CCC')),
Interval(3, 7, ('ACCG', 'CCC')),
Interval(3, 8, ('ACCGT', 'CCCC')),
Interval(10, 13, ('GGA', 'TTT'))])
expected = IntervalTree([Interval(2, 3, ('G', 'A')), Interval(3, 8, ('ACCGT', 'CCCC')), Interval(10, 13, ('GGA', 'TTT'))])
actual = get_super_records_from_interval_tree(interval_tree)
self.assertEqual(actual, expected)
def test_get_super_records_from_interval_tree___one_overlap_begin_match(self):
interval_tree = IntervalTree([Interval(2, 3, ('G', 'A')),
Interval(3, 10, ('ACCGTGG', 'CCCCA')),
Interval(3, 8, ('ACCGT', 'CCCC')),
Interval(10, 13, ('GGA', 'TTT'))])
expected = IntervalTree(
[Interval(2, 3, ('G', 'A')), Interval(3, 10, ('ACCGTGG', 'CCCCA')), Interval(10, 13, ('GGA', 'TTT'))])
actual = get_super_records_from_interval_tree(interval_tree)
self.assertEqual(actual, expected)
def test_get_super_records_from_interval_tree___one_overlap_totally_inside(self):
interval_tree = IntervalTree([Interval(2, 3, ('G', 'A')),
Interval(4, 7, ('CCG', 'CC')),
Interval(3, 8, ('ACCGT', 'CCCC')),
Interval(10, 13, ('GGA', 'TTT'))])
expected = IntervalTree(
[Interval(2, 3, ('G', 'A')), Interval(3, 8, ('ACCGT', 'CCCC')), Interval(10, 13, ('GGA', 'TTT'))])
actual = get_super_records_from_interval_tree(interval_tree)
self.assertEqual(actual, expected)
def test_get_super_records_from_interval_tree___one_overlap_end_match(self):
interval_tree = IntervalTree([Interval(2, 3, ('G', 'A')),
Interval(6, 8, ('GT', 'CC')),
Interval(3, 8, ('ACCGT', 'CCCC')),
Interval(10, 13, ('GGA', 'TTT'))])
expected = IntervalTree(
[Interval(2, 3, ('G', 'A')), Interval(3, 8, ('ACCGT', 'CCCC')), Interval(10, 13, ('GGA', 'TTT'))])
actual = get_super_records_from_interval_tree(interval_tree)
self.assertEqual(actual, expected)
def test_get_super_records_from_interval_tree___one_overlap_ref_not_consistent_in_the_begin(self):
interval_tree = IntervalTree([Interval(2, 3, ('G', 'A')),
Interval(3, 5, ('AG', 'CC')),
Interval(3, 8, ('ACCGT', 'CCCC')),
Interval(10, 13, ('GGA', 'TTT'))])
self.assertRaises(InconsistentVCFException, get_super_records_from_interval_tree, interval_tree)
def test_get_super_records_from_interval_tree___one_overlap_ref_not_consistent_in_the_middle(self):
interval_tree = IntervalTree([Interval(2, 3, ('G', 'A')),
Interval(5, 6, ('A', 'C')),
Interval(3, 8, ('ACCGT', 'CCCC')),
Interval(10, 13, ('GGA', 'TTT'))])
self.assertRaises(InconsistentVCFException, get_super_records_from_interval_tree, interval_tree)
def test_get_super_records_from_interval_tree___one_overlap_ref_not_consistent_in_the_end(self):
interval_tree = IntervalTree([Interval(2, 3, ('G', 'A')),
Interval(7, 8, ('A', 'C')),
Interval(3, 8, ('ACCGT', 'CCCC')),
Interval(10, 13, ('GGA', 'TTT'))])
self.assertRaises(InconsistentVCFException, get_super_records_from_interval_tree, interval_tree)
| 62.141026
| 135
| 0.552919
| 525
| 4,847
| 4.748571
| 0.139048
| 0.163658
| 0.102286
| 0.129563
| 0.863217
| 0.842359
| 0.829924
| 0.829924
| 0.797433
| 0.797433
| 0
| 0.035088
| 0.306169
| 4,847
| 77
| 136
| 62.948052
| 0.706215
| 0
| 0
| 0.567164
| 0
| 0
| 0.05261
| 0
| 0
| 0
| 0
| 0
| 0.119403
| 1
| 0.119403
| false
| 0
| 0.074627
| 0
| 0.208955
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4e77e2636e8bae731b5a29b01f700bfd6f96998b
| 38
|
py
|
Python
|
qflow/samplers/__init__.py
|
johanere/qflow
|
5453cd5c3230ad7f082adf9ec1aea63ab0a4312a
|
[
"MIT"
] | 5
|
2019-07-24T21:46:24.000Z
|
2021-06-11T18:18:24.000Z
|
qflow/samplers/__init__.py
|
johanere/qflow
|
5453cd5c3230ad7f082adf9ec1aea63ab0a4312a
|
[
"MIT"
] | 22
|
2019-02-19T10:49:26.000Z
|
2019-07-18T09:42:13.000Z
|
qflow/samplers/__init__.py
|
bsamseth/FYS4411
|
72b879e7978364498c48fc855b5df676c205f211
|
[
"MIT"
] | 2
|
2020-11-04T15:17:24.000Z
|
2021-11-03T16:37:38.000Z
|
from _qflow_backend.samplers import *
| 19
| 37
| 0.842105
| 5
| 38
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 38
| 1
| 38
| 38
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4e846701f3ef6faca608f8391ba8b6e5c5efaa48
| 26
|
py
|
Python
|
env/Lib/site-packages/win32/trace/__init__.py
|
Daniel-Key/HearStone-Python
|
981584d2b9502319393bd92b48f0ec8d906b4d44
|
[
"MIT"
] | null | null | null |
env/Lib/site-packages/win32/trace/__init__.py
|
Daniel-Key/HearStone-Python
|
981584d2b9502319393bd92b48f0ec8d906b4d44
|
[
"MIT"
] | 1
|
2020-10-27T14:44:08.000Z
|
2020-10-27T14:44:08.000Z
|
env/Lib/site-packages/win32/trace/__init__.py
|
Daniel-Key/HearStone-Python
|
981584d2b9502319393bd92b48f0ec8d906b4d44
|
[
"MIT"
] | null | null | null |
from win32._trace import *
| 26
| 26
| 0.807692
| 4
| 26
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 0.115385
| 26
| 1
| 26
| 26
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4eaacb9a993cb7937382b6a8c692577be97599c2
| 48
|
py
|
Python
|
ex.py
|
MayZinThwe/python-exercises
|
7a7bcd7b0a967efb8e5140ab2486036a1defa551
|
[
"MIT"
] | null | null | null |
ex.py
|
MayZinThwe/python-exercises
|
7a7bcd7b0a967efb8e5140ab2486036a1defa551
|
[
"MIT"
] | null | null | null |
ex.py
|
MayZinThwe/python-exercises
|
7a7bcd7b0a967efb8e5140ab2486036a1defa551
|
[
"MIT"
] | null | null | null |
print("Hello ! Welcome to my python exercises")
| 24
| 47
| 0.75
| 7
| 48
| 5.142857
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 48
| 1
| 48
| 48
| 0.878049
| 0
| 0
| 0
| 0
| 0
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
14d074f25c20677fce7cfabae54b6f08dabd5e8c
| 41
|
py
|
Python
|
apiai_assistant/__init__.py
|
toasterco/apiaiassistant
|
7f3f0693c4c5aa9f1fd4486f85ebe05080505dc8
|
[
"MIT"
] | 6
|
2017-08-10T16:08:03.000Z
|
2018-08-03T23:36:20.000Z
|
apiai_assistant/__init__.py
|
toasterco/apiaiassistant
|
7f3f0693c4c5aa9f1fd4486f85ebe05080505dc8
|
[
"MIT"
] | 1
|
2018-03-23T14:12:36.000Z
|
2018-03-23T15:40:33.000Z
|
apiai_assistant/__init__.py
|
toasterco/apiaiassistant
|
7f3f0693c4c5aa9f1fd4486f85ebe05080505dc8
|
[
"MIT"
] | null | null | null |
from .assistant import Assistant # NOQA
| 20.5
| 40
| 0.780488
| 5
| 41
| 6.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170732
| 41
| 1
| 41
| 41
| 0.941176
| 0.097561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
093c834f97c9fe7e18631ccb68fcd91e27eaaaa6
| 29
|
py
|
Python
|
Configuration/__init__.py
|
olmedoluis/pix
|
872fc75a3cef0d8cb152b1565a831874b9fd3fb5
|
[
"MIT"
] | null | null | null |
Configuration/__init__.py
|
olmedoluis/pix
|
872fc75a3cef0d8cb152b1565a831874b9fd3fb5
|
[
"MIT"
] | null | null | null |
Configuration/__init__.py
|
olmedoluis/pix
|
872fc75a3cef0d8cb152b1565a831874b9fd3fb5
|
[
"MIT"
] | null | null | null |
from .Aliases import ALIASES
| 14.5
| 28
| 0.827586
| 4
| 29
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
093c91e7862f4bc40d6e3817aae211a2aa97b756
| 151
|
py
|
Python
|
src/artifice/paper/routes/about/views.py
|
artifice-project/artifice-paper
|
c20e863ced5364fbdd9142d4e336067504c8341c
|
[
"MIT"
] | null | null | null |
src/artifice/paper/routes/about/views.py
|
artifice-project/artifice-paper
|
c20e863ced5364fbdd9142d4e336067504c8341c
|
[
"MIT"
] | null | null | null |
src/artifice/paper/routes/about/views.py
|
artifice-project/artifice-paper
|
c20e863ced5364fbdd9142d4e336067504c8341c
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
about_blueprint = Blueprint('about', __name__)
@about_blueprint.route('/')
def index():
return '<h1>about page</h1>'
| 18.875
| 46
| 0.715232
| 19
| 151
| 5.368421
| 0.631579
| 0.27451
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015267
| 0.13245
| 151
| 7
| 47
| 21.571429
| 0.763359
| 0
| 0
| 0
| 0
| 0
| 0.165563
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0.6
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
|
0
| 6
|
1198fa49430f402b6046690066983f8f6c69ea77
| 218
|
py
|
Python
|
src/sqlalchemy_fp/setup_with_session.py
|
jackfirth/sqlalchemy-fp
|
d095f644431ebaa3c698e3aa37e28189d4772772
|
[
"MIT"
] | null | null | null |
src/sqlalchemy_fp/setup_with_session.py
|
jackfirth/sqlalchemy-fp
|
d095f644431ebaa3c698e3aa37e28189d4772772
|
[
"MIT"
] | null | null | null |
src/sqlalchemy_fp/setup_with_session.py
|
jackfirth/sqlalchemy-fp
|
d095f644431ebaa3c698e3aa37e28189d4772772
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from sqlalchemy.orm import sessionmaker
from .with_session_from import with_session_from
def setup_with_session(engine):
return with_session_from(sessionmaker(bind=engine))
| 27.25
| 55
| 0.853211
| 30
| 218
| 5.766667
| 0.466667
| 0.254335
| 0.260116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100917
| 218
| 7
| 56
| 31.142857
| 0.882653
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.6
| 0.2
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
11c4cefd849149465abe5ce78f36bd7bdac9dab7
| 71
|
py
|
Python
|
src/tests/TestFlexioFlowConfig.py
|
flexiooss/flexio-flow
|
47491c7e5b49a02dc859028de0d486edc0014b26
|
[
"Apache-2.0"
] | null | null | null |
src/tests/TestFlexioFlowConfig.py
|
flexiooss/flexio-flow
|
47491c7e5b49a02dc859028de0d486edc0014b26
|
[
"Apache-2.0"
] | 44
|
2019-04-05T06:08:15.000Z
|
2021-09-13T19:37:49.000Z
|
src/tests/TestFlexioFlowConfig.py
|
flexiooss/flexio-flow
|
47491c7e5b49a02dc859028de0d486edc0014b26
|
[
"Apache-2.0"
] | null | null | null |
import unittest
class TestFlexioFlowConfig(unittest.TestCase):
pass
| 14.2
| 46
| 0.830986
| 7
| 71
| 8.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 71
| 5
| 47
| 14.2
| 0.936508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
eef9a7e394a497907d65359f804d4d824d647dea
| 134
|
py
|
Python
|
gsfarc/gptool/parameter/templates/floatarray.py
|
geospatial-services-framework/gsfpyarc
|
5ef69299fbc0b763ad4c1857ceac3ff087c0dc14
|
[
"MIT"
] | 1
|
2021-11-06T18:36:28.000Z
|
2021-11-06T18:36:28.000Z
|
gsfarc/gptool/parameter/templates/floatarray.py
|
geospatial-services-framework/gsfpyarc
|
5ef69299fbc0b763ad4c1857ceac3ff087c0dc14
|
[
"MIT"
] | null | null | null |
gsfarc/gptool/parameter/templates/floatarray.py
|
geospatial-services-framework/gsfpyarc
|
5ef69299fbc0b763ad4c1857ceac3ff087c0dc14
|
[
"MIT"
] | null | null | null |
"""
"""
from .basicarray import BASICARRAY
class FLOATARRAY(BASICARRAY): pass
def template():
return FLOATARRAY('GPDouble')
| 10.307692
| 34
| 0.701493
| 13
| 134
| 7.230769
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171642
| 134
| 12
| 35
| 11.166667
| 0.846847
| 0
| 0
| 0
| 0
| 0
| 0.063492
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
014595580c58dd07e95e136c0dcb369a698c0030
| 28
|
py
|
Python
|
libsaas/services/stripe/__init__.py
|
MidtownFellowship/libsaas
|
541bb731b996b08ede1d91a235cb82895765c38a
|
[
"MIT"
] | 155
|
2015-01-27T15:17:59.000Z
|
2022-02-20T00:14:08.000Z
|
libsaas/services/stripe/__init__.py
|
MidtownFellowship/libsaas
|
541bb731b996b08ede1d91a235cb82895765c38a
|
[
"MIT"
] | 14
|
2015-01-12T08:22:37.000Z
|
2021-06-16T19:49:31.000Z
|
libsaas/services/stripe/__init__.py
|
MidtownFellowship/libsaas
|
541bb731b996b08ede1d91a235cb82895765c38a
|
[
"MIT"
] | 43
|
2015-01-28T22:41:45.000Z
|
2021-09-21T04:44:26.000Z
|
from .service import Stripe
| 14
| 27
| 0.821429
| 4
| 28
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
017d813ce598faf8267045475d05bc2ecd51a224
| 2,701
|
py
|
Python
|
Voltron/unit_tests/Algorithms/test_binary_search.py
|
ernestyalumni/HrdwCCppCUDA
|
17ed937dea06431a4d5ca103f993ea69a6918734
|
[
"MIT"
] | 1
|
2018-02-09T19:44:51.000Z
|
2018-02-09T19:44:51.000Z
|
Voltron/unit_tests/Algorithms/test_binary_search.py
|
ernestyalumni/HrdwCCppCUDA
|
17ed937dea06431a4d5ca103f993ea69a6918734
|
[
"MIT"
] | null | null | null |
Voltron/unit_tests/Algorithms/test_binary_search.py
|
ernestyalumni/HrdwCCppCUDA
|
17ed937dea06431a4d5ca103f993ea69a6918734
|
[
"MIT"
] | null | null | null |
from Voltron.Algorithms.binary_search import (
binary_search,
binary_search_iterative,
binary_search_recursive,
calculate_midpoint,
quick_calculate_midpoint_index)
import pytest
def test_calculate_midpoint_works_for_odd_number_of_elements():
x = calculate_midpoint(0, 4)
assert x == 2
x = calculate_midpoint(0, 6)
assert x == 3
x = calculate_midpoint(1, 5)
assert x == 3
x = calculate_midpoint(1, 7)
assert x == 4
x = calculate_midpoint(2, 6)
assert x == 4
x = calculate_midpoint(2, 8)
assert x == 5
def test_calculate_midpoint_gets_left_index_for_even_number_of_elements():
x = calculate_midpoint(0, 3)
assert x == 1
x = calculate_midpoint(0, 5)
assert x == 2
x = calculate_midpoint(1, 6)
assert x == 3
x = calculate_midpoint(1, 8)
assert x == 4
x = calculate_midpoint(2, 7)
assert x == 4
x = calculate_midpoint(2, 9)
assert x == 5
def test_quick_calculate_midpoint_index_for_odd_number_of_elements():
x = quick_calculate_midpoint_index(0, 4)
assert x == 2
x = quick_calculate_midpoint_index(0, 6)
assert x == 3
x = quick_calculate_midpoint_index(1, 5)
assert x == 3
x = quick_calculate_midpoint_index(1, 7)
assert x == 4
x = quick_calculate_midpoint_index(2, 6)
assert x == 4
x = quick_calculate_midpoint_index(2, 8)
assert x == 5
def test_quick_calculate_midpoint_index_for_even_number_of_elements():
x = quick_calculate_midpoint_index(0, 3)
assert x == 1
x = quick_calculate_midpoint_index(0, 5)
assert x == 2
x = quick_calculate_midpoint_index(1, 6)
assert x == 3
x = quick_calculate_midpoint_index(1, 8)
assert x == 4
x = quick_calculate_midpoint_index(2, 7)
assert x == 4
x = quick_calculate_midpoint_index(2, 9)
assert x == 5
def test_binary_search_recursive_works():
element = 18
array = [1, 2, 5, 7, 13, 15, 16, 18, 24, 28, 29]
result = binary_search_recursive(array, element, 0, len(array) - 1)
assert result == 7
# Another example.
element = 20
array = [4, 14, 16, 17, 19, 21, 24, 28, 30, 35, 36, 38, 39, 40, 41, 43]
result = binary_search_recursive(array, element, 0, len(array) - 1)
assert result == None
def test_binary_search_iterative_works():
element = 18
array = [1, 2, 5, 7, 13, 15, 16, 18, 24, 28, 29]
result = binary_search_iterative(array, element)
assert result == 7
# Another example.
element = 20
array = [4, 14, 16, 17, 19, 21, 24, 28, 30, 35, 36, 38, 39, 40, 41, 43]
result = binary_search_iterative(array, element)
assert result == None
| 20.618321
| 75
| 0.6505
| 408
| 2,701
| 4.036765
| 0.154412
| 0.309654
| 0.200364
| 0.245902
| 0.831815
| 0.831815
| 0.756527
| 0.634487
| 0.489982
| 0.367942
| 0
| 0.090596
| 0.248056
| 2,701
| 131
| 76
| 20.618321
| 0.720335
| 0.012218
| 0
| 0.519481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.363636
| 1
| 0.077922
| false
| 0
| 0.025974
| 0
| 0.103896
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6d85fb203936a09fec60dbccfd35255608e5ecac
| 35
|
py
|
Python
|
src/tso/tsocli/command/__init__.py
|
elijah-ward/TSO
|
610565a32284cab23e9262c3431ce6d34116bfcf
|
[
"MIT"
] | 4
|
2018-11-05T21:36:08.000Z
|
2019-04-15T13:05:39.000Z
|
src/tso/tsocli/command/__init__.py
|
elijah-ward/TSO
|
610565a32284cab23e9262c3431ce6d34116bfcf
|
[
"MIT"
] | 2
|
2019-02-23T07:13:40.000Z
|
2019-04-07T17:50:44.000Z
|
src/tso/tsocli/command/__init__.py
|
elijah-ward/TSO
|
610565a32284cab23e9262c3431ce6d34116bfcf
|
[
"MIT"
] | 2
|
2020-12-09T07:03:09.000Z
|
2021-07-17T02:32:46.000Z
|
from .pipeline import cli_pipeline
| 17.5
| 34
| 0.857143
| 5
| 35
| 5.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6d9447fc40311c76af67867565bd9d30f8ca2d3f
| 56
|
py
|
Python
|
maha/parsers/functions/__init__.py
|
saedx1/Maha
|
6158b07cd0d4ff3dcb529c9c49757f8271dc776e
|
[
"BSD-3-Clause"
] | 152
|
2021-09-18T08:18:47.000Z
|
2022-03-14T13:23:17.000Z
|
maha/parsers/functions/__init__.py
|
saedx1/Maha
|
6158b07cd0d4ff3dcb529c9c49757f8271dc776e
|
[
"BSD-3-Clause"
] | 65
|
2021-09-20T06:00:41.000Z
|
2022-03-20T22:44:39.000Z
|
maha/parsers/functions/__init__.py
|
saedx1/Maha
|
6158b07cd0d4ff3dcb529c9c49757f8271dc776e
|
[
"BSD-3-Clause"
] | 10
|
2021-09-18T11:56:57.000Z
|
2021-11-20T09:05:16.000Z
|
from .parse_dimensions import *
from .parse_fn import *
| 18.666667
| 31
| 0.785714
| 8
| 56
| 5.25
| 0.625
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 56
| 2
| 32
| 28
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6db1ace45f02c1eed6ed534d5be30ab9f13fb813
| 258
|
py
|
Python
|
pystibmvib/__init__.py
|
helldog136/pystibmvib
|
390f58c13bec3b1b868955cde1d88fb99d649808
|
[
"MIT"
] | 4
|
2020-02-28T00:28:54.000Z
|
2021-03-03T17:13:53.000Z
|
pystibmvib/__init__.py
|
helldog136/pystibmvib
|
390f58c13bec3b1b868955cde1d88fb99d649808
|
[
"MIT"
] | 3
|
2020-03-05T09:03:36.000Z
|
2020-05-25T19:59:12.000Z
|
pystibmvib/__init__.py
|
helldog136/pystibmvib
|
390f58c13bec3b1b868955cde1d88fb99d649808
|
[
"MIT"
] | 3
|
2020-03-26T16:56:28.000Z
|
2021-03-03T15:01:35.000Z
|
"""Initialize the package."""
from pystibmvib.client import AbstractSTIBAPIClient, STIBAPIClient
from pystibmvib.service import STIBService, InvalidLineFilterException
from pystibmvib.service import ShapefileService
from .domain import *
NAME = "pystibmvib"
| 36.857143
| 70
| 0.841085
| 25
| 258
| 8.68
| 0.6
| 0.193548
| 0.193548
| 0.248848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 258
| 7
| 71
| 36.857143
| 0.92735
| 0.089147
| 0
| 0
| 0
| 0
| 0.043478
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6db6657ac576abbe50f872b403fdf4609693666f
| 155
|
py
|
Python
|
exercicios/ex097 - um print especial.py
|
ErisonSandro/Exercicios-Python
|
7fd391fa87a25635cf85921b303f87d9f46854ee
|
[
"MIT"
] | null | null | null |
exercicios/ex097 - um print especial.py
|
ErisonSandro/Exercicios-Python
|
7fd391fa87a25635cf85921b303f87d9f46854ee
|
[
"MIT"
] | null | null | null |
exercicios/ex097 - um print especial.py
|
ErisonSandro/Exercicios-Python
|
7fd391fa87a25635cf85921b303f87d9f46854ee
|
[
"MIT"
] | null | null | null |
def escreva(tam):
print('='*len(tam))
print(tam)
print('='*len(tam))
escreva('Nene Sandro')
escreva('Curso python do youtube')
escreva('CeV')
| 17.222222
| 34
| 0.625806
| 21
| 155
| 4.619048
| 0.571429
| 0.247423
| 0.226804
| 0.28866
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 155
| 9
| 35
| 17.222222
| 0.746154
| 0
| 0
| 0.285714
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0
| 0
| 0.142857
| 0.428571
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
6dc793dbceaaa7c94a1de5db2221d1c7637d04e6
| 33
|
py
|
Python
|
sepc/self_mmdet/ops/dcn/__init__.py
|
implus/SEPC
|
51e24ace1653cba6d3bc0ab536c6adb3b956c8dd
|
[
"Apache-2.0"
] | 2
|
2020-04-27T06:30:32.000Z
|
2020-04-27T06:30:34.000Z
|
sepc/self_mmdet/ops/dcn/__init__.py
|
yhl41001/SEPC
|
51e24ace1653cba6d3bc0ab536c6adb3b956c8dd
|
[
"Apache-2.0"
] | null | null | null |
sepc/self_mmdet/ops/dcn/__init__.py
|
yhl41001/SEPC
|
51e24ace1653cba6d3bc0ab536c6adb3b956c8dd
|
[
"Apache-2.0"
] | 1
|
2021-03-23T01:39:30.000Z
|
2021-03-23T01:39:30.000Z
|
from .sepc_dconv import sepc_conv
| 33
| 33
| 0.878788
| 6
| 33
| 4.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 33
| 1
| 33
| 33
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6ddcebed8ed47e00da600032d6bb96cc07e6662a
| 118
|
py
|
Python
|
data/__all_models.py
|
k-shnyrev/Mos-Quest
|
c48b05433f493fa95af98d9ae837583f4d28af94
|
[
"MIT"
] | null | null | null |
data/__all_models.py
|
k-shnyrev/Mos-Quest
|
c48b05433f493fa95af98d9ae837583f4d28af94
|
[
"MIT"
] | null | null | null |
data/__all_models.py
|
k-shnyrev/Mos-Quest
|
c48b05433f493fa95af98d9ae837583f4d28af94
|
[
"MIT"
] | null | null | null |
"""
Подключает модели для работы с базой данных
"""
from . import users
from . import questions
from . import answers
| 16.857143
| 43
| 0.745763
| 16
| 118
| 5.5
| 0.75
| 0.340909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177966
| 118
| 6
| 44
| 19.666667
| 0.907216
| 0.364407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a304eaa2bcff84ca69e7d91c6856dd68f1a294bc
| 21
|
py
|
Python
|
tests/workflowtests/test_files/workflow_manager/wf_instances_prepared/instance1/t1.py
|
soerenray/MatFlow
|
db0c8311262738264f1c525b8266a2bf52a7b7e6
|
[
"MIT"
] | null | null | null |
tests/workflowtests/test_files/workflow_manager/wf_instances_prepared/instance1/t1.py
|
soerenray/MatFlow
|
db0c8311262738264f1c525b8266a2bf52a7b7e6
|
[
"MIT"
] | null | null | null |
tests/workflowtests/test_files/workflow_manager/wf_instances_prepared/instance1/t1.py
|
soerenray/MatFlow
|
db0c8311262738264f1c525b8266a2bf52a7b7e6
|
[
"MIT"
] | null | null | null |
"im a dag def file!"
| 10.5
| 20
| 0.619048
| 5
| 21
| 2.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.238095
| 21
| 1
| 21
| 21
| 0.8125
| 0.857143
| 0
| 0
| 0
| 0
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0967a8aa3a6e383b321020d6670d35cc747826b5
| 2,031
|
py
|
Python
|
gameComponents/gameComparison.py
|
Ty-Allen/Allen_T_RPS_Fall2020
|
ed955896504af65c55829095d05a0599e7284631
|
[
"MIT"
] | null | null | null |
gameComponents/gameComparison.py
|
Ty-Allen/Allen_T_RPS_Fall2020
|
ed955896504af65c55829095d05a0599e7284631
|
[
"MIT"
] | null | null | null |
gameComponents/gameComparison.py
|
Ty-Allen/Allen_T_RPS_Fall2020
|
ed955896504af65c55829095d05a0599e7284631
|
[
"MIT"
] | null | null | null |
from gameComponents import gameVars
from random import randint
# # this will be the AI choice -> a random pick from the choices array
# computer_choice = gameVars.choices[randint(0, 2)]
# # just validating that I can make a choice
# # print outputs whatever is inside the brackets
# # check to see what the user input
# print("user chose: " + gameVars.user_choice)
# # validate that the random choice worked for the AI
# print("AI chose: " + computer_choice)
def comparison(user_choice):
# this will be the AI choice -> a random pick from the choices array
computer_choice = gameVars.choices[randint(0, 2)]
# just validating that I can make a choice
# print outputs whatever is inside the brackets
# check to see what the user input
print("user chose: " + gameVars.user_choice)
# validate that the random choice worked for the AI
print("AI chose: " + computer_choice)
if (computer_choice == gameVars.user_choice):
print("tie")
elif (computer_choice == "rock"):
if (gameVars.user_choice == "scissors"):
gameVars.user_lives -= 1
print("You lose! player lives:", gameVars.user_lives)
else:
print("""
_______
---' ____)____
______)
_______)
_______)
---.__________) You win!
""")
gameVars.computer_lives -= 1
elif (computer_choice == "paper"):
if (gameVars.user_choice == "scissors"):
print("""
_______
---' ____)____
______)
__________)
(____)
---.__(___) You win!
""")
gameVars.computer_lives -= 1
else:
gameVars.user_lives -= 1
print("You lose! player lives:", gameVars.user_lives)
elif (computer_choice == "scissors"):
if (gameVars.user_choice == "paper"):
gameVars.user_lives -= 1
print("You lose! player lives:", gameVars.user_lives)
else:
print("""
_______
---' ____)
(_____)
(_____)
(____)
---.__(___) You win!
""")
gameVars.computer_lives -= 1
| 27.445946
| 70
| 0.624323
| 231
| 2,031
| 4.887446
| 0.242424
| 0.127547
| 0.09566
| 0.053144
| 0.805137
| 0.755536
| 0.755536
| 0.726306
| 0.726306
| 0.726306
| 0
| 0.006645
| 0.258986
| 2,031
| 73
| 71
| 27.821918
| 0.743522
| 0.303791
| 0
| 0.7
| 0
| 0
| 0.391679
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02
| false
| 0
| 0.04
| 0
| 0.06
| 0.18
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0981df23baad1e3539b52fe250ded26d372793bc
| 31
|
py
|
Python
|
dttpy/__init__.py
|
neouniverse/dttpy
|
c5ff8870d796d84b39c4e6f82ec4eefe523cc3e7
|
[
"MIT"
] | null | null | null |
dttpy/__init__.py
|
neouniverse/dttpy
|
c5ff8870d796d84b39c4e6f82ec4eefe523cc3e7
|
[
"MIT"
] | null | null | null |
dttpy/__init__.py
|
neouniverse/dttpy
|
c5ff8870d796d84b39c4e6f82ec4eefe523cc3e7
|
[
"MIT"
] | null | null | null |
#
from .dttdata import DttData
| 10.333333
| 28
| 0.774194
| 4
| 31
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 31
| 2
| 29
| 15.5
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0994a1d7e4bb4b426765a910c6895428b3f3c6ec
| 22
|
py
|
Python
|
projecc/__init__.py
|
logan-pearce/projecc
|
b75005124493309402e7102f43acab360c994f14
|
[
"MIT"
] | null | null | null |
projecc/__init__.py
|
logan-pearce/projecc
|
b75005124493309402e7102f43acab360c994f14
|
[
"MIT"
] | null | null | null |
projecc/__init__.py
|
logan-pearce/projecc
|
b75005124493309402e7102f43acab360c994f14
|
[
"MIT"
] | null | null | null |
from .projecc import *
| 22
| 22
| 0.772727
| 3
| 22
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
099e8cb176868b56924e1749e8be87b8a43b305a
| 31
|
py
|
Python
|
streamsvg/__init__.py
|
ysig/streamsvg
|
ee906fadacf0b016e519548cb2fea21c27748f51
|
[
"MIT"
] | null | null | null |
streamsvg/__init__.py
|
ysig/streamsvg
|
ee906fadacf0b016e519548cb2fea21c27748f51
|
[
"MIT"
] | null | null | null |
streamsvg/__init__.py
|
ysig/streamsvg
|
ee906fadacf0b016e519548cb2fea21c27748f51
|
[
"MIT"
] | null | null | null |
from .StreamSVG import Drawing
| 15.5
| 30
| 0.83871
| 4
| 31
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
09f43206a23393dc138c5147013c4fe0a841a1a1
| 189
|
py
|
Python
|
iris_sdk/models/data/listing_name.py
|
NumberAI/python-bandwidth-iris
|
0e05f79d68b244812afb97e00fd65b3f46d00aa3
|
[
"MIT"
] | 2
|
2020-04-13T13:47:59.000Z
|
2022-02-23T20:32:41.000Z
|
iris_sdk/models/data/listing_name.py
|
bandwidthcom/python-bandwidth-iris
|
dbcb30569631395041b92917252d913166f7d3c9
|
[
"MIT"
] | 5
|
2020-09-18T20:59:24.000Z
|
2021-08-25T16:51:42.000Z
|
iris_sdk/models/data/listing_name.py
|
bandwidthcom/python-bandwidth-iris
|
dbcb30569631395041b92917252d913166f7d3c9
|
[
"MIT"
] | 5
|
2018-12-12T14:39:50.000Z
|
2020-11-17T21:42:29.000Z
|
#!/usr/bin/env python
from iris_sdk.models.base_resource import BaseData
from iris_sdk.models.maps.listing_name import ListingNameMap
class ListingName(ListingNameMap, BaseData):
pass
| 27
| 60
| 0.825397
| 26
| 189
| 5.846154
| 0.730769
| 0.105263
| 0.144737
| 0.223684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100529
| 189
| 7
| 61
| 27
| 0.894118
| 0.10582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 6
|
1129b3d57efab1e8a920d2384648783ef2ad4acd
| 272
|
py
|
Python
|
neuralcorefres/feature_extraction/__init__.py
|
RyanElliott10/NeuralCorefRes
|
a0ca5c614cc1638ab7bd230fcfefbd26120ed800
|
[
"MIT"
] | 2
|
2020-02-23T01:00:22.000Z
|
2020-06-17T21:39:57.000Z
|
neuralcorefres/feature_extraction/__init__.py
|
RyanElliott10/NeuralCorefRes
|
a0ca5c614cc1638ab7bd230fcfefbd26120ed800
|
[
"MIT"
] | 9
|
2020-02-27T01:08:55.000Z
|
2022-03-12T00:16:12.000Z
|
neuralcorefres/feature_extraction/__init__.py
|
RyanElliott10/NeuralCorefRes
|
a0ca5c614cc1638ab7bd230fcfefbd26120ed800
|
[
"MIT"
] | null | null | null |
from neuralcorefres.feature_extraction.gender_classifier import *
from neuralcorefres.feature_extraction.stanford_parse_api import *
from neuralcorefres.feature_extraction.util import *
__all__ = [
"GenderClassifier",
"StanfordParseAPI",
"findall_entities"
]
| 27.2
| 66
| 0.808824
| 26
| 272
| 8.038462
| 0.615385
| 0.258373
| 0.358852
| 0.502392
| 0.392345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 272
| 9
| 67
| 30.222222
| 0.870833
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.375
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
3a0886451e0c34d2a759ea54b7b873f34baba340
| 3,211
|
py
|
Python
|
tests/test_find.py
|
arup-group/mc
|
50b8faa8b9d40dece88e0a27f911edd427ebc064
|
[
"MIT"
] | null | null | null |
tests/test_find.py
|
arup-group/mc
|
50b8faa8b9d40dece88e0a27f911edd427ebc064
|
[
"MIT"
] | 12
|
2021-12-14T15:10:43.000Z
|
2022-03-31T13:39:25.000Z
|
tests/test_find.py
|
arup-group/mc
|
50b8faa8b9d40dece88e0a27f911edd427ebc064
|
[
"MIT"
] | null | null | null |
"""
Find method tests.
"""
import pytest
import env
env.set_module()
from mc.base import BaseConfig
def test_test_env_paths():
assert env.test_xml_path.exists()
@pytest.fixture
def config():
return BaseConfig(path=env.test_xml_path)
def test_find_nothing(config):
assert config.find('') == []
def test_find_module(config):
assert config.find("controler")[0].class_type == 'module'
def test_find_param_at_module_level(config):
params = config.find("transitModes")
assert len(params) == 1
assert params[0].value == 'bus,train'
def test_find_param_at_paramset_level(config):
params = config.find("earlyDeparture")
assert len(params) == 2
assert params[0].value == '-0.0'
def test_find_module_param_at_module_level(config):
params = config.find("transit/transitModes")
assert len(params) == 1
assert params[0].value == 'bus,train'
def test_find_all_param_at_module_level(config):
params = config.find("*/transitModes")
assert len(params) == 1
assert params[0].value == 'bus,train'
def test_find_paramset_at_module_level(config):
paramsets = config.find("scoringParameters:default")
assert len(paramsets) == 1
assert paramsets[0].class_type == 'paramset'
def test_find_paramsets_at_module_level(config):
paramsets = config.find("scoringParameters:*")
assert len(paramsets) == 2
assert paramsets[0].class_type == 'paramset'
def test_find_module_paramset_at_module_level(config):
paramsets = config.find("planCalcScore/scoringParameters:default")
assert len(paramsets) == 1
assert paramsets[0].class_type == 'paramset'
def test_find_module_paramsets_at_module_level(config):
paramsets = config.find("planCalcScore/scoringParameters:*")
assert len(paramsets) == 2
assert paramsets[0].class_type == 'paramset'
def test_find_all_paramset_at_module_level(config):
paramsets = config.find("*/scoringParameters:default")
assert len(paramsets) == 1
assert paramsets[0].class_type == 'paramset'
def test_find_all_paramsets_at_module_level(config):
paramsets = config.find("*/scoringParameters:*")
assert len(paramsets) == 2
assert paramsets[0].class_type == 'paramset'
def test_find_paramsets_at_paramsets_level(config):
paramsets = config.find("scoringParameters:*/activityParams:*")
assert len(paramsets) == 6
assert paramsets[0].class_type == 'paramset'
def test_find_paramset_at_paramsets_level(config):
paramsets = config.find("scoringParameters:default/activityParams:*")
assert len(paramsets) == 3
assert paramsets[0].class_type == 'paramset'
def test_find_param_at_paramsets_level(config):
paramsets = config.find("scoringParameters:default/activityParams:work/priority")
assert len(paramsets) == 1
assert paramsets[0].value == '1.0'
def test_find_params_at_paramsets_level(config):
paramsets = config.find("activityParams:work/priority")
assert len(paramsets) == 2
assert paramsets[0].value == '1.0'
def test_find_params_at_nested_paramsets_level(config):
paramsets = config.find("scoringParameters:default/priority")
assert len(paramsets) == 3
assert paramsets[0].value == '1.0'
| 27.211864
| 85
| 0.730925
| 412
| 3,211
| 5.446602
| 0.126214
| 0.05615
| 0.083333
| 0.127451
| 0.816845
| 0.790998
| 0.770945
| 0.712567
| 0.634135
| 0.549465
| 0
| 0.014213
| 0.145438
| 3,211
| 117
| 86
| 27.444444
| 0.803571
| 0.005606
| 0
| 0.369863
| 0
| 0
| 0.168603
| 0.106436
| 0
| 0
| 0
| 0
| 0.452055
| 1
| 0.260274
| false
| 0
| 0.041096
| 0.013699
| 0.315068
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3a08bc1756cf760025902de0fb56b3a464b05480
| 67
|
py
|
Python
|
odin/visual/__init__.py
|
trungnt13/odin_old
|
e5f44f9b6c483d6498767899315ae56e06fe36c4
|
[
"MIT"
] | 2
|
2016-02-24T20:41:08.000Z
|
2016-02-29T02:25:16.000Z
|
odin/visual/__init__.py
|
trungnt13/odin
|
e5f44f9b6c483d6498767899315ae56e06fe36c4
|
[
"MIT"
] | null | null | null |
odin/visual/__init__.py
|
trungnt13/odin
|
e5f44f9b6c483d6498767899315ae56e06fe36c4
|
[
"MIT"
] | null | null | null |
from .bashplot import *
from .figures import *
from .graph import *
| 22.333333
| 23
| 0.746269
| 9
| 67
| 5.555556
| 0.555556
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164179
| 67
| 3
| 24
| 22.333333
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
28ab0e7f5fa2801370440ba91b3f7b5666767a15
| 5,167
|
py
|
Python
|
theano/sandbox/cuda/tests/test_vector_matrix_dot.py
|
josharian/Theano
|
724a25692090fee26eebf72f5d046ca8662089c1
|
[
"BSD-3-Clause"
] | 1
|
2016-05-07T14:52:38.000Z
|
2016-05-07T14:52:38.000Z
|
theano/sandbox/cuda/tests/test_vector_matrix_dot.py
|
josharian/Theano
|
724a25692090fee26eebf72f5d046ca8662089c1
|
[
"BSD-3-Clause"
] | null | null | null |
theano/sandbox/cuda/tests/test_vector_matrix_dot.py
|
josharian/Theano
|
724a25692090fee26eebf72f5d046ca8662089c1
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy
import theano
# Skip test if cuda_ndarray is not available.
from nose.plugins.skip import SkipTest
import theano.sandbox.cuda as cuda_ndarray
if cuda_ndarray.cuda_available == False:
raise SkipTest('Optional package cuda disabled')
import theano.sandbox.cuda as cuda
import theano.sandbox.cuda.blas as blasop
### Tolerance factor used in this tests !!!
atol = 1e-6
##########################
if theano.config.mode=='FAST_COMPILE':
mode_with_gpu = theano.compile.mode.get_mode('FAST_RUN').including('gpu')
mode_without_gpu = theano.compile.mode.get_mode('FAST_RUN').excluding('gpu')
else:
mode_with_gpu = theano.compile.mode.get_default_mode().including('gpu')
mode_without_gpu = theano.compile.mode.get_default_mode().excluding('gpu')
def test_dot_vm():
''' Test vector dot matrix '''
v = theano.shared( numpy.array(numpy.random.rand(2), dtype='float32'))
m = theano.shared( numpy.array(numpy.random.rand(2,5),
dtype='float32'))
no_gpu_f = theano.function([], theano.dot(v,m), mode = mode_without_gpu)
gpu_f = theano.function([], theano.dot(v,m), mode = mode_with_gpu)
#gpu_f2 is needed to test the case when the input is not on the gpu
#but the output is moved to the gpu.
gpu_f2 = theano.function([], cuda.gpu_from_host(theano.dot(v,m)), mode = mode_with_gpu)
# Assert they produce the same output
assert numpy.allclose(no_gpu_f(), gpu_f(), atol = atol)
assert numpy.allclose(no_gpu_f(), gpu_f2(), atol = atol)
# Assert that the gpu version actually uses gpu
assert sum([isinstance(node.op, blasop.GpuDot22) for node in
gpu_f.maker.env.toposort() ]) == 1
assert sum([isinstance(node.op, blasop.GpuDot22) for node in
gpu_f2.maker.env.toposort() ]) == 1
def test_dot_mv():
''' Test matrix dot vector '''
v = theano.shared( numpy.array(numpy.random.rand(2), dtype='float32'))
m = theano.shared( numpy.array(numpy.random.rand(5,2),
dtype='float32'))
no_gpu_f = theano.function([], theano.dot(m,v), mode = mode_without_gpu)
gpu_f = theano.function([], theano.dot(m,v), mode = mode_with_gpu)
#gpu_f2 is needed to test the case when the input is not on the gpu
#but the output is moved to the gpu.
gpu_f2 = theano.function([], cuda.gpu_from_host(theano.dot(m,v)), mode = mode_with_gpu)
# Assert they produce the same output
assert numpy.allclose(no_gpu_f(), gpu_f(), atol = atol)
assert numpy.allclose(no_gpu_f(), gpu_f2(), atol = atol)
# Assert that the gpu version actually uses gpu
assert sum([isinstance(node.op, blasop.GpuDot22) for node in
gpu_f.maker.env.toposort() ]) == 1
assert sum([isinstance(node.op, blasop.GpuDot22) for node in
gpu_f2.maker.env.toposort() ]) == 1
def test_gemv1():
''' test vector1+dot(matrix,vector2) '''
v1 = theano.tensor._shared( numpy.array(numpy.random.rand(2) , dtype='float32'))
v2 = theano.tensor._shared( numpy.array(numpy.random.rand(5) , dtype='float32'))
m = theano.tensor._shared( numpy.array(numpy.random.rand(5,2), dtype='float32'))
no_gpu_f = theano.function([], v2+theano.dot(m,v1), mode = mode_without_gpu)
gpu_f = theano.function([], v2+theano.dot(m,v1), mode = mode_with_gpu)
#gpu_f2 is needed to test the case when the input is not on the gpu
#but the output is moved to the gpu.
gpu_f2 = theano.function([], cuda.gpu_from_host(v2+theano.dot(m,v1)), mode = mode_with_gpu)
# Assert they produce the same output
assert numpy.allclose(no_gpu_f(), gpu_f(), atol = atol)
assert numpy.allclose(no_gpu_f(), gpu_f2(), atol = atol)
# Assert that the gpu version actually uses gpu
assert sum([node.op is cuda.blas.gpu_gemm_inplace for node in gpu_f2.maker.env.toposort()]) == 1
assert sum([node.op is cuda.blas.gpu_gemm_inplace for node in gpu_f.maker.env.toposort()]) == 1
def test_gemv2():
''' test vector1+dot(vector2,matrix) '''
v1 = theano.shared( numpy.array(numpy.random.rand(5) , dtype='float32'))
v2 = theano.shared( numpy.array(numpy.random.rand(2) , dtype='float32'))
m = theano.shared( numpy.array(numpy.random.rand(5,2), dtype='float32'))
no_gpu_f = theano.function([], v2+theano.dot(v1,m), mode = mode_without_gpu)
gpu_f = theano.function([], v2+theano.dot(v1,m), mode = mode_with_gpu)
#gpu_f2 is needed to test the case when the input is not on the gpu
#but the output is moved to the gpu.
gpu_f2 = theano.function([], cuda.gpu_from_host(v2+theano.dot(v1,m)), mode = mode_with_gpu)
# Assert they produce the same output
assert numpy.allclose(no_gpu_f(), gpu_f(), atol = atol)
assert numpy.allclose(no_gpu_f(), gpu_f2(), atol = atol)
# Assert that the gpu version actually uses gpu
assert sum([node.op is cuda.blas.gpu_gemm_inplace for node in gpu_f2.maker.env.toposort()]) == 1
assert sum([node.op is cuda.blas.gpu_gemm_inplace for node in gpu_f.maker.env.toposort()]) == 1
if __name__=='__main__':
test_dot_vm()
test_dot_mv()
test_gemv1()
test_gemv2()
| 46.972727
| 100
| 0.673892
| 816
| 5,167
| 4.10049
| 0.131127
| 0.028691
| 0.021518
| 0.062762
| 0.858637
| 0.858637
| 0.839211
| 0.82098
| 0.800956
| 0.72116
| 0
| 0.02191
| 0.187343
| 5,167
| 109
| 101
| 47.40367
| 0.774946
| 0.180375
| 0
| 0.352941
| 0
| 0
| 0.035534
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 1
| 0.058824
| false
| 0
| 0.088235
| 0
| 0.147059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
28c1134ed6d789640d82575c3d2faba6852a9776
| 36
|
py
|
Python
|
holobot/discord/sdk/commands/enums/__init__.py
|
rexor12/holobot
|
89b7b416403d13ccfeee117ef942426b08d3651d
|
[
"MIT"
] | 1
|
2021-05-24T00:17:46.000Z
|
2021-05-24T00:17:46.000Z
|
holobot/discord/sdk/commands/enums/__init__.py
|
rexor12/holobot
|
89b7b416403d13ccfeee117ef942426b08d3651d
|
[
"MIT"
] | 41
|
2021-03-24T22:50:09.000Z
|
2021-12-17T12:15:13.000Z
|
holobot/discord/sdk/commands/enums/__init__.py
|
rexor12/holobot
|
89b7b416403d13ccfeee117ef942426b08d3651d
|
[
"MIT"
] | null | null | null |
from .option_type import OptionType
| 18
| 35
| 0.861111
| 5
| 36
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
28e47cdbdd9a4e8b4f31132283ecb593b8cd4329
| 139
|
py
|
Python
|
pylangacq/__init__.py
|
terrykwon/pylangacq
|
edd94e72e84976992d5bbef4b1232bafc6558820
|
[
"MIT"
] | null | null | null |
pylangacq/__init__.py
|
terrykwon/pylangacq
|
edd94e72e84976992d5bbef4b1232bafc6558820
|
[
"MIT"
] | null | null | null |
pylangacq/__init__.py
|
terrykwon/pylangacq
|
edd94e72e84976992d5bbef4b1232bafc6558820
|
[
"MIT"
] | null | null | null |
from pylangacq.chat import read_chat, Reader
from pylangacq._version import __version__
__all__ = ["__version__", "read_chat", "Reader"]
| 23.166667
| 48
| 0.784173
| 17
| 139
| 5.529412
| 0.470588
| 0.276596
| 0.297872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115108
| 139
| 5
| 49
| 27.8
| 0.764228
| 0
| 0
| 0
| 0
| 0
| 0.18705
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
28f566d5e261ebe50f186052f69726a9f605fc79
| 21
|
py
|
Python
|
dist/micropy-cli/frozen/random.py
|
kevindawson/Pico-Stub
|
6f9112779d4d81f821a3af273a450b9329ccdbab
|
[
"Apache-2.0"
] | 19
|
2021-01-25T23:56:09.000Z
|
2022-02-21T13:55:16.000Z
|
dist/micropy-cli/frozen/random.py
|
kevindawson/Pico-Stub
|
6f9112779d4d81f821a3af273a450b9329ccdbab
|
[
"Apache-2.0"
] | 18
|
2021-02-06T09:03:09.000Z
|
2021-10-04T16:36:35.000Z
|
dist/micropy-cli/frozen/random.py
|
kevindawson/Pico-Stub
|
6f9112779d4d81f821a3af273a450b9329ccdbab
|
[
"Apache-2.0"
] | 6
|
2021-01-26T08:41:47.000Z
|
2021-04-27T11:33:33.000Z
|
from urandom import *
| 21
| 21
| 0.809524
| 3
| 21
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 21
| 1
| 21
| 21
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3a6d86034bb15256a65842b761c119d5b60d213e
| 186
|
py
|
Python
|
ding/framework/middleware/__init__.py
|
Hcnaeg/DI-engine
|
aba0c629f87649854091e9e59d948f83962e3e1e
|
[
"Apache-2.0"
] | null | null | null |
ding/framework/middleware/__init__.py
|
Hcnaeg/DI-engine
|
aba0c629f87649854091e9e59d948f83962e3e1e
|
[
"Apache-2.0"
] | null | null | null |
ding/framework/middleware/__init__.py
|
Hcnaeg/DI-engine
|
aba0c629f87649854091e9e59d948f83962e3e1e
|
[
"Apache-2.0"
] | null | null | null |
from .league_collector import league_collector
from .league_dispatcher import league_dispatcher
from .league_evaluator import league_evaluator
from .league_learner import league_learner
| 37.2
| 48
| 0.892473
| 24
| 186
| 6.583333
| 0.291667
| 0.253165
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086022
| 186
| 4
| 49
| 46.5
| 0.929412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3a806423128b6794e90ae8e23560afc1d1c061f5
| 66
|
py
|
Python
|
pack/__init__.py
|
translationalneurosurgery/teach-pypackaging
|
442d54490c9e54b2e650f504811d9b8eee6d8163
|
[
"MIT"
] | null | null | null |
pack/__init__.py
|
translationalneurosurgery/teach-pypackaging
|
442d54490c9e54b2e650f504811d9b8eee6d8163
|
[
"MIT"
] | null | null | null |
pack/__init__.py
|
translationalneurosurgery/teach-pypackaging
|
442d54490c9e54b2e650f504811d9b8eee6d8163
|
[
"MIT"
] | null | null | null |
from pathlib import Path
print("Importing from", Path(__file__))
| 16.5
| 39
| 0.772727
| 9
| 66
| 5.222222
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 66
| 3
| 40
| 22
| 0.810345
| 0
| 0
| 0
| 0
| 0
| 0.212121
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
3a8e2a5a14b3f53ded0be857f20f503c306ef977
| 115
|
py
|
Python
|
mmcq/color.py
|
kanghyojun/mmcq.py
|
76c898a1717f3e8c985baa3d66758e0586e0979f
|
[
"MIT"
] | 17
|
2019-01-17T19:03:18.000Z
|
2021-12-05T23:23:02.000Z
|
mmcq/color.py
|
admire93/mmcq.py
|
76c898a1717f3e8c985baa3d66758e0586e0979f
|
[
"MIT"
] | 7
|
2015-03-14T06:42:49.000Z
|
2017-07-11T07:51:37.000Z
|
mmcq/color.py
|
admire93/mmcq.py
|
76c898a1717f3e8c985baa3d66758e0586e0979f
|
[
"MIT"
] | 7
|
2015-02-06T21:52:46.000Z
|
2017-07-11T09:18:50.000Z
|
from .constant import SIGBITS
def get_color_index(r, g, b):
return (r << (2 * SIGBITS)) + (g << SIGBITS) + b
| 19.166667
| 52
| 0.608696
| 18
| 115
| 3.777778
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011236
| 0.226087
| 115
| 5
| 53
| 23
| 0.752809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
3a8e6ab5a3a0d8097cad9e3a0c7364d059bdb0a7
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/aiohttp/client_proto.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/aiohttp/client_proto.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/aiohttp/client_proto.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/21/e9/3f/66e4ebff6c45a0ab77d33bde381875c58b7a89713b5d957050f8825d3b
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.427083
| 0
| 96
| 1
| 96
| 96
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3ad604b2a85c05426bea159b69c4870072ae5776
| 139
|
py
|
Python
|
apps/main/context_processors.py
|
wowkin2/django-template
|
7fc6b8ba6123b629c6242edf41f98fc35a81c672
|
[
"MIT"
] | null | null | null |
apps/main/context_processors.py
|
wowkin2/django-template
|
7fc6b8ba6123b629c6242edf41f98fc35a81c672
|
[
"MIT"
] | null | null | null |
apps/main/context_processors.py
|
wowkin2/django-template
|
7fc6b8ba6123b629c6242edf41f98fc35a81c672
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
def debug(context):
return {'DEBUG': settings.DEBUG}
| 15.444444
| 36
| 0.661871
| 19
| 139
| 4.842105
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008621
| 0.165468
| 139
| 8
| 37
| 17.375
| 0.784483
| 0.302158
| 0
| 0
| 0
| 0
| 0.052632
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
3aeda6fb01c15aef433e4777929db98c19553540
| 42
|
py
|
Python
|
xitorch/interpolate/__init__.py
|
mfkasim91/lintorch
|
7a7da4b960e83c07e45ddb999da99510d3c9e909
|
[
"MIT"
] | 4
|
2020-10-15T15:07:54.000Z
|
2022-01-29T23:01:10.000Z
|
xitorch/interpolate/__init__.py
|
mfkasim91/lintorch
|
7a7da4b960e83c07e45ddb999da99510d3c9e909
|
[
"MIT"
] | 7
|
2020-09-16T11:44:34.000Z
|
2020-09-24T13:17:19.000Z
|
xitorch/interpolate/__init__.py
|
mfkasim91/lintorch
|
7a7da4b960e83c07e45ddb999da99510d3c9e909
|
[
"MIT"
] | 2
|
2020-09-17T09:41:33.000Z
|
2020-09-17T10:00:40.000Z
|
from xitorch.interpolate.interp1 import *
| 21
| 41
| 0.833333
| 5
| 42
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 0.095238
| 42
| 1
| 42
| 42
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
aafb5a0b80ebf1847a0d3ea04407d381c52e1429
| 147,129
|
py
|
Python
|
tests/tests.py
|
MCXA/PhenotypeCV
|
5e0bc86682aa7ab85bbdb5d5981f7e67a4f71b64
|
[
"MIT"
] | null | null | null |
tests/tests.py
|
MCXA/PhenotypeCV
|
5e0bc86682aa7ab85bbdb5d5981f7e67a4f71b64
|
[
"MIT"
] | null | null | null |
tests/tests.py
|
MCXA/PhenotypeCV
|
5e0bc86682aa7ab85bbdb5d5981f7e67a4f71b64
|
[
"MIT"
] | 1
|
2020-01-17T04:52:54.000Z
|
2020-01-17T04:52:54.000Z
|
#!/usr/bin/env python
import pytest
import os
import shutil
import numpy as np
import cv2
from plantcv import plantcv as pcv
import plantcv.learn
# Import matplotlib and use a null Template to block plotting to screen
# This will let us test debug = "plot"
import matplotlib
matplotlib.use('Template', warn=False)
TEST_DATA = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data")
TEST_TMPDIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", ".cache")
TEST_COLOR_DIM = (2056, 2454, 3)
TEST_GRAY_DIM = (2056, 2454)
TEST_BINARY_DIM = TEST_GRAY_DIM
TEST_INPUT_COLOR = "input_color_img.jpg"
TEST_INPUT_GRAY = "input_gray_img.jpg"
TEST_INPUT_GRAY_SMALL = "input_gray_img_small.jpg"
TEST_INPUT_BINARY = "input_binary_img.png"
TEST_INPUT_BAYER = "bayer_img.png"
TEST_INPUT_ROI = "input_roi.npz"
TEST_INPUT_CONTOURS = "input_contours.npz"
TEST_INPUT_CONTOURS1 = "input_contours1.npz"
TEST_VIS = "VIS_SV_0_z300_h1_g0_e85_v500_93054.png"
TEST_NIR = "NIR_SV_0_z300_h1_g0_e15000_v500_93059.png"
TEST_VIS_TV = "VIS_TV_0_z300_h1_g0_e85_v500_93054.png"
TEST_NIR_TV = "NIR_TV_0_z300_h1_g0_e15000_v500_93059.png"
TEST_INPUT_MASK = "input_mask.png"
TEST_INPUT_MASK_RESIZE = "input_mask_resize.png"
TEST_INPUT_NIR_MASK = "input_nir.png"
TEST_INPUT_FDARK = "FLUO_TV_dark.png"
TEST_INPUT_FDARK_LARGE = "FLUO_TV_DARK_large"
TEST_INPUT_FMIN = "FLUO_TV_min.png"
TEST_INPUT_FMAX = "FLUO_TV_max.png"
TEST_INPUT_FMASK = "FLUO_TV_MASK.png"
TEST_INTPUT_GREENMAG = "input_green-magenta.jpg"
TEST_INTPUT_MULTI = "multi_ori_image.jpg"
TEST_INPUT_MULTI_OBJECT = "roi_objects.npz"
TEST_INPUT_MULTI_CONTOUR = "multi_contours.npz"
TEST_INPUT_ClUSTER_CONTOUR = "clusters_i.npz"
TEST_INPUT_MULTI_HIERARCHY = "multi_hierarchy.npz"
TEST_INPUT_GENOTXT = "cluster_names.txt"
TEST_INPUT_GENOTXT_TOO_MANY = "cluster_names_too_many.txt"
TEST_INPUT_CROPPED = 'cropped_img.jpg'
TEST_INPUT_CROPPED_MASK = 'cropped-mask.png'
TEST_INPUT_MARKER = 'seed-image.jpg'
TEST_FOREGROUND = "TEST_FOREGROUND.jpg"
TEST_BACKGROUND = "TEST_BACKGROUND.jpg"
TEST_PDFS = "naive_bayes_pdfs.txt"
TEST_PDFS_BAD = "naive_bayes_pdfs_bad.txt"
TEST_VIS_SMALL = "setaria_small_vis.png"
TEST_MASK_SMALL = "setaria_small_mask.png"
TEST_VIS_COMP_CONTOUR = "setaria_composed_contours.npz"
TEST_ACUTE_RESULT = np.asarray([[[119, 285]], [[151, 280]], [[168, 267]], [[168, 262]], [[171, 261]], [[224, 269]],
[[246, 271]], [[260, 277]], [[141, 248]], [[183, 194]], [[188, 237]], [[173, 240]],
[[186, 260]], [[147, 244]], [[163, 246]], [[173, 268]], [[170, 272]], [[151, 320]],
[[195, 289]], [[228, 272]], [[210, 272]], [[209, 247]], [[210, 232]]])
TEST_VIS_SMALL_PLANT = "setaria_small_plant_vis.png"
TEST_MASK_SMALL_PLANT = "setaria_small_plant_mask.png"
TEST_VIS_COMP_CONTOUR_SMALL_PLANT = "setaria_small_plant_composed_contours.npz"
TEST_SAMPLED_RGB_POINTS = "sampled_rgb_points.txt"
TEST_TARGET_IMG = "target_img.png"
TEST_TARGET_IMG_WITH_HEXAGON = "target_img_w_hexagon.png"
TEST_TARGET_IMG_TRIANGLE = "target_img copy.png"
TEST_SOURCE1_IMG = "source1_img.png"
TEST_SOURCE2_IMG = "source2_img.png"
TEST_TARGET_MASK = "mask_img.png"
TEST_TARGET_IMG_COLOR_CARD = "color_card_target.png"
TEST_SOURCE2_MASK = "mask2_img.png"
TEST_TARGET_MATRIX = "target_matrix.npz"
TEST_SOURCE1_MATRIX = "source1_matrix.npz"
TEST_SOURCE2_MATRIX = "source2_matrix.npz"
TEST_MATRIX_B1 = "matrix_b1.npz"
TEST_MATRIX_B2 = "matrix_b2.npz"
TEST_TRANSFORM1 = "transformation_matrix1.npz"
TEST_MATRIX_M1 = "matrix_m1.npz"
TEST_MATRIX_M2 = "matrix_m2.npz"
TEST_S1_CORRECTED = "source_corrected.png"
# ##########################
# Tests setup function
# ##########################
def setup_function():
if not os.path.exists(TEST_TMPDIR):
os.mkdir(TEST_TMPDIR)
# ##########################
# Tests for the main package
# ##########################
def test_plantcv_acute():
# Read in test data
mask = cv2.imread(os.path.join(TEST_DATA, TEST_MASK_SMALL), -1)
contours_npz = np.load(os.path.join(TEST_DATA, TEST_VIS_COMP_CONTOUR), encoding="latin1")
obj_contour = contours_npz['arr_0']
# Test with debug = "print"
pcv.params.debug = "print"
_ = pcv.acute(obj=obj_contour, win=5, thresh=15, mask=mask)
_ = pcv.acute(obj=obj_contour, win=0, thresh=15, mask=mask)
_ = pcv.acute(obj=np.array(([[213,190]],[[83,61]],[[149,246]])), win=84, thresh=192, mask=mask)
_ = pcv.acute(obj=np.array(([[3, 29]], [[31, 102]], [[161, 63]])), win = 148, thresh = 56, mask = mask)
_ = pcv.acute(obj=np.array(([[103, 154]], [[27, 227]], [[152, 83]])), win = 35, thresh = 0, mask = mask)
# Test with debug = None
pcv.params.debug = None
_ = pcv.acute(obj=np.array(([[103, 154]], [[27, 227]], [[152, 83]])), win=35, thresh=0, mask=mask)
_ = pcv.acute(obj=obj_contour, win=0, thresh=15, mask=mask)
homology_pts = pcv.acute(obj=obj_contour, win=5, thresh=15, mask=mask)
assert all([i == j] for i, j in zip(np.shape(homology_pts), (29, 1, 2)))
def test_plantcv_acute_vertex():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_acute_vertex")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_VIS_SMALL))
contours_npz = np.load(os.path.join(TEST_DATA, TEST_VIS_COMP_CONTOUR), encoding="latin1")
obj_contour = contours_npz['arr_0']
# Test with debug = "print"
pcv.params.debug = "print"
_ = pcv.acute_vertex(obj=obj_contour, win=5, thresh=15, sep=5, img=img)
_ = pcv.acute_vertex(obj=[], win=5, thresh=15, sep=5, img=img)
_ = pcv.acute_vertex(obj=[], win=.01, thresh=.01, sep=1, img=img)
# Test with debug = "plot"
pcv.params.debug = "plot"
_ = pcv.acute_vertex(obj=obj_contour, win=5, thresh=15, sep=5, img=img)
# Test with debug = None
pcv.params.debug = None
acute = pcv.acute_vertex(obj=obj_contour, win=5, thresh=15, sep=5, img=img)
assert all([i == j] for i, j in zip(np.shape(acute), np.shape(TEST_ACUTE_RESULT)))
def test_plantcv_acute_vertex_bad_obj():
img = cv2.imread(os.path.join(TEST_DATA, TEST_VIS_SMALL))
obj_contour = np.array([])
pcv.params.debug = None
result = pcv.acute_vertex(obj=obj_contour, win=5, thresh=15, sep=5, img=img)
assert all([i == j] for i, j in zip(result, [0, ("NA", "NA")]))
def test_plantcv_analyze_bound_horizontal():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_analyze_bound_horizontal")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
img_above_bound_only = cv2.imread(os.path.join(TEST_DATA, TEST_MASK_SMALL_PLANT))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
contours_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS), encoding="latin1")
object_contours = contours_npz['arr_0']
# Test with debug = "print"
pcv.params.debug = "print"
_ = pcv.analyze_bound_horizontal(img=img, obj=object_contours, mask=mask, line_position=300)
_ = pcv.analyze_bound_horizontal(img=img, obj=object_contours, mask=mask, line_position=100)
_ = pcv.analyze_bound_horizontal(img=img_above_bound_only, obj=object_contours, mask=mask, line_position=1756)
# Test with debug = "plot"
pcv.params.debug = "plot"
_ = pcv.analyze_bound_horizontal(img=img, obj=object_contours, mask=mask, line_position=1756)
# Test with debug = None
pcv.params.debug = None
boundary_header, boundary_data, boundary_img1 = pcv.analyze_bound_horizontal(img=img, obj=object_contours,
mask=mask, line_position=1756)
pcv.print_results(os.path.join(cache_dir, "results.txt"))
pcv.outputs.clear()
assert boundary_data[3] == 62555
def test_plantcv_analyze_bound_horizontal_grayscale_image():
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
contours_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS), encoding="latin1")
object_contours = contours_npz['arr_0']
# Test with a grayscale reference image and debug="plot"
pcv.params.debug = "plot"
boundary_header, boundary_data, boundary_img1 = pcv.analyze_bound_horizontal(img=img, obj=object_contours,
mask=mask, line_position=1756)
assert boundary_data[3] == 62555
def test_plantcv_analyze_bound_horizontal_neg_y():
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
contours_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS), encoding="latin1")
object_contours = contours_npz['arr_0']
# Test with debug=None, line position that will trigger -y
pcv.params.debug = "plot"
_ = pcv.analyze_bound_horizontal(img=img, obj=object_contours, mask=mask, line_position=(-1000))
_ = pcv.analyze_bound_horizontal(img=img, obj=object_contours, mask=mask, line_position=(0))
boundary_header, boundary_data, boundary_img1 = pcv.analyze_bound_horizontal(img=img, obj=object_contours,
mask=mask, line_position=2056)
assert boundary_data[3] == 63632
def test_plantcv_analyze_bound_vertical():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_analyze_bound_vertical")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
contours_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS), encoding="latin1")
object_contours = contours_npz['arr_0']
# Test with debug = "print"
pcv.params.debug = "print"
_ = pcv.analyze_bound_vertical(img=img, obj=object_contours, mask=mask, line_position=1000)
# Test with debug = "plot"
pcv.params.debug = "plot"
_ = pcv.analyze_bound_vertical(img=img, obj=object_contours, mask=mask, line_position=1000)
# Test with debug = None
pcv.params.debug = None
boundary_header, boundary_data, boundary_img1 = pcv.analyze_bound_vertical(img=img, obj=object_contours,
mask=mask, line_position=1000)
pcv.print_results(os.path.join(cache_dir, "results.txt"))
pcv.outputs.clear()
assert boundary_data[3] == 5016
def test_plantcv_analyze_bound_vertical_grayscale_image():
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
contours_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS), encoding="latin1")
object_contours = contours_npz['arr_0']
# Test with a grayscale reference image and debug="plot"
pcv.params.debug = "plot"
boundary_header, boundary_data, boundary_img1 = pcv.analyze_bound_vertical(img=img, obj=object_contours,
mask=mask, line_position=1000)
assert boundary_data[3] == 5016
def test_plantcv_analyze_bound_vertical_neg_x():
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
contours_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS), encoding="latin1")
object_contours = contours_npz['arr_0']
# Test with debug="plot", line position that will trigger -x
pcv.params.debug = "plot"
boundary_header, boundary_data, boundary_img1 = pcv.analyze_bound_vertical(img=img, obj=object_contours,
mask=mask, line_position=2454)
assert boundary_data[3] == 63632
def test_plantcv_analyze_bound_vertical_small_x():
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
contours_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS), encoding="latin1")
object_contours = contours_npz['arr_0']
# Test with debug='plot', line position that will trigger -x, and two channel object
pcv.params.debug = "plot"
boundary_header, boundary_data, boundary_img1 = pcv.analyze_bound_vertical(img=img, obj=object_contours,
mask=mask, line_position=1)
assert boundary_data[3] == 0
def test_plantcv_analyze_color():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_analyze_color")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
# Test with debug = "print"
pcv.params.debug = "print"
_ = pcv.analyze_color(rgb_img=img, mask=mask, bins=256, hist_plot_type="all")
# Test with debug = "plot"
pcv.params.debug = "plot"
_ = pcv.analyze_color(rgb_img=img, mask=mask, bins=256, hist_plot_type='rgb')
_ = pcv.analyze_color(rgb_img=img, mask=mask, bins=256, hist_plot_type='lab')
_ = pcv.analyze_color(rgb_img=img, mask=mask, bins=256, hist_plot_type='hsv')
# Test with debug = None
pcv.params.debug = None
color_header, color_data, analysis_images = pcv.analyze_color(rgb_img=img, mask=mask, bins=256,
hist_plot_type=None)
pcv.print_results(os.path.join(cache_dir, "results.txt"))
pcv.outputs.clear()
assert np.sum(color_data[3]) != 0
def test_plantcv_analyze_color_incorrect_image():
img_binary = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
with pytest.raises(RuntimeError):
_ = pcv.analyze_color(rgb_img=img_binary, mask=mask, bins=256, hist_plot_type=None)
def test_plantcv_analyze_color_bad_hist_type():
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
pcv.params.debug = "plot"
with pytest.raises(RuntimeError):
_ = pcv.analyze_color(rgb_img=img, mask=mask, bins=256, hist_plot_type='bgr')
# def test_plantcv_analyze_color_incorrect_pseudo_channel():
# img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
# mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
# with pytest.raises(RuntimeError):
# pcv.params.debug = "plot"
# _ = pcv.analyze_color(rgb_img=img, mask=mask, bins=256, hist_plot_type=None, pseudo_channel="x",
# pseudo_bkg="white", filename=False)
#
#
# def test_plantcv_analyze_color_incorrect_pseudo_background():
# img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
# mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
# with pytest.raises(RuntimeError):
# pcv.params.debug = "plot"
# _ = pcv.analyze_color(rgb_img=img, mask=mask, bins=256, hist_plot_type=None, pseudo_channel="v",
# pseudo_bkg="black", filename=False)
def test_plantcv_analyze_color_incorrect_hist_plot_type():
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
with pytest.raises(RuntimeError):
pcv.params.debug = "plot"
_ = pcv.analyze_color(rgb_img=img, mask=mask, bins=256, hist_plot_type="bgr")
def test_plantcv_analyze_nir():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_analyze_nir")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR), 0)
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
# Test with debug = "print"
pcv.params.debug = "print"
_ = pcv.analyze_nir_intensity(gray_img=np.uint16(img), mask=mask, bins=256, histplot=True)
# Test with debug = "plot"
pcv.params.debug = "plot"
_ = pcv.analyze_nir_intensity(gray_img=img, mask=mask, bins=256, histplot=False)
# Test with debug = "plot"
_ = pcv.analyze_nir_intensity(gray_img=img, mask=mask, bins=256, histplot=True)
# Test with debug = None
pcv.params.debug = None
hist_header, hist_data, h_norm = pcv.analyze_nir_intensity(gray_img=img, mask=mask, bins=256, histplot=False)
pcv.print_results(os.path.join(cache_dir, "results.txt"))
pcv.outputs.clear()
assert np.sum(hist_data[3]) == 63632
def test_plantcv_analyze_object():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_analyze_object")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
contours_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS), encoding="latin1")
obj_contour = contours_npz['arr_0']
# max_obj = max(obj_contour, key=len)
# Test with debug = "print"
pcv.params.debug = "print"
_ = pcv.analyze_object(img=img, obj=obj_contour, mask=mask)
# Test with debug = "plot"
pcv.params.debug = "plot"
_ = pcv.analyze_object(img=img, obj=obj_contour, mask=mask)
# Test with debug = None
pcv.params.debug = None
obj_header, obj_data, obj_images = pcv.analyze_object(img=img, obj=obj_contour, mask=mask)
pcv.print_results(os.path.join(cache_dir, "results.txt"))
pcv.outputs.clear()
assert obj_data[1] != 0
def test_plantcv_analyze_object_grayscale_input():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_analyze_object_grayscale_input")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR), 0)
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
contours_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS), encoding="latin1")
obj_contour = contours_npz['arr_0']
# max_obj = max(obj_contour, key=len)
# Test with debug = "plot"
pcv.params.debug = "plot"
obj_header, obj_data, obj_images = pcv.analyze_object(img=img, obj=obj_contour, mask=mask)
assert obj_data[1] != 0
def test_plantcv_analyze_object_zero_slope():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_analyze_object_zero_slope")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Create a test image
img = np.zeros((50, 50, 3), dtype=np.uint8)
img[10:11, 10:40, 0] = 255
mask = img[:, :, 0]
obj_contour = np.array([[[10, 10]], [[11, 10]], [[12, 10]], [[13, 10]], [[14, 10]], [[15, 10]], [[16, 10]],
[[17, 10]], [[18, 10]], [[19, 10]], [[20, 10]], [[21, 10]], [[22, 10]], [[23, 10]],
[[24, 10]], [[25, 10]], [[26, 10]], [[27, 10]], [[28, 10]], [[29, 10]], [[30, 10]],
[[31, 10]], [[32, 10]], [[33, 10]], [[34, 10]], [[35, 10]], [[36, 10]], [[37, 10]],
[[38, 10]], [[39, 10]], [[38, 10]], [[37, 10]], [[36, 10]], [[35, 10]], [[34, 10]],
[[33, 10]], [[32, 10]], [[31, 10]], [[30, 10]], [[29, 10]], [[28, 10]], [[27, 10]],
[[26, 10]], [[25, 10]], [[24, 10]], [[23, 10]], [[22, 10]], [[21, 10]], [[20, 10]],
[[19, 10]], [[18, 10]], [[17, 10]], [[16, 10]], [[15, 10]], [[14, 10]], [[13, 10]],
[[12, 10]], [[11, 10]]], dtype=np.int32)
# Test with debug = None
pcv.params.debug = None
obj_header, obj_data, obj_images = pcv.analyze_object(img=img, obj=obj_contour, mask=mask)
assert obj_data[7] == 30
def test_plantcv_analyze_object_longest_axis_2d():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_analyze_object_longest_axis_2d")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Create a test image
img = np.zeros((50, 50, 3), dtype=np.uint8)
img[0:5, 45:49, 0] = 255
img[0:5, 0:5, 0] = 255
mask = img[:, :, 0]
obj_contour = np.array([[[45, 1]], [[45, 2]], [[45, 3]], [[45, 4]], [[46, 4]], [[47, 4]], [[48, 4]],
[[48, 3]], [[48, 2]], [[48, 1]], [[47, 1]], [[46, 1]], [[1, 1]], [[1, 2]],
[[1, 3]], [[1, 4]], [[2, 4]], [[3, 4]], [[4, 4]], [[4, 3]], [[4, 2]],
[[4, 1]], [[3, 1]], [[2, 1]]], dtype=np.int32)
# Test with debug = None
pcv.params.debug = None
obj_header, obj_data, obj_images = pcv.analyze_object(img=img, obj=obj_contour, mask=mask)
assert obj_data[7] == 186
def test_plantcv_analyze_object_longest_axis_2e():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_analyze_object_longest_axis_2e")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Create a test image
img = np.zeros((50, 50, 3), dtype=np.uint8)
img[10:15, 10:40, 0] = 255
mask = img[:, :, 0]
obj_contour = np.array([[[10, 10]], [[10, 11]], [[10, 12]], [[10, 13]], [[10, 14]], [[11, 14]], [[12, 14]],
[[13, 14]], [[14, 14]], [[15, 14]], [[16, 14]], [[17, 14]], [[18, 14]], [[19, 14]],
[[20, 14]], [[21, 14]], [[22, 14]], [[23, 14]], [[24, 14]], [[25, 14]], [[26, 14]],
[[27, 14]], [[28, 14]], [[29, 14]], [[30, 14]], [[31, 14]], [[32, 14]], [[33, 14]],
[[34, 14]], [[35, 14]], [[36, 14]], [[37, 14]], [[38, 14]], [[39, 14]], [[39, 13]],
[[39, 12]], [[39, 11]], [[39, 10]], [[38, 10]], [[37, 10]], [[36, 10]], [[35, 10]],
[[34, 10]], [[33, 10]], [[32, 10]], [[31, 10]], [[30, 10]], [[29, 10]], [[28, 10]],
[[27, 10]], [[26, 10]], [[25, 10]], [[24, 10]], [[23, 10]], [[22, 10]], [[21, 10]],
[[20, 10]], [[19, 10]], [[18, 10]], [[17, 10]], [[16, 10]], [[15, 10]], [[14, 10]],
[[13, 10]], [[12, 10]], [[11, 10]]], dtype=np.int32)
# Test with debug = None
pcv.params.debug = None
obj_header, obj_data, obj_images = pcv.analyze_object(img=img, obj=obj_contour, mask=mask)
assert obj_data[7] == 141
def test_plantcv_analyze_object_small_contour():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_analyze_object_small_contour")
os.mkdir(cache_dir)
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
obj_contour = [np.array([[[0, 0]], [[0, 50]], [[50, 50]], [[50, 0]]], dtype=np.int32)]
# Test with debug = None
pcv.params.debug = None
obj_header, obj_data, obj_images = pcv.analyze_object(img=img, obj=obj_contour, mask=mask)
assert obj_data is None
def test_plantcv_apply_mask_white():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_apply_mask_white")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
# Test with debug = "print"
pcv.params.debug = "print"
_ = pcv.apply_mask(rgb_img=img, mask=mask, mask_color="white")
# Test with debug = "plot"
pcv.params.debug = "plot"
_ = pcv.apply_mask(rgb_img=img, mask=mask, mask_color="white")
# Test with debug = None
pcv.params.debug = None
masked_img = pcv.apply_mask(rgb_img=img, mask=mask, mask_color="white")
assert all([i == j] for i, j in zip(np.shape(masked_img), TEST_COLOR_DIM))
def test_plantcv_apply_mask_black():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_apply_mask_black")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
# Test with debug = "print"
pcv.params.debug = "print"
_ = pcv.apply_mask(rgb_img=img, mask=mask, mask_color="black")
# Test with debug = "plot"
pcv.params.debug = "plot"
_ = pcv.apply_mask(rgb_img=img, mask=mask, mask_color="black")
# Test with debug = None
pcv.params.debug = None
masked_img = pcv.apply_mask(rgb_img=img, mask=mask, mask_color="black")
assert all([i == j] for i, j in zip(np.shape(masked_img), TEST_COLOR_DIM))
def test_plantcv_apply_mask_bad_input():
# Read in test data
img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
with pytest.raises(RuntimeError):
pcv.params.debug = "plot"
_ = pcv.apply_mask(rgb_img=img, mask=mask, mask_color="wite")
def test_plantcv_auto_crop():
# Test cache directory
cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_auto_crop")
os.mkdir(cache_dir)
pcv.params.debug_outdir = cache_dir
# Read in test data
img1 = cv2.imread(os.path.join(TEST_DATA, TEST_INTPUT_MULTI), -1)
contours = np.load(os.path.join(TEST_DATA, TEST_INPUT_MULTI_OBJECT), encoding="latin1")
roi_contours = contours['arr_0']
# Test with debug = "print"
pcv.params.debug = "print"
_ = pcv.auto_crop(img=img1, obj=roi_contours[1], padding_x=20, padding_y=20, color='black')
# Test with debug = "plot"
pcv.params.debug = "plot"
_ = pcv.auto_crop(img=img1, obj=roi_contours[1], padding_x=20, padding_y=20, color='image')
# Test with debug = None
pcv.params.debug = None
cropped = pcv.auto_crop(img=img1, obj=roi_contours[1], padding_x=20, padding_y=20, color='black')
x, y, z = np.shape(img1)
x1, y1, z1 = np.shape(cropped)
assert x > x1
def test_plantcv_auto_crop_grayscale_input():
    """auto_crop should also handle single-channel (grayscale) images."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_auto_crop_grayscale_input")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Convert the color test image to grayscale and load stored contours
    color_img = cv2.imread(os.path.join(TEST_DATA, TEST_INTPUT_MULTI), -1)
    gray = cv2.cvtColor(color_img, cv2.COLOR_BGR2GRAY)
    stored = np.load(os.path.join(TEST_DATA, TEST_INPUT_MULTI_OBJECT), encoding="latin1")
    obj_contours = stored['arr_0']
    # Run under "plot" debug; the crop must shrink the first dimension
    pcv.params.debug = "plot"
    cropped = pcv.auto_crop(img=gray, obj=obj_contours[1], padding_x=20, padding_y=20, color='white')
    rows, _cols = np.shape(gray)
    crop_rows, _crop_cols = np.shape(cropped)
    assert rows > crop_rows
def test_plantcv_auto_crop_bad_input():
    """auto_crop should raise RuntimeError for an unrecognized color value."""
    # Grayscale conversion of the multi-plant test image
    color_img = cv2.imread(os.path.join(TEST_DATA, TEST_INTPUT_MULTI), -1)
    gray = cv2.cvtColor(color_img, cv2.COLOR_BGR2GRAY)
    stored = np.load(os.path.join(TEST_DATA, TEST_INPUT_MULTI_OBJECT), encoding="latin1")
    obj_contours = stored['arr_0']
    with pytest.raises(RuntimeError):
        pcv.params.debug = "plot"
        _ = pcv.auto_crop(img=gray, obj=obj_contours[1], padding_x=20, padding_y=20, color='wite')
def test_plantcv_canny_edge_detect():
    """canny_edge_detect should return a binary image matching the input size."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_canny_edge_detect")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.canny_edge_detect(img=rgb_img, mask=mask, mask_color='white')
    _ = pcv.canny_edge_detect(img=img, mask=mask, mask_color='black')
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.canny_edge_detect(img=img, thickness=2)
    _ = pcv.canny_edge_detect(img=img)
    # Test with debug = None
    pcv.params.debug = None
    edge_img = pcv.canny_edge_detect(img=img)
    # Bug fix: the original wrapped each comparison in a singleton list
    # ([i == j]), making all(...) trivially True; assert element-wise instead.
    # Output must have the input's dimensions...
    assert all(i == j for i, j in zip(np.shape(edge_img), TEST_BINARY_DIM))
    # ...and be strictly binary (only 0 and 255)
    assert all(i == j for i, j in zip(np.unique(edge_img), [0, 255]))
def test_plantcv_canny_edge_detect_bad_input():
    """canny_edge_detect should raise RuntimeError for an invalid mask_color."""
    # Bug fix: use a unique cache directory name; this test previously reused
    # "test_plantcv_canny_edge_detect", so os.mkdir raised FileExistsError
    # whenever both tests ran in the same session.
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_canny_edge_detect_bad_input")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    with pytest.raises(RuntimeError):
        _ = pcv.canny_edge_detect(img=img, mask=mask, mask_color="gray")
def test_plantcv_cluster_contours():
    """cluster_contours should merge objects into fewer clusters than inputs."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_cluster_contours")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load the multi-plant image plus its stored contours and hierarchy
    multi_img = cv2.imread(os.path.join(TEST_DATA, TEST_INTPUT_MULTI), -1)
    stored_objs = np.load(os.path.join(TEST_DATA, TEST_INPUT_MULTI_OBJECT), encoding="latin1")
    stored_hier = np.load(os.path.join(TEST_DATA, TEST_INPUT_MULTI_HIERARCHY), encoding="latin1")
    objects = stored_objs['arr_0']
    hierarchy = stored_hier['arr_0']
    # "print" debug path, including the grid overlay option
    pcv.params.debug = "print"
    _ = pcv.cluster_contours(img=multi_img, roi_objects=objects, roi_obj_hierarchy=hierarchy, nrow=4, ncol=6)
    _ = pcv.cluster_contours(img=multi_img, roi_objects=objects, roi_obj_hierarchy=hierarchy, show_grid=True)
    # "plot" debug path
    pcv.params.debug = "plot"
    _ = pcv.cluster_contours(img=multi_img, roi_objects=objects, roi_obj_hierarchy=hierarchy, nrow=4, ncol=6)
    # Silent run for the assertion
    pcv.params.debug = None
    clusters, _contours, _hier = pcv.cluster_contours(img=multi_img, roi_objects=objects,
                                                      roi_obj_hierarchy=hierarchy, nrow=4, ncol=6)
    # Clustering must reduce the number of groups below the raw object count
    assert len(objects) > len(clusters)
def test_plantcv_cluster_contours_grayscale_input():
    """cluster_contours should handle grayscale images and reduce object count."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_cluster_contours_grayscale_input")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Grayscale read (flag 0) of the multi-plant image plus stored objects
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INTPUT_MULTI), 0)
    stored_objs = np.load(os.path.join(TEST_DATA, TEST_INPUT_MULTI_OBJECT), encoding="latin1")
    stored_hier = np.load(os.path.join(TEST_DATA, TEST_INPUT_MULTI_HIERARCHY), encoding="latin1")
    objects = stored_objs['arr_0']
    hierarchy = stored_hier['arr_0']
    # Exercise the "print" and "plot" debug paths
    pcv.params.debug = "print"
    _ = pcv.cluster_contours(img=gray_img, roi_objects=objects, roi_obj_hierarchy=hierarchy, nrow=4, ncol=6)
    pcv.params.debug = "plot"
    _ = pcv.cluster_contours(img=gray_img, roi_objects=objects, roi_obj_hierarchy=hierarchy, nrow=4, ncol=6)
    # Silent run for the assertion
    pcv.params.debug = None
    clusters, _contours, _hier = pcv.cluster_contours(img=gray_img, roi_objects=objects,
                                                      roi_obj_hierarchy=hierarchy, nrow=4, ncol=6)
    assert len(objects) > len(clusters)
def test_plantcv_cluster_contours_splitimg():
    """Split a clustered multi-plant image into per-cluster output images.

    Exercises cluster_contour_splitimg under "print" and "plot" debug modes,
    with and without an output directory and filename prefix, with cluster-name
    files (including one with too many names), and with an empty contour list,
    then asserts the silent run returns at least one output path.
    """
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_cluster_contours_splitimg")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img1 = cv2.imread(os.path.join(TEST_DATA, TEST_INTPUT_MULTI), -1)
    contours = np.load(os.path.join(TEST_DATA, TEST_INPUT_MULTI_CONTOUR), encoding="latin1")
    clusters = np.load(os.path.join(TEST_DATA, TEST_INPUT_ClUSTER_CONTOUR), encoding="latin1")
    hierachy = np.load(os.path.join(TEST_DATA, TEST_INPUT_MULTI_HIERARCHY), encoding="latin1")
    # Paths to genotype name files (one matching, one with too many entries)
    cluster_names = os.path.join(TEST_DATA, TEST_INPUT_GENOTXT)
    cluster_names_too_many = os.path.join(TEST_DATA, TEST_INPUT_GENOTXT_TOO_MANY)
    roi_contours = contours['arr_0']
    cluster_contours = clusters['arr_0']
    obj_hierarchy = hierachy['arr_0']
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.cluster_contour_splitimg(rgb_img=img1, grouped_contour_indexes=cluster_contours, contours=roi_contours,
                                     hierarchy=obj_hierarchy, outdir=cache_dir, file=None, filenames=None)
    # Degenerate case: one group index but an empty contour list
    _ = pcv.cluster_contour_splitimg(rgb_img=img1, grouped_contour_indexes=[[0]], contours=[],
                                     hierarchy=np.array([[[ 1, -1, -1, -1]]]))
    # With a filename prefix supplied
    _ = pcv.cluster_contour_splitimg(rgb_img=img1, grouped_contour_indexes=cluster_contours, contours=roi_contours,
                                     hierarchy=obj_hierarchy, outdir=cache_dir, file='multi', filenames=None)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.cluster_contour_splitimg(rgb_img=img1, grouped_contour_indexes=cluster_contours, contours=roi_contours,
                                     hierarchy=obj_hierarchy, outdir=None, file=None, filenames=cluster_names)
    # More names than clusters — presumably exercises a warning path; confirm
    _ = pcv.cluster_contour_splitimg(rgb_img=img1, grouped_contour_indexes=cluster_contours, contours=roi_contours,
                                     hierarchy=obj_hierarchy, outdir=None, file=None, filenames=cluster_names_too_many)
    # Test with debug = None
    pcv.params.debug = None
    output_path = pcv.cluster_contour_splitimg(rgb_img=img1, grouped_contour_indexes=cluster_contours,
                                               contours=roi_contours, hierarchy=obj_hierarchy, outdir=None, file=None,
                                               filenames=None)
    assert len(output_path) != 0
def test_plantcv_color_palette():
    """color_palette should return the requested number of 3-element colors."""
    checks = []
    # A single color: list of length one holding a length-three entry
    single = pcv.color_palette(1)
    checks.append(len(single) == 1)
    checks.append(len(single[0]) == 3)
    # Ten colors: list of length ten
    many = pcv.color_palette(10)
    checks.append(len(many) == 10)
    # Every expectation must hold for the test to pass
    assert (all(checks))
def test_plantcv_crop_position_mask():
    """crop_position_mask should reposition the mask to the expected pixel sum."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_crop_position_mask")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load the NIR reference image and the mask variants
    nir, _path, _fname = pcv.readimage(os.path.join(TEST_DATA, TEST_INPUT_NIR_MASK))
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MASK), -1)
    mask_three_channel = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MASK))
    mask_resize = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MASK_RESIZE), -1)
    # "print" debug: binary, resized, and 3-channel masks, top/right placement
    pcv.params.debug = "print"
    _ = pcv.crop_position_mask(nir, mask, x=40, y=3, v_pos="top", h_pos="right")
    _ = pcv.crop_position_mask(nir, mask_resize, x=40, y=3, v_pos="top", h_pos="right")
    _ = pcv.crop_position_mask(nir, mask_three_channel, x=40, y=3, v_pos="top", h_pos="right")
    # "print" debug with bottom/left placement
    _ = pcv.crop_position_mask(nir, mask, x=40, y=3, v_pos="bottom", h_pos="left")
    # "plot" debug for both placements
    pcv.params.debug = "plot"
    _ = pcv.crop_position_mask(nir, mask, x=40, y=3, v_pos="top", h_pos="right")
    _ = pcv.crop_position_mask(nir, mask, x=45, y=2, v_pos="bottom", h_pos="left")
    # Silent run for the assertion
    pcv.params.debug = None
    newmask = pcv.crop_position_mask(nir, mask, x=40, y=3, v_pos="top", h_pos="right")
    assert np.sum(newmask) == 641517
def test_plantcv_crop_position_mask_color():
    """crop_position_mask should handle color references and non-binary masks."""
    # Bug fix: use a unique cache directory name; this test previously reused
    # "test_plantcv_crop_position_mask", so os.mkdir raised FileExistsError
    # whenever both tests ran in the same session.
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_crop_position_mask_color")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    nir, path1, filename1 = pcv.readimage(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MASK), -1)
    mask_resize = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MASK_RESIZE))
    mask_non_binary = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MASK))
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.crop_position_mask(nir, mask, x=40, y=3, v_pos="top", h_pos="right")
    # Test with debug = "print" with bottom
    _ = pcv.crop_position_mask(nir, mask, x=40, y=3, v_pos="bottom", h_pos="left")
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.crop_position_mask(nir, mask, x=40, y=3, v_pos="top", h_pos="right")
    # Test with debug = "plot" with bottom, plus non-binary and resized masks
    _ = pcv.crop_position_mask(nir, mask, x=45, y=2, v_pos="bottom", h_pos="left")
    _ = pcv.crop_position_mask(nir, mask_non_binary, x=45, y=2, v_pos="bottom", h_pos="left")
    _ = pcv.crop_position_mask(nir, mask_non_binary, x=45, y=2, v_pos="top", h_pos="left")
    _ = pcv.crop_position_mask(nir, mask_non_binary, x=45, y=2, v_pos="bottom", h_pos="right")
    _ = pcv.crop_position_mask(nir, mask_resize, x=45, y=2, v_pos="top", h_pos="left")
    # Test with debug = None
    pcv.params.debug = None
    newmask = pcv.crop_position_mask(nir, mask, x=40, y=3, v_pos="top", h_pos="right")
    assert np.sum(newmask) == 641517
def test_plantcv_crop_position_mask_bad_input_x():
    """crop_position_mask should raise RuntimeError for negative offsets."""
    # Bug fix: unique cache directory name (was shared with other
    # crop_position_mask tests, causing os.mkdir to raise FileExistsError).
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_crop_position_mask_bad_input_x")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MASK), -1)
    # Read in test data
    nir, path1, filename1 = pcv.readimage(os.path.join(TEST_DATA, TEST_INPUT_NIR_MASK))
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.crop_position_mask(nir, mask, x=-1, y=-1, v_pos="top", h_pos="right")
def test_plantcv_crop_position_mask_bad_input_vpos():
    """crop_position_mask should raise RuntimeError for an invalid v_pos."""
    # Bug fix: unique cache directory name (was shared with other
    # crop_position_mask tests, causing os.mkdir to raise FileExistsError).
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_crop_position_mask_bad_input_vpos")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MASK), -1)
    # Read in test data
    nir, path1, filename1 = pcv.readimage(os.path.join(TEST_DATA, TEST_INPUT_NIR_MASK))
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.crop_position_mask(nir, mask, x=40, y=3, v_pos="below", h_pos="right")
def test_plantcv_crop_position_mask_bad_input_hpos():
    """crop_position_mask should raise RuntimeError for an invalid h_pos."""
    # Bug fix: unique cache directory name (was shared with other
    # crop_position_mask tests, causing os.mkdir to raise FileExistsError).
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_crop_position_mask_bad_input_hpos")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MASK), -1)
    # Read in test data
    nir, path1, filename1 = pcv.readimage(os.path.join(TEST_DATA, TEST_INPUT_NIR_MASK))
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.crop_position_mask(nir, mask, x=40, y=3, v_pos="top", h_pos="starboard")
def test_plantcv_dilate():
    """dilate should return a binary image with the input's dimensions."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_dilate")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.dilate(gray_img=img, ksize=5, i=1)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.dilate(gray_img=img, ksize=5, i=1)
    # Test with debug = None
    pcv.params.debug = None
    dilate_img = pcv.dilate(gray_img=img, ksize=5, i=1)
    # Bug fix: the original checks iterated singleton lists ([i == j]), which
    # are always truthy, so they could never fail; assert directly instead.
    assert all(i == j for i, j in zip(np.shape(dilate_img), TEST_BINARY_DIM))
    assert all(i == j for i, j in zip(np.unique(dilate_img), [0, 255]))
def test_plantcv_dilate_small_k():
    """dilate should reject a kernel size below the minimum with ValueError."""
    # Binary test image
    binary = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    pcv.params.debug = None
    with pytest.raises(ValueError):
        _ = pcv.dilate(binary, 1, 1)
def test_plantcv_erode():
    """erode should return a binary image with the input's dimensions."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_erode")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.erode(gray_img=img, ksize=5, i=1)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.erode(gray_img=img, ksize=5, i=1)
    # Test with debug = None
    pcv.params.debug = None
    erode_img = pcv.erode(gray_img=img, ksize=5, i=1)
    # Bug fix: the original checks iterated singleton lists ([i == j]), which
    # are always truthy, so they could never fail; assert directly instead.
    assert all(i == j for i, j in zip(np.shape(erode_img), TEST_BINARY_DIM))
    assert all(i == j for i, j in zip(np.unique(erode_img), [0, 255]))
def test_plantcv_erode_small_k():
    """erode should reject a kernel size below the minimum with ValueError."""
    # Binary test image
    binary = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    pcv.params.debug = None
    with pytest.raises(ValueError):
        _ = pcv.erode(binary, 1, 1)
def test_plantcv_distance_transform():
    """distance_transform output must match the input mask's dimensions."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_distance_transform")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_CROPPED_MASK), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.distance_transform(bin_img=mask, distance_type=1, mask_size=3)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.distance_transform(bin_img=mask, distance_type=1, mask_size=3)
    # Test with debug = None
    pcv.params.debug = None
    distance_transform_img = pcv.distance_transform(bin_img=mask, distance_type=1, mask_size=3)
    # Bug fix: compare elements directly; the original iterated singleton
    # lists ([i == j]) which are always truthy, so the assert never failed.
    assert all(i == j for i, j in zip(np.shape(distance_transform_img), np.shape(mask)))
def test_plantcv_fatal_error():
    """fatal_error must surface as a RuntimeError."""
    # Any message should trigger the exception
    with pytest.raises(RuntimeError):
        pcv.fatal_error("Test error")
def test_plantcv_fill():
    """fill should remove every object below the size threshold."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_fill")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Binary test image
    binary = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    # Run under every debug mode; the assertion uses the final (silent) result
    for mode in ("print", "plot", None):
        pcv.params.debug = mode
        fill_img = pcv.fill(bin_img=binary, size=63632)
    # With this threshold every object is filled away, leaving an empty image
    assert np.sum(fill_img) == 0
def test_plantcv_fill_bad_input():
    """fill should raise RuntimeError when given a non-binary image."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_fill_bad_input")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # A grayscale (non-binary) image is invalid input for fill
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    with pytest.raises(RuntimeError):
        _ = pcv.fill(bin_img=gray, size=1)
def test_plantcv_find_objects():
    """find_objects should locate the expected number of contours."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_find_objects")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.find_objects(img=img, mask=mask)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.find_objects(img=img, mask=mask)
    # Test with debug = None
    pcv.params.debug = None
    contours, hierarchy = pcv.find_objects(img=img, mask=mask)
    # Simplification: the original branched on the OpenCV major version but
    # asserted the identical value (2) in both branches.
    assert len(contours) == 2
def test_plantcv_find_objects_grayscale_input():
    """find_objects should also accept a grayscale input image."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_find_objects_grayscale_input")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data (flag 0 loads the color image as grayscale)
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR), 0)
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    contours, hierarchy = pcv.find_objects(img=img, mask=mask)
    # Simplification: the original branched on the OpenCV major version but
    # asserted the identical value (2) in both branches.
    assert len(contours) == 2
def test_plantcv_flip():
    """flip should mirror the image without changing its dimensions."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_flip")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    img_binary = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.flip(img=img, direction="horizontal")
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.flip(img=img, direction="vertical")
    _ = pcv.flip(img=img_binary, direction="vertical")
    # Test with debug = None
    pcv.params.debug = None
    flipped_img = pcv.flip(img=img, direction="horizontal")
    # Bug fix: compare elements directly; the original iterated singleton
    # lists, which are always truthy, so the assertion could never fail.
    assert all(i == j for i, j in zip(np.shape(flipped_img), TEST_COLOR_DIM))
def test_plantcv_flip_bad_input():
    """flip should raise RuntimeError for an unrecognized direction."""
    color = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.flip(img=color, direction="vert")
def test_plantcv_fluor_fvfm():
    """fluor_fvfm should produce Fv/Fm data passing the expected threshold."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_fluor_fvfm")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    filename = os.path.join(cache_dir, 'plantcv_fvfm_hist.jpg')
    # Read in test data
    fdark = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_FDARK), -1)
    fmin = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_FMIN), -1)
    fmax = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_FMAX), -1)
    fmask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_FMASK), -1)
    # Test with debug = "print" (an unused `outfile` local was removed here)
    pcv.params.debug = "print"
    _ = pcv.fluor_fvfm(fdark=fdark, fmin=fmin, fmax=fmax, mask=fmask, bins=1000)
    # Offsetting fdark by 3000 exercises an alternate code path — presumably
    # the "fdark too bright" handling; confirm against fluor_fvfm's source.
    _, _, analysis_images = pcv.fluor_fvfm(fdark=fdark+3000, fmin=fmin, fmax=fmax, mask=fmask, bins=1000)
    # Test under updated print and plot function
    hist_img = analysis_images[1]
    pcv.print_image(hist_img, filename)
    pcv.plot_image(hist_img)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.fluor_fvfm(fdark=fdark, fmin=fmin, fmax=fmax, mask=fmask, bins=1000)
    # Test with debug = None
    pcv.params.debug = None
    fvfm_header, fvfm_data, fvfm_images = pcv.fluor_fvfm(fdark=fdark, fmin=fmin, fmax=fmax, mask=fmask, bins=1000)
    pcv.print_results(os.path.join(cache_dir, "results.txt"))
    pcv.outputs.clear()
    assert fvfm_data[4] > 0.66
def test_plantcv_fluor_fvfm_bad_input():
    """fluor_fvfm should raise RuntimeError for mismatched input images."""
    # fdark is loaded as a 3-channel color image while the others are raw reads
    fdark = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    fmin = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_FMIN), -1)
    fmax = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_FMAX), -1)
    fmask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_FMASK), -1)
    with pytest.raises(RuntimeError):
        _ = pcv.fluor_fvfm(fdark=fdark, fmin=fmin, fmax=fmax, mask=fmask, bins=1000)
def test_plantcv_gaussian_blur():
    """gaussian_blur should change the image's mean pixel value."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_gaussian_blur")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Binary and color inputs
    binary = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    color = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR), -1)
    # "print" debug path
    pcv.params.debug = "print"
    _ = pcv.gaussian_blur(img=binary, ksize=(51, 51), sigma_x=0, sigma_y=None)
    # "plot" debug path for both image types
    pcv.params.debug = "plot"
    _ = pcv.gaussian_blur(img=binary, ksize=(51, 51), sigma_x=0, sigma_y=None)
    _ = pcv.gaussian_blur(img=color, ksize=(51, 51), sigma_x=0, sigma_y=None)
    # Silent run; blurring must alter the average intensity
    pcv.params.debug = None
    blurred = pcv.gaussian_blur(img=binary, ksize=(51, 51), sigma_x=0, sigma_y=None)
    assert np.average(blurred) != np.average(binary)
def test_plantcv_get_nir_sv():
    """get_nir should resolve the paired NIR path for the VIS test image."""
    found = pcv.get_nir(TEST_DATA, TEST_VIS)
    expected = os.path.join(TEST_DATA, TEST_NIR)
    assert found == expected
def test_plantcv_get_nir_tv():
    """get_nir should resolve the paired NIR path for the TV VIS test image."""
    found = pcv.get_nir(TEST_DATA, TEST_VIS_TV)
    expected = os.path.join(TEST_DATA, TEST_NIR_TV)
    assert found == expected
def test_plantcv_hist_equalization():
    """hist_equalization should alter the image's mean intensity."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_hist_equalization")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Grayscale test image
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    # Run under every debug mode; the assertion uses the final (silent) result
    for mode in ("print", "plot", None):
        pcv.params.debug = mode
        equalized = pcv.hist_equalization(gray_img=gray)
    # Equalization must shift the average pixel value
    assert np.average(equalized) != np.average(gray)
def test_plantcv_hist_equalization_bad_input():
    """hist_equalization should raise RuntimeError for a color image."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_hist_equalization_bad_input")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Flag 1 forces a 3-channel read, which is invalid for this function
    color = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), 1)
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.hist_equalization(gray_img=color)
def test_plantcv_image_add():
    """image_add output should keep the input images' dimensions."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_image_add")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img1 = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    img2 = np.copy(img1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.image_add(gray_img1=img1, gray_img2=img2)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.image_add(gray_img1=img1, gray_img2=img2)
    # Test with debug = None
    pcv.params.debug = None
    added_img = pcv.image_add(gray_img1=img1, gray_img2=img2)
    # Bug fix: compare elements directly; the original iterated singleton
    # lists, which are always truthy, so the assertion could never fail.
    assert all(i == j for i, j in zip(np.shape(added_img), TEST_BINARY_DIM))
def test_plantcv_image_subtract():
    """Subtracting an image from itself should yield an all-zero image."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_image_sub")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Identical input images
    minuend = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    subtrahend = np.copy(minuend)
    # Run under every debug mode; the assertion uses the final (silent) result
    for mode in ('print', 'plot', None):
        pcv.params.debug = mode
        diff = pcv.image_subtract(minuend, subtrahend)
    # x - x == 0 everywhere
    assert np.array_equal(diff, np.zeros(np.shape(diff), np.uint8))
def test_plantcv_image_subtract_fail():
    """image_subtract should raise RuntimeError for mismatched image shapes."""
    # One single-channel and one 3-channel read of the same file
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    color = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY))
    with pytest.raises(RuntimeError):
        _ = pcv.image_subtract(gray, color)
def test_plantcv_invert():
    """invert should return a binary image with the input's dimensions."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_invert")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.invert(gray_img=img)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.invert(gray_img=img)
    # Test with debug = None
    pcv.params.debug = None
    inverted_img = pcv.invert(gray_img=img)
    # Bug fix: the original checks iterated singleton lists ([i == j]), which
    # are always truthy, so they could never fail; assert directly instead.
    assert all(i == j for i, j in zip(np.shape(inverted_img), TEST_BINARY_DIM))
    assert all(i == j for i, j in zip(np.unique(inverted_img), [0, 255]))
def test_plantcv_landmark_reference_pt_dist():
    """landmark_reference_pt_dist should report nine landmark statistics."""
    out_dir = os.path.join(TEST_TMPDIR, "test_plantcv_landmark_reference")
    os.mkdir(out_dir)
    # Rescaled landmark coordinates plus centroid/baseline reference points
    points_rescaled = [(0.0139, 0.2569), (0.2361, 0.2917), (0.3542, 0.3819), (0.3542, 0.4167), (0.375, 0.4236),
                       (0.7431, 0.3681), (0.8958, 0.3542), (0.9931, 0.3125), (0.1667, 0.5139), (0.4583, 0.8889),
                       (0.4931, 0.5903), (0.3889, 0.5694), (0.4792, 0.4306), (0.2083, 0.5417), (0.3194, 0.5278),
                       (0.3889, 0.375), (0.3681, 0.3472), (0.2361, 0.0139), (0.5417, 0.2292), (0.7708, 0.3472),
                       (0.6458, 0.3472), (0.6389, 0.5208), (0.6458, 0.625)]
    centroid_rescaled = (0.4685, 0.4945)
    bottomline_rescaled = (0.4685, 0.2569)
    # Degenerate inputs: non-numeric centroid, a single far point, empty points
    _ = pcv.landmark_reference_pt_dist(points_r=[], centroid_r=('a', 'b'), bline_r=(0, 0))
    _ = pcv.landmark_reference_pt_dist(points_r=[(10, 1000)], centroid_r=(10, 10), bline_r=(10, 10))
    _ = pcv.landmark_reference_pt_dist(points_r=[], centroid_r=(0, 0), bline_r=(0, 0))
    # Real run: nine landmark statistics are expected back
    _header, landmark_data = pcv.landmark_reference_pt_dist(points_r=points_rescaled, centroid_r=centroid_rescaled,
                                                            bline_r=bottomline_rescaled)
    pcv.print_results(os.path.join(out_dir, "results.txt"))
    pcv.outputs.clear()
    assert len(landmark_data) == 9
def test_plantcv_laplace_filter():
    """laplace_filter output must match the input image's dimensions."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_laplace_filter")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.laplace_filter(gray_img=img, ksize=1, scale=1)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.laplace_filter(gray_img=img, ksize=1, scale=1)
    # Test with debug = None
    pcv.params.debug = None
    lp_img = pcv.laplace_filter(gray_img=img, ksize=1, scale=1)
    # Bug fix: compare elements directly; the original iterated singleton
    # lists, which are always truthy, so the assertion could never fail.
    assert all(i == j for i, j in zip(np.shape(lp_img), TEST_GRAY_DIM))
def test_plantcv_logical_and():
    """logical_and output should keep the input images' dimensions."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_logical_and")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img1 = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    img2 = np.copy(img1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.logical_and(bin_img1=img1, bin_img2=img2)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.logical_and(bin_img1=img1, bin_img2=img2)
    # Test with debug = None
    pcv.params.debug = None
    and_img = pcv.logical_and(bin_img1=img1, bin_img2=img2)
    # Bug fix: compare elements directly; the original iterated singleton
    # lists, which are always truthy, so the assertion could never fail.
    assert all(i == j for i, j in zip(np.shape(and_img), TEST_BINARY_DIM))
def test_plantcv_logical_or():
    """logical_or output should keep the input images' dimensions."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_logical_or")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img1 = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    img2 = np.copy(img1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.logical_or(bin_img1=img1, bin_img2=img2)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.logical_or(bin_img1=img1, bin_img2=img2)
    # Test with debug = None
    pcv.params.debug = None
    or_img = pcv.logical_or(bin_img1=img1, bin_img2=img2)
    # Bug fix: compare elements directly; the original iterated singleton
    # lists, which are always truthy, so the assertion could never fail.
    assert all(i == j for i, j in zip(np.shape(or_img), TEST_BINARY_DIM))
def test_plantcv_logical_xor():
    """logical_xor output should keep the input images' dimensions."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_logical_xor")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img1 = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    img2 = np.copy(img1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.logical_xor(bin_img1=img1, bin_img2=img2)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.logical_xor(bin_img1=img1, bin_img2=img2)
    # Test with debug = None
    pcv.params.debug = None
    xor_img = pcv.logical_xor(bin_img1=img1, bin_img2=img2)
    # Bug fix: compare elements directly; the original iterated singleton
    # lists, which are always truthy, so the assertion could never fail.
    assert all(i == j for i, j in zip(np.shape(xor_img), TEST_BINARY_DIM))
def test_plantcv_median_blur():
    """median_blur should return a binary image with the input's dimensions."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_median_blur")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.median_blur(gray_img=img, ksize=5)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.median_blur(gray_img=img, ksize=5)
    # Test with debug = None
    pcv.params.debug = None
    blur_img = pcv.median_blur(gray_img=img, ksize=5)
    # Bug fix: the original checks iterated singleton lists ([i == j]), which
    # are always truthy, so they could never fail; assert directly instead.
    assert all(i == j for i, j in zip(np.shape(blur_img), TEST_BINARY_DIM))
    assert all(i == j for i, j in zip(np.unique(blur_img), [0, 255]))
def test_plantcv_median_blur_bad_input():
    """median_blur should raise RuntimeError for a non-integer ksize."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_median_blur_bad_input")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Grayscale test image
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    with pytest.raises(RuntimeError):
        _ = pcv.median_blur(gray, 5.)
def test_plantcv_naive_bayes_classifier():
    """naive_bayes_classifier should return a binary mask sized like the input."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_naive_bayes_classifier")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.naive_bayes_classifier(rgb_img=img, pdf_file=os.path.join(TEST_DATA, TEST_PDFS))
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.naive_bayes_classifier(rgb_img=img, pdf_file=os.path.join(TEST_DATA, TEST_PDFS))
    # Test with debug = None
    pcv.params.debug = None
    mask = pcv.naive_bayes_classifier(rgb_img=img, pdf_file=os.path.join(TEST_DATA, TEST_PDFS))
    # Bug fix: the original checks iterated singleton lists ([i == j]), which
    # are always truthy, so they could never fail; assert directly instead.
    # Output must have grayscale dimensions...
    assert all(i == j for i, j in zip(np.shape(mask), TEST_GRAY_DIM))
    # ...and be strictly binary (only 0 and 255)
    assert all(i == j for i, j in zip(np.unique(mask), [0, 255]))
def test_plantcv_naive_bayes_classifier_bad_input():
    """naive_bayes_classifier should raise RuntimeError for a bad PDF file."""
    color = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.naive_bayes_classifier(rgb_img=color, pdf_file=os.path.join(TEST_DATA, TEST_PDFS_BAD))
def test_plantcv_object_composition():
    """object_composition should merge contours into a single composed object."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_object_composition")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Color image plus its stored contours and hierarchy
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    stored = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS1), encoding="latin1")
    object_contours = stored['arr_0']
    object_hierarchy = stored['arr_1']
    # "print" debug, including the empty-contour edge case
    pcv.params.debug = "print"
    _ = pcv.object_composition(img=img, contours=object_contours, hierarchy=object_hierarchy)
    _ = pcv.object_composition(img=img, contours=[], hierarchy=object_hierarchy)
    # "plot" debug
    pcv.params.debug = "plot"
    _ = pcv.object_composition(img=img, contours=object_contours, hierarchy=object_hierarchy)
    # Silent run; the composed contour must collapse to one object
    pcv.params.debug = None
    contours, mask = pcv.object_composition(img=img, contours=object_contours, hierarchy=object_hierarchy)
    assert np.shape(contours)[1] == 1
def test_plantcv_object_composition_grayscale_input():
    """object_composition should also accept a grayscale image input."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_object_composition_grayscale_input")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load the test image as grayscale plus its saved contours and hierarchy
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR), 0)
    saved = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS1), encoding="latin1")
    obj_contours, obj_hierarchy = saved['arr_0'], saved['arr_1']
    # Run with the "plot" debug path enabled
    pcv.params.debug = "plot"
    contours, mask = pcv.object_composition(img=gray, contours=obj_contours, hierarchy=obj_hierarchy)
    # The composed result should contain exactly one contour
    assert np.shape(contours)[1] == 1
def test_plantcv_output_mask():
    """output_mask should write both the image and the mask to disk."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_output_mask")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load grayscale, color, and binary test images
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    color = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR), -1)
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    # Exercise the "print" debug path
    pcv.params.debug = "print"
    _ = pcv.output_mask(img=gray, mask=mask, filename='test.png', outdir=cache_dir, mask_only=False)
    # Exercise the "plot" debug path with both grayscale and color inputs
    pcv.params.debug = "plot"
    _ = pcv.output_mask(img=gray, mask=mask, filename='test.png', outdir=cache_dir, mask_only=False)
    _ = pcv.output_mask(img=color, mask=mask, filename='test.png', outdir=cache_dir, mask_only=False)
    # Final run with debugging disabled
    pcv.params.debug = None
    imgpath, maskpath, analysis_images = pcv.output_mask(img=gray, mask=mask, filename='test.png', mask_only=False)
    # Both output files must exist
    assert os.path.exists(imgpath) and os.path.exists(maskpath)
def test_plantcv_output_mask_true():
    """output_mask with mask_only=True should still write image and mask files.

    Bug fix: the cache directory name previously duplicated the one used by
    test_plantcv_output_mask ("test_plantcv_output_mask"), so os.mkdir raised
    FileExistsError whenever both tests ran in the same session. Use a unique
    directory name instead.
    """
    # Test cache directory (unique per test to avoid mkdir collisions)
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_output_mask_true")
    pcv.params.debug_outdir = cache_dir
    os.mkdir(cache_dir)
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    img_color = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR), -1)
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.output_mask(img=img, mask=mask, filename='test.png', outdir=cache_dir, mask_only=True)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.output_mask(img=img_color, mask=mask, filename='test.png', outdir=cache_dir, mask_only=True)
    pcv.params.debug = None
    imgpath, maskpath, analysis_images = pcv.output_mask(img=img, mask=mask, filename='test.png', outdir=cache_dir,
                                                         mask_only=False)
    # Both output files must exist
    assert all([os.path.exists(imgpath) is True, os.path.exists(maskpath) is True])
def test_plantcv_plot_hist():
    """plot_hist should produce non-empty histogram data for a grayscale image."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_plot_hist")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Exercise the "print" debug path with a 16-bit image and a mask
    pcv.params.debug = "print"
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    _ = pcv.plot_hist(gray_img=np.uint16(gray), mask=mask, bins=200)
    # Exercise the "plot" debug path without a mask
    pcv.params.debug = "plot"
    hist_header, hist_data, fig_hist = pcv.plot_hist(gray_img=gray)
    # The histogram counts must not all be zero
    assert np.sum(hist_data[3]) != 0
def test_plantcv_plot_image_matplotlib_input():
    """Passing a matplotlib figure to plot_image should raise RuntimeError."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_pseudocolor")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Build a pseudocolored (matplotlib) image from the binary test data
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    fig = pcv.pseudocolor(gray_img=gray, mask=mask, min_value=10, max_value=200)
    with pytest.raises(RuntimeError):
        pcv.plot_image(fig)
def test_plantcv_print_image():
    """print_image should write the given image to the requested path."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_print_image")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read the color test image via plantcv's reader
    img, path, img_name = pcv.readimage(filename=os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    out_file = os.path.join(cache_dir, 'plantcv_print_image.jpg')
    pcv.print_image(img=img, filename=out_file)
    # The output file must exist after printing
    assert os.path.exists(out_file) is True
def test_plantcv_print_image_bad_type():
    """print_image must reject inputs that are not images."""
    with pytest.raises(RuntimeError):
        pcv.print_image(img=[], filename="/dev/null")
def test_plantcv_pseudocolor():
    """Exercise pcv.pseudocolor across debug modes, backgrounds, and options.

    Bug fix: the final check used `all([i == j] for ...)`, which iterates over
    one-element lists (always truthy), so the assertion could never fail. The
    generator now yields the comparisons themselves.
    """
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_pseudocolor")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    contours_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS), encoding="latin1")
    obj_contour = contours_npz['arr_0']
    filename = os.path.join(cache_dir, 'plantcv_pseudo_image.jpg')
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.pseudocolor(gray_img=img, mask=None)
    _ = pcv.pseudocolor(gray_img=img, mask=None)
    pimg = pcv.pseudocolor(gray_img=img, mask=mask, min_value=10, max_value=200)
    pcv.print_image(pimg, filename)
    # Test with debug = "plot" across background/obj/axes/colorbar options
    pcv.params.debug = "plot"
    _ = pcv.pseudocolor(gray_img=img, mask=mask, background="image")
    _ = pcv.pseudocolor(gray_img=img, mask=None)
    _ = pcv.pseudocolor(gray_img=img, mask=mask, background="black", obj=obj_contour, axes=False, colorbar=False)
    _ = pcv.pseudocolor(gray_img=img, mask=mask, background="image", obj=obj_contour)
    _ = pcv.pseudocolor(gray_img=img, mask=None, axes=False, colorbar=False)
    # Test with debug = None
    pcv.params.debug = None
    _ = pcv.pseudocolor(gray_img=img, mask=None)
    pseudo_img = pcv.pseudocolor(gray_img=img, mask=mask, background="white")
    # Assert that the output image has the dimensions of the input image
    assert all(i == j for i, j in zip(np.shape(pseudo_img), TEST_BINARY_DIM))
def test_plantcv_pseudocolor_bad_input():
    """pseudocolor must reject a color (non-grayscale) input image."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_pseudocolor")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    color_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    with pytest.raises(RuntimeError):
        _ = pcv.pseudocolor(gray_img=color_img)
def test_plantcv_pseudocolor_bad_background():
    """An unsupported background option must raise RuntimeError."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_pseudocolor_bad_background")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    with pytest.raises(RuntimeError):
        _ = pcv.pseudocolor(gray_img=gray, mask=mask, background="pink")
def test_plantcv_readimage_native():
    """readimage should return the image plus its path and file name.

    Bug fix: the dimension check used `all([i == j] for ...)`, a generator of
    one-element lists that is always truthy, so the shape comparison never
    actually ran. The comparisons are now asserted directly.
    """
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_readimage")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.readimage(filename=os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.readimage(filename=os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    pcv.params.debug = None
    img, path, img_name = pcv.readimage(filename=os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # The returned name and path must match the input file
    assert img_name == TEST_INPUT_COLOR
    assert path == TEST_DATA
    # The returned image must have the expected dimensions
    assert all(i == j for i, j in zip(np.shape(img), TEST_COLOR_DIM))
def test_plantcv_readimage_grayscale():
    """Both "grey" and "gray" spellings should yield a single-channel image."""
    pcv.params.debug = None
    gray_file = os.path.join(TEST_DATA, TEST_INPUT_GRAY)
    img, path, img_name = pcv.readimage(filename=gray_file, mode="grey")
    img, path, img_name = pcv.readimage(filename=gray_file, mode="gray")
    # A grayscale image has exactly two dimensions
    assert len(np.shape(img)) == 2
def test_plantcv_readimage_rgb():
    """Reading a grayscale file in "rgb" mode should yield three channels."""
    pcv.params.debug = None
    img, path, img_name = pcv.readimage(filename=os.path.join(TEST_DATA, TEST_INPUT_GRAY), mode="rgb")
    # An RGB image has exactly three dimensions
    assert len(np.shape(img)) == 3
def test_plantcv_readimage_bad_file():
    """A nonexistent file path must make readimage raise RuntimeError."""
    with pytest.raises(RuntimeError):
        _ = pcv.readimage(filename=TEST_INPUT_COLOR)
def test_plantcv_readbayer_default_bg():
    """readbayer (BG pattern, default algorithm) should return a 3-channel image.

    Bug fix: `all([i == j] for ...)` is always True (generator of non-empty
    lists); the shape tuple is now compared directly.
    """
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_readbayer_default_bg")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Test with debug = "print"
    pcv.params.debug = "print"
    _, _, _ = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                            bayerpattern="BG", alg="default")
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    img, path, img_name = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                                        bayerpattern="BG", alg="default")
    assert np.shape(img) == (335, 400, 3)
def test_plantcv_readbayer_default_gb():
    """readbayer (GB pattern, default algorithm) should return a 3-channel image.

    Bug fix: replaced the always-true `all([i == j] for ...)` tautology with a
    direct shape comparison.
    """
    pcv.params.debug = None
    img, path, img_name = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                                        bayerpattern="GB", alg="default")
    assert np.shape(img) == (335, 400, 3)
def test_plantcv_readbayer_default_rg():
    """readbayer (RG pattern, default algorithm) should return a 3-channel image.

    Bug fix: replaced the always-true `all([i == j] for ...)` tautology with a
    direct shape comparison.
    """
    pcv.params.debug = None
    img, path, img_name = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                                        bayerpattern="RG", alg="default")
    assert np.shape(img) == (335, 400, 3)
def test_plantcv_readbayer_default_gr():
    """readbayer (GR pattern, default algorithm) should return a 3-channel image.

    Bug fix: replaced the always-true `all([i == j] for ...)` tautology with a
    direct shape comparison.
    """
    pcv.params.debug = None
    img, path, img_name = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                                        bayerpattern="GR", alg="default")
    assert np.shape(img) == (335, 400, 3)
def test_plantcv_readbayer_edgeaware_bg():
    """readbayer (BG pattern, edge-aware algorithm) should return a 3-channel image.

    Bug fix: replaced the always-true `all([i == j] for ...)` tautology with a
    direct shape comparison.
    """
    pcv.params.debug = None
    img, path, img_name = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                                        bayerpattern="BG", alg="edgeaware")
    assert np.shape(img) == (335, 400, 3)
def test_plantcv_readbayer_edgeaware_gb():
    """readbayer (GB pattern, edge-aware algorithm) should return a 3-channel image.

    Bug fix: replaced the always-true `all([i == j] for ...)` tautology with a
    direct shape comparison.
    """
    pcv.params.debug = None
    img, path, img_name = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                                        bayerpattern="GB", alg="edgeaware")
    assert np.shape(img) == (335, 400, 3)
def test_plantcv_readbayer_edgeaware_rg():
    """readbayer (RG pattern, edge-aware algorithm) should return a 3-channel image.

    Bug fix: replaced the always-true `all([i == j] for ...)` tautology with a
    direct shape comparison.
    """
    pcv.params.debug = None
    img, path, img_name = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                                        bayerpattern="RG", alg="edgeaware")
    assert np.shape(img) == (335, 400, 3)
def test_plantcv_readbayer_edgeaware_gr():
    """readbayer (GR pattern, edge-aware algorithm) should return a 3-channel image.

    Bug fix: replaced the always-true `all([i == j] for ...)` tautology with a
    direct shape comparison.
    """
    pcv.params.debug = None
    img, path, img_name = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                                        bayerpattern="GR", alg="edgeaware")
    assert np.shape(img) == (335, 400, 3)
def test_plantcv_readbayer_variablenumbergradients_bg():
    """readbayer (BG pattern, VNG algorithm) should return a 3-channel image.

    Bug fix: replaced the always-true `all([i == j] for ...)` tautology with a
    direct shape comparison.
    """
    pcv.params.debug = None
    img, path, img_name = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                                        bayerpattern="BG", alg="variablenumbergradients")
    assert np.shape(img) == (335, 400, 3)
def test_plantcv_readbayer_variablenumbergradients_gb():
    """readbayer (GB pattern, VNG algorithm) should return a 3-channel image.

    Bug fix: replaced the always-true `all([i == j] for ...)` tautology with a
    direct shape comparison.
    """
    pcv.params.debug = None
    img, path, img_name = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                                        bayerpattern="GB", alg="variablenumbergradients")
    assert np.shape(img) == (335, 400, 3)
def test_plantcv_readbayer_variablenumbergradients_rg():
    """readbayer (RG pattern, VNG algorithm) should return a 3-channel image.

    Bug fix: replaced the always-true `all([i == j] for ...)` tautology with a
    direct shape comparison.
    """
    pcv.params.debug = None
    img, path, img_name = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                                        bayerpattern="RG", alg="variablenumbergradients")
    assert np.shape(img) == (335, 400, 3)
def test_plantcv_readbayer_variablenumbergradients_gr():
    """readbayer (GR pattern, VNG algorithm) should return a 3-channel image.

    Bug fix: replaced the always-true `all([i == j] for ...)` tautology with a
    direct shape comparison.
    """
    pcv.params.debug = None
    img, path, img_name = pcv.readbayer(filename=os.path.join(TEST_DATA, TEST_INPUT_BAYER),
                                        bayerpattern="GR", alg="variablenumbergradients")
    assert np.shape(img) == (335, 400, 3)
def test_plantcv_readbayer_default_bad_input():
    """A nonexistent file must make readbayer raise RuntimeError."""
    pcv.params.debug = None
    missing = os.path.join(TEST_DATA, "no-image.png")
    with pytest.raises(RuntimeError):
        _, _, _ = pcv.readbayer(filename=missing, bayerpattern="GR", alg="default")
def test_plantcv_rectangle_mask():
    """A black rectangle mask should reduce the total pixel sum of the image."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_rectangle_mask")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load grayscale and color test images
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    color = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Exercise the "print" debug path (twice, matching the original coverage)
    pcv.params.debug = "print"
    _ = pcv.rectangle_mask(img=gray, p1=(0, 0), p2=(2454, 2056), color="white")
    _ = pcv.rectangle_mask(img=gray, p1=(0, 0), p2=(2454, 2056), color="white")
    # Exercise the "plot" debug path on the color image
    pcv.params.debug = "plot"
    _ = pcv.rectangle_mask(img=color, p1=(0, 0), p2=(2454, 2056), color="gray")
    # Final run with debugging disabled and a black mask
    pcv.params.debug = None
    masked, hist, contour, heir = pcv.rectangle_mask(img=gray, p1=(0, 0), p2=(2454, 2056), color="black")
    # Black-masking pixels must lower the overall intensity sum
    assert np.sum(masked) < np.sum(gray)
def test_plantcv_rectangle_mask_bad_input():
    """A misspelled color option must make rectangle_mask raise RuntimeError."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_rectangle_mask")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load grayscale and color test images
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    img_color = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        masked, hist, contour, hier = pcv.rectangle_mask(img=gray, p1=(0, 0), p2=(2454, 2056), color="whit")
def test_plantcv_report_size_marker_detect():
    """report_size_marker_area in 'detect' mode should find a sizable marker.

    Cleanup: removed the unused local `outfile` that was assigned but never
    passed to any call.
    """
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_report_size_marker_detect")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MARKER), -1)
    # ROI contour around the marker region
    roi_contour = [np.array([[[3550, 850]], [[3550, 1349]], [[4049, 1349]], [[4049, 850]]], dtype=np.int32)]
    roi_hierarchy = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.report_size_marker_area(img=img, roi_contour=roi_contour, roi_hierarchy=roi_hierarchy, marker='detect',
                                    objcolor='light', thresh_channel='s', thresh=120)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.report_size_marker_area(img=img, roi_contour=roi_contour, roi_hierarchy=roi_hierarchy, marker='detect',
                                    objcolor='light', thresh_channel='s', thresh=120)
    # Test with debug = None
    pcv.params.debug = None
    marker_header, marker_data, images = pcv.report_size_marker_area(img=img, roi_contour=roi_contour,
                                                                     roi_hierarchy=roi_hierarchy, marker='detect',
                                                                     objcolor='light', thresh_channel='s', thresh=120)
    pcv.print_results(os.path.join(cache_dir, "results.txt"))
    pcv.outputs.clear()
    # The detected marker area should exceed 100 pixels
    assert marker_data[1] > 100
def test_plantcv_report_size_marker_define():
    """In 'define' mode the marker area equals the ROI rectangle's area."""
    # Load the marker test image
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MARKER), -1)
    # 500x500 ROI rectangle
    roi_contour = [np.array([[[3550, 850]], [[3550, 1349]], [[4049, 1349]], [[4049, 850]]], dtype=np.int32)]
    roi_hierarchy = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
    pcv.params.debug = None
    marker_header, marker_data, images = pcv.report_size_marker_area(img=img, roi_contour=roi_contour,
                                                                     roi_hierarchy=roi_hierarchy, marker='define',
                                                                     objcolor='light', thresh_channel='s', thresh=120)
    # 500 * 500 ROI -> 250000 px area
    assert marker_data[1] == 250000
def test_plantcv_report_size_marker_grayscale_input():
    """Grayscale input in 'define' mode reports the ROI area (OpenCV-version dependent)."""
    # Load the grayscale test image
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    # 50x50 ROI rectangle
    roi_contour = [np.array([[[0, 0]], [[0, 49]], [[49, 49]], [[49, 0]]], dtype=np.int32)]
    roi_hierarchy = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
    pcv.params.debug = None
    marker_header, marker_data, images = pcv.report_size_marker_area(img=img, roi_contour=roi_contour,
                                                                     roi_hierarchy=roi_hierarchy, marker='define',
                                                                     objcolor='light', thresh_channel='s', thresh=120)
    # OpenCV 2 computes the contour area differently than later versions
    if cv2.__version__[0] == '2':
        assert int(marker_data[1]) == 2401
    else:
        assert marker_data[1] == 2500
def test_plantcv_report_size_marker_bad_marker_input():
    """An unsupported marker mode must raise RuntimeError."""
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MARKER), -1)
    # ROI rectangle around the marker region
    roi_contour = [np.array([[[3550, 850]], [[3550, 1349]], [[4049, 1349]], [[4049, 850]]], dtype=np.int32)]
    roi_hierarchy = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
    with pytest.raises(RuntimeError):
        _ = pcv.report_size_marker_area(img=img, roi_contour=roi_contour, roi_hierarchy=roi_hierarchy, marker='none',
                                        objcolor='light', thresh_channel='s', thresh=120)
def test_plantcv_report_size_marker_bad_threshold_input():
    """marker='detect' without a threshold channel must raise RuntimeError."""
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MARKER), -1)
    # ROI rectangle around the marker region
    roi_contour = [np.array([[[3550, 850]], [[3550, 1349]], [[4049, 1349]], [[4049, 850]]], dtype=np.int32)]
    roi_hierarchy = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
    with pytest.raises(RuntimeError):
        _ = pcv.report_size_marker_area(img=img, roi_contour=roi_contour, roi_hierarchy=roi_hierarchy, marker='detect',
                                        objcolor='light', thresh_channel=None, thresh=120)
def test_plantcv_resize():
    """Resizing by 0.5 should shrink the image along its first axis.

    Cleanup: the original unpacked full shapes into six locals (ix, iy, iz,
    rx, ry, rz) but only compared ix and rx; the unused names are removed.
    """
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_resize")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.resize(img=img, resize_x=0.5, resize_y=0.5)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.resize(img=img, resize_x=0.5, resize_y=0.5)
    # Test with debug = None
    pcv.params.debug = None
    resized_img = pcv.resize(img=img, resize_x=0.5, resize_y=0.5)
    # The resized image must be smaller than the original along axis 0
    assert np.shape(img)[0] > np.shape(resized_img)[0]
def test_plantcv_resize_bad_inputs():
    """Negative resize factors must make resize raise RuntimeError."""
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    pcv.params.debug = None
    # Two negative scale factors trigger the fatal-error path
    with pytest.raises(RuntimeError):
        _ = pcv.resize(img=img, resize_x=-1, resize_y=-1)
def test_plantcv_rgb2gray_hsv():
    """rgb2gray_hsv should return a single-channel image of the input's size.

    Bug fix: the assertion used `all([i == j] for ...)`, a generator of
    non-empty lists that is always truthy; the comparisons now run for real.
    """
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_rgb2gray_hsv")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.rgb2gray_hsv(rgb_img=img, channel="s")
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.rgb2gray_hsv(rgb_img=img, channel="s")
    # Test with debug = None
    pcv.params.debug = None
    s = pcv.rgb2gray_hsv(rgb_img=img, channel="s")
    # Assert that the output image has the dimensions of the input image but is only a single channel
    assert all(i == j for i, j in zip(np.shape(s), TEST_GRAY_DIM))
def test_plantcv_rgb2gray_hsv_bad_input():
    """An invalid HSV channel name must raise RuntimeError."""
    rgb = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.rgb2gray_hsv(rgb_img=rgb, channel="l")
def test_plantcv_rgb2gray_lab():
    """rgb2gray_lab should return a single-channel image of the input's size.

    Bug fix: the assertion used `all([i == j] for ...)`, a generator of
    non-empty lists that is always truthy; the comparisons now run for real.
    """
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_rgb2gray_lab")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.rgb2gray_lab(rgb_img=img, channel='b')
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.rgb2gray_lab(rgb_img=img, channel='b')
    # Test with debug = None
    pcv.params.debug = None
    b = pcv.rgb2gray_lab(rgb_img=img, channel='b')
    # Assert that the output image has the dimensions of the input image but is only a single channel
    assert all(i == j for i, j in zip(np.shape(b), TEST_GRAY_DIM))
def test_plantcv_rgb2gray_lab_bad_input():
    """An invalid LAB channel name must raise RuntimeError."""
    rgb = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.rgb2gray_lab(rgb_img=rgb, channel="v")
def test_plantcv_rgb2gray():
    """rgb2gray should return a single-channel image of the input's size.

    Bug fix: the assertion used `all([i == j] for ...)`, a generator of
    non-empty lists that is always truthy; the comparisons now run for real.
    """
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_rgb2gray")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.rgb2gray(rgb_img=img)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.rgb2gray(rgb_img=img)
    # Test with debug = None
    pcv.params.debug = None
    gray = pcv.rgb2gray(rgb_img=img)
    # Assert that the output image has the dimensions of the input image but is only a single channel
    assert all(i == j for i, j in zip(np.shape(gray), TEST_GRAY_DIM))
def test_plantcv_roi_objects():
    """roi_objects should filter contours to the expected count for each roi_type."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_roi_objects")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load the test image plus saved ROI and object contours/hierarchies
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    roi_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_ROI), encoding="latin1")
    roi_contour, roi_hierarchy = roi_npz['arr_0'], roi_npz['arr_1']
    obj_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS1), encoding="latin1")
    object_contours, object_hierarchy = obj_npz['arr_0'], obj_npz['arr_1']
    # Exercise the "print" debug path with roi_type="largest"
    pcv.params.debug = "print"
    _ = pcv.roi_objects(img=img, roi_type="largest", roi_contour=roi_contour, roi_hierarchy=roi_hierarchy,
                        object_contour=object_contours, obj_hierarchy=object_hierarchy)
    # Exercise the "plot" debug path with roi_type="partial"
    pcv.params.debug = "plot"
    _ = pcv.roi_objects(img=img, roi_type="partial", roi_contour=roi_contour, roi_hierarchy=roi_hierarchy,
                        object_contour=object_contours, obj_hierarchy=object_hierarchy)
    # roi_type="cutto" with debugging disabled
    pcv.params.debug = None
    _ = pcv.roi_objects(img=img, roi_type="cutto", roi_contour=roi_contour, roi_hierarchy=roi_hierarchy,
                        object_contour=object_contours, obj_hierarchy=object_hierarchy)
    # Final "partial" run whose output we check
    kept_contours, kept_hierarchy, mask, area = pcv.roi_objects(img=img, roi_type="partial", roi_contour=roi_contour,
                                                                roi_hierarchy=roi_hierarchy,
                                                                object_contour=object_contours,
                                                                obj_hierarchy=object_hierarchy)
    # The expected number of contours survive the ROI filter
    assert len(kept_contours) == 1046
def test_plantcv_roi_objects_bad_input():
    """An unknown roi_type must make roi_objects raise RuntimeError."""
    # Load the test image plus saved ROI and object contours/hierarchies
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    roi_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_ROI), encoding="latin1")
    roi_contour, roi_hierarchy = roi_npz['arr_0'], roi_npz['arr_1']
    obj_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS1), encoding="latin1")
    object_contours, object_hierarchy = obj_npz['arr_0'], obj_npz['arr_1']
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.roi_objects(img=img, roi_type="cut", roi_contour=roi_contour, roi_hierarchy=roi_hierarchy,
                            object_contour=object_contours, obj_hierarchy=object_hierarchy)
def test_plantcv_roi_objects_grayscale_input():
    """roi_objects should also accept a grayscale image input."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_roi_objects_grayscale_input")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load the test image as grayscale plus saved ROI and object data
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR), 0)
    roi_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_ROI), encoding="latin1")
    roi_contour, roi_hierarchy = roi_npz['arr_0'], roi_npz['arr_1']
    obj_npz = np.load(os.path.join(TEST_DATA, TEST_INPUT_CONTOURS1), encoding="latin1")
    object_contours, object_hierarchy = obj_npz['arr_0'], obj_npz['arr_1']
    # Run with the "plot" debug path enabled
    pcv.params.debug = "plot"
    kept_contours, kept_hierarchy, mask, area = pcv.roi_objects(img=gray, roi_type="partial", roi_contour=roi_contour,
                                                                roi_hierarchy=roi_hierarchy,
                                                                object_contour=object_contours,
                                                                obj_hierarchy=object_hierarchy)
    # The expected number of contours survive the ROI filter
    assert len(kept_contours) == 1046
def test_plantcv_rotate():
    """Rotating with crop=True should change the image's mean intensity."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_rotate_img")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load the color test image
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Exercise the "print" debug path
    pcv.params.debug = "print"
    _ = pcv.rotate(img=img, rotation_deg=45, crop=True)
    # Exercise the "plot" debug path
    pcv.params.debug = "plot"
    _ = pcv.rotate(img=img, rotation_deg=45, crop=True)
    # Final run with debugging disabled
    pcv.params.debug = None
    rotated = pcv.rotate(img=img, rotation_deg=45, crop=True)
    # Cropped rotation alters the average pixel value
    assert np.average(rotated) != np.average(img)
def test_plantcv_rotate_gray():
    """Rotating a grayscale image with crop=False should change its mean."""
    gray = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    # Exercise the "plot" debug path
    pcv.params.debug = "plot"
    _ = pcv.rotate(img=gray, rotation_deg=45, crop=False)
    # Final run with debugging disabled
    pcv.params.debug = None
    rotated = pcv.rotate(img=gray, rotation_deg=45, crop=False)
    # Uncropped rotation pads with background, shifting the average
    assert np.average(rotated) != np.average(gray)
def test_plantcv_scale_features():
    """scale_features should rescale the expected number of landmark points."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_scale_features")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load the small mask and its composed contour
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_MASK_SMALL), -1)
    saved = np.load(os.path.join(TEST_DATA, TEST_VIS_COMP_CONTOUR), encoding="latin1")
    obj_contour = saved['arr_0']
    # Exercise the "print" debug path with a numeric line position
    pcv.params.debug = "print"
    _ = pcv.scale_features(obj=obj_contour, mask=mask, points=TEST_ACUTE_RESULT, line_position=50)
    # Exercise the "plot" debug path with the 'NA' line position
    pcv.params.debug = "plot"
    _ = pcv.scale_features(obj=obj_contour, mask=mask, points=TEST_ACUTE_RESULT, line_position='NA')
    # Final run with debugging disabled
    pcv.params.debug = None
    points_rescaled, centroid_rescaled, bottomline_rescaled = pcv.scale_features(obj=obj_contour, mask=mask,
                                                                                 points=TEST_ACUTE_RESULT,
                                                                                 line_position=50)
    # All 23 acute points should be rescaled
    assert len(points_rescaled) == 23
def test_plantcv_scale_features_bad_input():
    """Empty mask/contour inputs should yield the ("NA", "NA") placeholders.

    Bug fix: the assertion used `all([i == j] for ...)`, a generator of
    non-empty lists that is always truthy; the elementwise comparison now
    actually runs.
    """
    mask = np.array([])
    obj_contour = np.array([])
    pcv.params.debug = None
    result = pcv.scale_features(obj=obj_contour, mask=mask, points=TEST_ACUTE_RESULT, line_position=50)
    # Each of the three returned values should be the ("NA", "NA") placeholder
    assert all(i == j for i, j in zip(result, [("NA", "NA"), ("NA", "NA"), ("NA", "NA")]))
def test_plantcv_scharr_filter():
    """scharr_filter should preserve the input image's dimensions.

    Bug fix: the assertion used `all([i == j] for ...)`, a generator of
    non-empty lists that is always truthy; the comparisons now run for real.
    """
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_scharr_filter")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    pcv.params.debug = "print"
    # Test with debug = "print"
    _ = pcv.scharr_filter(img=img, dx=1, dy=0, scale=1)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.scharr_filter(img=img, dx=1, dy=0, scale=1)
    # Test with debug = None
    pcv.params.debug = None
    scharr_img = pcv.scharr_filter(img=img, dx=1, dy=0, scale=1)
    # Assert that the output image has the dimensions of the input image
    assert all(i == j for i, j in zip(np.shape(scharr_img), TEST_GRAY_DIM))
def test_plantcv_shift_img():
    """shift_img in every direction should alter the image's mean intensity."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_shift_img")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load color and binary test images
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_BINARY), -1)
    # Exercise the "print" debug path
    pcv.params.debug = "print"
    _ = pcv.shift_img(img=img, number=300, side="top")
    # Exercise the "plot" debug path for every supported side
    pcv.params.debug = "plot"
    _ = pcv.shift_img(img=img, number=300, side="top")
    _ = pcv.shift_img(img=img, number=300, side="bottom")
    _ = pcv.shift_img(img=img, number=300, side="right")
    _ = pcv.shift_img(img=mask, number=300, side="left")
    # Final run with debugging disabled
    pcv.params.debug = None
    shifted = pcv.shift_img(img=img, number=300, side="top")
    # Shifting replaces content, so the average pixel value changes
    assert np.average(shifted) != np.average(img)
def test_plantcv_shift_img_bad_input():
    """A negative shift amount must make shift_img raise RuntimeError."""
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    with pytest.raises(RuntimeError):
        pcv.params.debug = None
        _ = pcv.shift_img(img=img, number=-300, side="top")
def test_plantcv_shift_img_bad_side_input():
    """An unknown side value must make shift_img raise RuntimeError."""
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    with pytest.raises(RuntimeError):
        pcv.params.debug = None
        _ = pcv.shift_img(img=img, number=300, side="starboard")
def test_plantcv_sobel_filter():
    """sobel_filter should preserve the input image's dimensions.

    Bug fix: the assertion used `all([i == j] for ...)`, a generator of
    non-empty lists that is always truthy; the comparisons now run for real.
    """
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_sobel_filter")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.sobel_filter(gray_img=img, dx=1, dy=0, ksize=1)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.sobel_filter(gray_img=img, dx=1, dy=0, ksize=1)
    # Test with debug = None
    pcv.params.debug = None
    sobel_img = pcv.sobel_filter(gray_img=img, dx=1, dy=0, ksize=1)
    # Assert that the output image has the dimensions of the input image
    assert all(i == j for i, j in zip(np.shape(sobel_img), TEST_GRAY_DIM))
def test_plantcv_watershed_segmentation():
    """watershed_segmentation should count more than 9 segmented objects.

    Cleanup: the original ended with an if/else on the OpenCV version whose
    two branches were byte-identical (`assert watershed_data[1] > 9`), and it
    assigned an `outfile` local that was never used; both are removed.
    """
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_watershed_segmentation")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_CROPPED))
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_CROPPED_MASK), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.watershed_segmentation(rgb_img=img, mask=mask, distance=10)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.watershed_segmentation(rgb_img=img, mask=mask, distance=10)
    # Test with debug = None
    pcv.params.debug = None
    watershed_header, watershed_data, images = pcv.watershed_segmentation(rgb_img=img, mask=mask, distance=10)
    pcv.print_results(os.path.join(cache_dir, "results.txt"))
    pcv.outputs.clear()
    # Same expectation regardless of OpenCV version
    assert watershed_data[1] > 9
def test_plantcv_white_balance_gray_16bit():
    """white_balance on a 16-bit grayscale image should change its mean."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_white_balance_gray_16bit")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load the 16-bit NIR mask image
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_NIR_MASK), -1)
    # Exercise the "print" debug path (hist mode)
    pcv.params.debug = "print"
    _ = pcv.white_balance(img=img, mode='hist', roi=(5, 5, 80, 80))
    # Exercise the "plot" debug path (max mode)
    pcv.params.debug = "plot"
    _ = pcv.white_balance(img=img, mode='max', roi=(5, 5, 80, 80))
    # ROI-less run with debugging disabled
    pcv.params.debug = None
    _ = pcv.white_balance(img=img, mode='hist', roi=None)
    # Final run whose output we check
    white_balanced = pcv.white_balance(img=img, roi=(5, 5, 80, 80))
    # Balancing shifts the average intensity
    assert np.average(white_balanced) != np.average(img)
def test_plantcv_white_balance_gray_8bit():
    """white_balance on an 8-bit grayscale image should change its mean."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_white_balance_gray_8bit")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load the NIR mask image and convert it to 8-bit grayscale
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_NIR_MASK))
    img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Exercise the "print" debug path (hist mode)
    pcv.params.debug = "print"
    _ = pcv.white_balance(img=img, mode='hist', roi=(5, 5, 80, 80))
    # Exercise the "plot" debug path (max mode)
    pcv.params.debug = "plot"
    _ = pcv.white_balance(img=img, mode='max', roi=(5, 5, 80, 80))
    # ROI-less run with debugging disabled
    pcv.params.debug = None
    _ = pcv.white_balance(img=img, mode='hist', roi=None)
    # Final run whose output we check
    white_balanced = pcv.white_balance(img=img, roi=(5, 5, 80, 80))
    # Balancing shifts the average intensity
    assert np.average(white_balanced) != np.average(img)
def test_plantcv_white_balance_rgb():
    """white_balance on an RGB image should change its mean intensity."""
    # Cache directory for debug output
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_white_balance_rgb")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Load the color marker test image
    img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MARKER))
    # Exercise the "print" debug path (hist mode)
    pcv.params.debug = "print"
    _ = pcv.white_balance(img=img, mode='hist', roi=(5, 5, 80, 80))
    # Exercise the "plot" debug path (max mode)
    pcv.params.debug = "plot"
    _ = pcv.white_balance(img=img, mode='max', roi=(5, 5, 80, 80))
    # ROI-less run with debugging disabled
    pcv.params.debug = None
    _ = pcv.white_balance(img=img, mode='hist', roi=None)
    # Final run whose output we check
    white_balanced = pcv.white_balance(img=img, roi=(5, 5, 80, 80))
    # Balancing shifts the average intensity
    assert np.average(white_balanced) != np.average(img)
def test_plantcv_white_balance_bad_input():
    """A 5-element ROI tuple (one too many) must raise a RuntimeError."""
    nir_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_NIR_MASK), -1)
    pcv.params.debug = "plot"
    with pytest.raises(RuntimeError):
        _ = pcv.white_balance(img=nir_img, mode='hist', roi=(5, 5, 5, 5, 5))
def test_plantcv_white_balance_bad_mode_input():
    """An unrecognized mode string ('histogram' instead of 'hist') must raise a RuntimeError."""
    marker_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_MARKER))
    pcv.params.debug = "plot"
    with pytest.raises(RuntimeError):
        _ = pcv.white_balance(img=marker_img, mode='histogram', roi=(5, 5, 80, 80))
def test_plantcv_white_balance_bad_input_int():
    """A non-integer ROI coordinate (float) must raise a RuntimeError."""
    nir_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_NIR_MASK), -1)
    pcv.params.debug = "plot"
    with pytest.raises(RuntimeError):
        _ = pcv.white_balance(img=nir_img, mode='hist', roi=(5., 5, 5, 5))
def test_plantcv_x_axis_pseudolandmarks():
    """x_axis_pseudolandmarks should return 20 top/bottom/center points for a plant contour."""
    # Per-test cache directory for debug output files
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_x_axis_pseudolandmarks_debug")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    img = cv2.imread(os.path.join(TEST_DATA, TEST_VIS_SMALL))
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_MASK_SMALL), -1)
    contours_npz = np.load(os.path.join(TEST_DATA, TEST_VIS_COMP_CONTOUR), encoding="latin1")
    obj_contour = contours_npz['arr_0']
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.x_axis_pseudolandmarks(obj=obj_contour, mask=mask, img=img)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.x_axis_pseudolandmarks(obj=obj_contour, mask=mask, img=img)
    # Degenerate/tiny objects should still be handled without error
    _ = pcv.x_axis_pseudolandmarks(obj=np.array([[0, 0], [0, 0]]), mask=np.array([[0, 0], [0, 0]]), img=img)
    _ = pcv.x_axis_pseudolandmarks(obj=np.array(([[89, 222]], [[252, 39]], [[89, 207]])),
                                   mask=np.array(([[42, 161]], [[2, 47]], [[211, 222]])), img=img)
    _ = pcv.x_axis_pseudolandmarks(obj=(), mask=mask, img=img)
    # Test with debug = None
    pcv.params.debug = None
    top, bottom, center_v = pcv.x_axis_pseudolandmarks(obj=obj_contour, mask=mask, img=img)
    pcv.print_results(os.path.join(cache_dir, "results.txt"))
    pcv.outputs.clear()
    # BUG FIX: the original `all([i == j] for ...)` iterated non-empty lists and was
    # therefore always True. Compare the shapes directly instead.
    assert all([np.shape(top) == (20, 1, 2),
                np.shape(bottom) == (20, 1, 2),
                np.shape(center_v) == (20, 1, 2)])
def test_plantcv_x_axis_pseudolandmarks_small_obj():
    """x_axis_pseudolandmarks should return 20 points for a small plant contour, and tolerate empty objects."""
    img = cv2.imread(os.path.join(TEST_DATA, TEST_VIS_SMALL_PLANT))
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_MASK_SMALL_PLANT), -1)
    contours_npz = np.load(os.path.join(TEST_DATA, TEST_VIS_COMP_CONTOUR_SMALL_PLANT), encoding="latin1")
    obj_contour = contours_npz['arr_0']
    # Test with debug = "print" (empty object then real contour)
    pcv.params.debug = "print"
    _, _, _ = pcv.x_axis_pseudolandmarks(obj=[], mask=mask, img=img)
    _, _, _ = pcv.x_axis_pseudolandmarks(obj=obj_contour, mask=mask, img=img)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _, _, _ = pcv.x_axis_pseudolandmarks(obj=[], mask=mask, img=img)
    top, bottom, center_v = pcv.x_axis_pseudolandmarks(obj=obj_contour, mask=mask, img=img)
    # BUG FIX: the original `all([i == j] for ...)` was always True (non-empty lists are
    # truthy). Compare the shapes directly instead.
    assert all([np.shape(top) == (20, 1, 2),
                np.shape(bottom) == (20, 1, 2),
                np.shape(center_v) == (20, 1, 2)])
def test_plantcv_x_axis_pseudolandmarks_bad_input():
    """Empty inputs should yield "NA" landmark placeholders."""
    img = np.array([])
    mask = np.array([])
    obj_contour = np.array([])
    pcv.params.debug = None
    result = pcv.x_axis_pseudolandmarks(obj=obj_contour, mask=mask, img=img)
    # BUG FIX: the original wrapped each comparison in a one-element list, making the
    # generator's items always truthy. Compare element-wise for real.
    assert all(i == j for i, j in zip(result, [("NA", "NA"), ("NA", "NA"), ("NA", "NA")]))
def test_plantcv_x_axis_pseudolandmarks_bad_obj_input():
    """Negative contour/mask coordinates must raise a RuntimeError."""
    vis_img = cv2.imread(os.path.join(TEST_DATA, TEST_VIS_SMALL_PLANT))
    bad_obj = np.array([[-2, -2], [-2, -2]])
    bad_mask = np.array([[-2, -2], [-2, -2]])
    with pytest.raises(RuntimeError):
        _ = pcv.x_axis_pseudolandmarks(obj=bad_obj, mask=bad_mask, img=vis_img)
def test_plantcv_y_axis_pseudolandmarks():
    """y_axis_pseudolandmarks should return 20 left/right/center points for a plant contour."""
    # Per-test cache directory for debug output files
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_y_axis_pseudolandmarks_debug")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    img = cv2.imread(os.path.join(TEST_DATA, TEST_VIS_SMALL))
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_MASK_SMALL), -1)
    contours_npz = np.load(os.path.join(TEST_DATA, TEST_VIS_COMP_CONTOUR), encoding="latin1")
    obj_contour = contours_npz['arr_0']
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.y_axis_pseudolandmarks(obj=obj_contour, mask=mask, img=img)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.y_axis_pseudolandmarks(obj=obj_contour, mask=mask, img=img)
    pcv.outputs.clear()
    # Degenerate/tiny objects should still be handled without error
    _ = pcv.y_axis_pseudolandmarks(obj=[], mask=mask, img=img)
    _ = pcv.y_axis_pseudolandmarks(obj=(), mask=mask, img=img)
    _ = pcv.y_axis_pseudolandmarks(obj=np.array(([[89, 222]], [[252, 39]], [[89, 207]])),
                                   mask=np.array(([[42, 161]], [[2, 47]], [[211, 222]])), img=img)
    _ = pcv.y_axis_pseudolandmarks(obj=np.array(([[21, 11]], [[159, 155]], [[237, 11]])),
                                   mask=np.array(([[38, 54]], [[144, 169]], [[81, 137]])), img=img)
    # Test with debug = None
    pcv.params.debug = None
    left, right, center_h = pcv.y_axis_pseudolandmarks(obj=obj_contour, mask=mask, img=img)
    pcv.print_results(os.path.join(cache_dir, "results.txt"))
    pcv.outputs.clear()
    # BUG FIX: the original `all([i == j] for ...)` was always True (non-empty lists are
    # truthy). Compare the shapes directly instead.
    assert all([np.shape(left) == (20, 1, 2),
                np.shape(right) == (20, 1, 2),
                np.shape(center_h) == (20, 1, 2)])
def test_plantcv_y_axis_pseudolandmarks_small_obj():
    """y_axis_pseudolandmarks should return 20 points for a small plant contour, and tolerate empty objects."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_y_axis_pseudolandmarks_debug")
    os.mkdir(cache_dir)
    img = cv2.imread(os.path.join(TEST_DATA, TEST_VIS_SMALL_PLANT))
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_MASK_SMALL_PLANT), -1)
    contours_npz = np.load(os.path.join(TEST_DATA, TEST_VIS_COMP_CONTOUR_SMALL_PLANT), encoding="latin1")
    obj_contour = contours_npz['arr_0']
    # Test with debug = "print" (empty object then real contour)
    pcv.params.debug = "print"
    _, _, _ = pcv.y_axis_pseudolandmarks(obj=[], mask=mask, img=img)
    _, _, _ = pcv.y_axis_pseudolandmarks(obj=obj_contour, mask=mask, img=img)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    pcv.outputs.clear()
    left, right, center_h = pcv.y_axis_pseudolandmarks(obj=obj_contour, mask=mask, img=img)
    pcv.print_results(os.path.join(cache_dir, "results.txt"))
    pcv.outputs.clear()
    # BUG FIX: the original `all([i == j] for ...)` was always True (non-empty lists are
    # truthy). Compare the shapes directly instead.
    assert all([np.shape(left) == (20, 1, 2),
                np.shape(right) == (20, 1, 2),
                np.shape(center_h) == (20, 1, 2)])
def test_plantcv_y_axis_pseudolandmarks_bad_input():
    """Empty inputs should yield "NA" landmark placeholders."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_y_axis_pseudolandmarks_debug")
    os.mkdir(cache_dir)
    img = np.array([])
    mask = np.array([])
    obj_contour = np.array([])
    pcv.params.debug = None
    result = pcv.y_axis_pseudolandmarks(obj=obj_contour, mask=mask, img=img)
    pcv.print_results(os.path.join(cache_dir, "results.txt"))
    pcv.outputs.clear()
    # BUG FIX: the original wrapped each comparison in a one-element list, making the
    # generator's items always truthy. Compare element-wise for real.
    assert all(i == j for i, j in zip(result, [("NA", "NA"), ("NA", "NA"), ("NA", "NA")]))
def test_plantcv_y_axis_pseudolandmarks_bad_obj_input():
    """Negative contour/mask coordinates must raise a RuntimeError."""
    vis_img = cv2.imread(os.path.join(TEST_DATA, TEST_VIS_SMALL_PLANT))
    bad_obj = np.array([[-2, -2], [-2, -2]])
    bad_mask = np.array([[-2, -2], [-2, -2]])
    with pytest.raises(RuntimeError):
        _ = pcv.y_axis_pseudolandmarks(obj=bad_obj, mask=bad_mask, img=vis_img)
def test_plantcv_background_subtraction():
    """Differing images yield a non-empty foreground mask; an image subtracted from itself yields zero."""
    # List to hold result of all tests.
    truths = []
    fg_img = cv2.imread(os.path.join(TEST_DATA, TEST_FOREGROUND))
    bg_img = cv2.imread(os.path.join(TEST_DATA, TEST_BACKGROUND))
    big_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    pcv.params.debug = None
    # Subtracting different images should leave foreground pixels (mask sum > 0)
    fgmask = pcv.background_subtraction(background_image=bg_img, foreground_image=fg_img)
    truths.append(np.sum(fgmask) > 0)
    fgmask = pcv.background_subtraction(background_image=big_img, foreground_image=bg_img)
    truths.append(np.sum(fgmask) > 0)
    # The same foreground subtracted from itself should be 0
    fgmask = pcv.background_subtraction(background_image=fg_img, foreground_image=fg_img)
    truths.append(np.sum(fgmask) == 0)
    # The same background subtracted from itself should be 0
    fgmask = pcv.background_subtraction(background_image=bg_img, foreground_image=bg_img)
    truths.append(np.sum(fgmask) == 0)
    # BUG FIX: the original branched on cv2.__version__ but both branches asserted the
    # exact same thing; the dead conditional is collapsed into a single assert.
    assert all(truths)
def test_plantcv_background_subtraction_debug():
    """Run background_subtraction with both debug modes; both runs must produce a non-empty mask."""
    # Per-test cache directory for debug output files
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_background_subtraction_debug")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # List to hold result of all tests.
    truths = []
    fg_img = cv2.imread(os.path.join(TEST_DATA, TEST_FOREGROUND))
    bg_img = cv2.imread(os.path.join(TEST_DATA, TEST_BACKGROUND))
    # Test with debug = "print"
    pcv.params.debug = "print"
    fgmask = pcv.background_subtraction(background_image=bg_img, foreground_image=fg_img)
    truths.append(np.sum(fgmask) > 0)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    fgmask = pcv.background_subtraction(background_image=bg_img, foreground_image=fg_img)
    truths.append(np.sum(fgmask) > 0)
    # All of these should be true for the function to pass testing.
    assert (all(truths))
def test_plantcv_background_subtraction_bad_img_type():
    """Mixing a color foreground with a grayscale background must raise a RuntimeError."""
    foreground = cv2.imread(os.path.join(TEST_DATA, TEST_FOREGROUND))
    background = cv2.imread(os.path.join(TEST_DATA, TEST_BACKGROUND), 0)
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.background_subtraction(background_image=background, foreground_image=foreground)
def test_plantcv_background_subtraction_different_sizes():
    """background_subtraction should handle background/foreground images of different sizes."""
    fg_img = cv2.imread(os.path.join(TEST_DATA, TEST_FOREGROUND))
    bg_img = cv2.imread(os.path.join(TEST_DATA, TEST_BACKGROUND))
    bg_shp = np.shape(bg_img)  # (rows, cols, channels)
    # BUG FIX: cv2.resize expects dsize as (width, height); the original passed
    # (rows, cols) — i.e. swapped. The test intent (a differently-sized background)
    # is preserved either way, but the dimensions are now halved as intended.
    bg_img_resized = cv2.resize(bg_img, (int(bg_shp[1] / 2), int(bg_shp[0] / 2)), interpolation=cv2.INTER_AREA)
    pcv.params.debug = None
    fgmask = pcv.background_subtraction(background_image=bg_img_resized, foreground_image=fg_img)
    assert np.sum(fgmask) > 0
# ##############################
# Tests for the learn subpackage
# ##############################
def test_plantcv_learn_naive_bayes():
    """Train the naive Bayes module on one image/mask pair and check the PDF output file exists."""
    # Per-test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_learn_naive_bayes")
    os.mkdir(cache_dir)
    # Make image and mask directories in the cache directory
    imgdir = os.path.join(cache_dir, "images")
    maskdir = os.path.join(cache_dir, "masks")
    if not os.path.exists(imgdir):
        os.mkdir(imgdir)
    if not os.path.exists(maskdir):
        os.mkdir(maskdir)
    # Copy an image and its mask into the image/mask directories (matching filenames pair them)
    shutil.copyfile(os.path.join(TEST_DATA, TEST_VIS_SMALL), os.path.join(imgdir, "image.png"))
    shutil.copyfile(os.path.join(TEST_DATA, TEST_MASK_SMALL), os.path.join(maskdir, "image.png"))
    # Run the naive Bayes training module
    outfile = os.path.join(cache_dir, "naive_bayes_pdfs.txt")
    plantcv.learn.naive_bayes(imgdir=imgdir, maskdir=maskdir, outfile=outfile, mkplots=True)
    assert os.path.exists(outfile)
def test_plantcv_learn_naive_bayes_multiclass():
    """Train the multiclass naive Bayes module and check the PDF output file exists."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_learn_naive_bayes_multiclass")
    os.mkdir(cache_dir)
    outfile = os.path.join(cache_dir, "naive_bayes_multiclass_pdfs.txt")
    plantcv.learn.naive_bayes_multiclass(samples_file=os.path.join(TEST_DATA, TEST_SAMPLED_RGB_POINTS),
                                         outfile=outfile, mkplots=True)
    assert os.path.exists(outfile)
# ##############################
# Tests for the roi subpackage
# ##############################
def test_plantcv_roi_from_binary_image():
    """Build an ROI from a synthetic rectangular binary mask; the contour should trace the rectangle."""
    # Per-test cache directory for debug output files
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_roi_from_binary_image")
    os.mkdir(cache_dir)
    # Read in test RGB image
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Create a binary image: filled 900x900 rectangle on a black canvas
    bin_img = np.zeros(np.shape(rgb_img)[0:2], dtype=np.uint8)
    cv2.rectangle(bin_img, (100, 100), (1000, 1000), 255, -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    pcv.params.debug_outdir = cache_dir
    _, _ = pcv.roi.from_binary_image(bin_img=bin_img, img=rgb_img)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _, _ = pcv.roi.from_binary_image(bin_img=bin_img, img=rgb_img)
    # Test with debug = None
    pcv.params.debug = None
    roi_contour, roi_hierarchy = pcv.roi.from_binary_image(bin_img=bin_img, img=rgb_img)
    # Assert the contours and hierarchy lists contain only the ROI (3600 perimeter points)
    assert np.shape(roi_contour) == (1, 3600, 1, 2)
def test_plantcv_roi_from_binary_image_grayscale_input():
    """from_binary_image must also accept a grayscale reference image."""
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    # Synthetic binary mask: filled rectangle on a black canvas of the same footprint
    bin_img = np.zeros(np.shape(gray_img)[0:2], dtype=np.uint8)
    cv2.rectangle(bin_img, (100, 100), (1000, 1000), 255, -1)
    pcv.params.debug = "plot"
    contour, hierarchy = pcv.roi.from_binary_image(bin_img=bin_img, img=gray_img)
    # The rectangle perimeter is traced with 3600 contour points
    assert np.shape(contour) == (1, 3600, 1, 2)
def test_plantcv_roi_from_binary_image_bad_binary_input():
    """Passing an RGB image where a binary mask is required must raise a RuntimeError."""
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    with pytest.raises(RuntimeError):
        _, _ = pcv.roi.from_binary_image(bin_img=rgb_img, img=rgb_img)
def test_plantcv_roi_rectangle():
    """Create a rectangular ROI in all debug modes; the contour should have 4 corner points."""
    # Per-test cache directory for debug output files
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_roi_rectangle")
    os.mkdir(cache_dir)
    # Read in test RGB image
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Test with debug = "print"
    pcv.params.debug = "print"
    pcv.params.debug_outdir = cache_dir
    _, _ = pcv.roi.rectangle(x=100, y=100, h=500, w=500, img=rgb_img)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _, _ = pcv.roi.rectangle(x=100, y=100, h=500, w=500, img=rgb_img)
    # Test with debug = None
    pcv.params.debug = None
    roi_contour, roi_hierarchy = pcv.roi.rectangle(x=100, y=100, h=500, w=500, img=rgb_img)
    # Assert the contours and hierarchy lists contain only the ROI (4 corners)
    assert np.shape(roi_contour) == (1, 4, 1, 2)
def test_plantcv_roi_rectangle_grayscale_input():
    """pcv.roi.rectangle must also accept a grayscale reference image."""
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    pcv.params.debug = "plot"
    contour, hierarchy = pcv.roi.rectangle(x=100, y=100, h=500, w=500, img=gray_img)
    # A rectangle ROI is represented by its 4 corner points
    assert np.shape(contour) == (1, 4, 1, 2)
def test_plantcv_roi_rectangle_out_of_frame():
    """A rectangle ROI extending past the image edge must raise a RuntimeError."""
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    with pytest.raises(RuntimeError):
        _, _ = pcv.roi.rectangle(x=100, y=100, h=500, w=3000, img=rgb_img)
def test_plantcv_roi_circle():
    """Create a circular ROI in all debug modes; the r=75 circle contour should have 424 points."""
    # Per-test cache directory for debug output files
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_roi_circle")
    os.mkdir(cache_dir)
    # Read in test RGB image
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Test with debug = "print"
    pcv.params.debug = "print"
    pcv.params.debug_outdir = cache_dir
    _, _ = pcv.roi.circle(x=100, y=100, r=50, img=rgb_img)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _, _ = pcv.roi.circle(x=100, y=100, r=50, img=rgb_img)
    # Test with debug = None (different, fully in-frame circle)
    pcv.params.debug = None
    roi_contour, roi_hierarchy = pcv.roi.circle(x=200, y=225, r=75, img=rgb_img)
    # Assert the contours and hierarchy lists contain only the ROI
    assert np.shape(roi_contour) == (1, 424, 1, 2)
def test_plantcv_roi_circle_grayscale_input():
    """pcv.roi.circle must also accept a grayscale reference image."""
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    pcv.params.debug = "plot"
    contour, hierarchy = pcv.roi.circle(x=200, y=225, r=75, img=gray_img)
    # A radius-75 circle is rendered with 424 contour points
    assert np.shape(contour) == (1, 424, 1, 2)
def test_plantcv_roi_circle_out_of_frame():
    """A circle ROI crossing the image boundary must raise a RuntimeError."""
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    with pytest.raises(RuntimeError):
        _, _ = pcv.roi.circle(x=50, y=225, r=75, img=rgb_img)
def test_plantcv_roi_ellipse():
    """Create an elliptical ROI in all debug modes; the contour should have 360 points."""
    # Per-test cache directory for debug output files
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_roi_ellipse")
    os.mkdir(cache_dir)
    # Read in test RGB image
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Test with debug = "print"
    pcv.params.debug = "print"
    pcv.params.debug_outdir = cache_dir
    _, _ = pcv.roi.ellipse(x=200, y=200, r1=75, r2=50, angle=0, img=rgb_img)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _, _ = pcv.roi.ellipse(x=200, y=200, r1=75, r2=50, angle=0, img=rgb_img)
    # Test with debug = None
    pcv.params.debug = None
    roi_contour, roi_hierarchy = pcv.roi.ellipse(x=200, y=200, r1=75, r2=50, angle=0, img=rgb_img)
    # Assert the contours and hierarchy lists contain only the ROI (one point per degree)
    assert np.shape(roi_contour) == (1, 360, 1, 2)
def test_plantcv_roi_ellipse_grayscale_input():
    """pcv.roi.ellipse must also accept a grayscale reference image."""
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    pcv.params.debug = "plot"
    contour, hierarchy = pcv.roi.ellipse(x=200, y=200, r1=75, r2=50, angle=0, img=gray_img)
    # The ellipse contour carries one point per degree
    assert np.shape(contour) == (1, 360, 1, 2)
def test_plantcv_roi_ellipse_out_of_frame():
    """An ellipse ROI crossing the image boundary must raise a RuntimeError."""
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    with pytest.raises(RuntimeError):
        _, _ = pcv.roi.ellipse(x=50, y=225, r1=75, r2=50, angle=0, img=rgb_img)
def test_plantcv_roi_multi():
    """pcv.roi.multi: a custom coordinate list plots; a 3x6 grid yields 18 ROIs."""
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    # Custom coordinate list with plotting enabled
    pcv.params.debug = "plot"
    _ = pcv.roi.multi(rgb_img, coord=[(25, 120), (100, 100)], radius=20)
    # Grid mode with debugging disabled
    pcv.params.debug = None
    roi_list, hierarchies = pcv.roi.multi(rgb_img, coord=(25, 120), radius=20, spacing=(10, 10), nrows=3, ncols=6)
    # 3 rows x 6 columns -> 18 ROI contours
    assert len(roi_list) == 18
def test_plantcv_roi_multi_bad_input():
    """Supplying both a custom coordinate list and grid parameters must raise a RuntimeError."""
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_COLOR))
    with pytest.raises(RuntimeError):
        _, _ = pcv.roi.multi(rgb_img, coord=[(25, 120), (100, 100)], radius=20, spacing=(10, 10), nrows=3, ncols=6)
# ##############################
# Tests for the transform subpackage
# ##############################
def test_plantcv_transform_get_color_matrix():
    """get_color_matrix output must match the stored target matrix fixture."""
    # Expected matrix saved alongside the test data
    expected = np.load(os.path.join(TEST_DATA, TEST_TARGET_MATRIX), encoding="latin1")['arr_0']
    # RGB image and the gray-scale chip mask it was generated from
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_IMG))
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_MASK), -1)
    # The result is a (len(np.unique(mask)) - 1) x 4 matrix
    headers, matrix = pcv.transform.get_color_matrix(rgb_img, mask)
    assert np.array_equal(matrix, expected)
def test_plantcv_transform_get_color_matrix_img():
    """A grayscale image passed as rgb_img must raise a RuntimeError."""
    gray_as_rgb = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_MASK), -1)
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_MASK), -1)
    with pytest.raises(RuntimeError):
        _, _ = pcv.transform.get_color_matrix(gray_as_rgb, mask)
def test_plantcv_transform_get_color_matrix_mask():
    """A 3-channel (color) mask must raise a RuntimeError — the mask has to be grayscale."""
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_IMG))
    color_mask = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_MASK))
    with pytest.raises(RuntimeError):
        _, _ = pcv.transform.get_color_matrix(rgb_img, color_mask)
def test_plantcv_transform_get_matrix_m():
    """get_matrix_m on equal-sized target/source matrices must reproduce the stored M and B fixtures."""
    # load in comparison matrices
    matrix_m_file = np.load(os.path.join(TEST_DATA, TEST_MATRIX_M1), encoding="latin1")
    matrix_compare_m = matrix_m_file['arr_0']
    matrix_b_file = np.load(os.path.join(TEST_DATA, TEST_MATRIX_B1), encoding="latin1")
    matrix_compare_b = matrix_b_file['arr_0']
    # read in target and source color matrices
    t_matrix_file = np.load(os.path.join(TEST_DATA, TEST_TARGET_MATRIX), encoding="latin1")
    t_matrix = t_matrix_file['arr_0']
    s_matrix_file = np.load(os.path.join(TEST_DATA, TEST_SOURCE1_MATRIX), encoding="latin1")
    s_matrix = s_matrix_file['arr_0']
    # apply matrices to function
    matrix_a, matrix_m, matrix_b = pcv.transform.get_matrix_m(t_matrix, s_matrix)
    # round to the nearest integer to tolerate floating-point differences
    matrix_compare_m = np.rint(matrix_compare_m)
    matrix_compare_b = np.rint(matrix_compare_b)
    matrix_m = np.rint(matrix_m)
    matrix_b = np.rint(matrix_b)
    assert np.array_equal(matrix_m, matrix_compare_m) and np.array_equal(matrix_b, matrix_compare_b)
def test_plantcv_transform_get_matrix_m_unequal_data():
    """get_matrix_m must also handle target/source matrices of unequal length (SOURCE2 fixture)."""
    # load in comparison matrices
    matrix_m_file = np.load(os.path.join(TEST_DATA, TEST_MATRIX_M2), encoding="latin1")
    matrix_compare_m = matrix_m_file['arr_0']
    matrix_b_file = np.load(os.path.join(TEST_DATA, TEST_MATRIX_B2), encoding="latin1")
    matrix_compare_b = matrix_b_file['arr_0']
    # read in target and source color matrices
    t_matrix_file = np.load(os.path.join(TEST_DATA, TEST_TARGET_MATRIX), encoding="latin1")
    t_matrix = t_matrix_file['arr_0']
    s_matrix_file = np.load(os.path.join(TEST_DATA, TEST_SOURCE2_MATRIX), encoding="latin1")
    s_matrix = s_matrix_file['arr_0']
    # apply matrices to function
    matrix_a, matrix_m, matrix_b = pcv.transform.get_matrix_m(t_matrix, s_matrix)
    # round to the nearest integer to tolerate floating-point differences
    matrix_compare_m = np.rint(matrix_compare_m)
    matrix_compare_b = np.rint(matrix_compare_b)
    matrix_m = np.rint(matrix_m)
    matrix_b = np.rint(matrix_b)
    assert np.array_equal(matrix_m, matrix_compare_m) and np.array_equal(matrix_b, matrix_compare_b)
def test_plantcv_transform_calc_transformation_matrix():
    """calc_transformation_matrix must reproduce the stored transformation-matrix fixture."""
    # load in comparison matrix
    matrix_file = np.load(os.path.join(TEST_DATA, TEST_TRANSFORM1), encoding="latin1")
    matrix_compare = matrix_file['arr_0']
    # read in input matrices M and B
    matrix_m_file = np.load(os.path.join(TEST_DATA, TEST_MATRIX_M1), encoding="latin1")
    matrix_m = matrix_m_file['arr_0']
    matrix_b_file = np.load(os.path.join(TEST_DATA, TEST_MATRIX_B1), encoding="latin1")
    matrix_b = matrix_b_file['arr_0']
    # apply to function
    _, matrix_t = pcv.transform.calc_transformation_matrix(matrix_m, matrix_b)
    # round to the nearest integer to tolerate floating-point differences
    matrix_t = np.rint(matrix_t)
    matrix_compare = np.rint(matrix_compare)
    assert np.array_equal(matrix_t, matrix_compare)
def test_plantcv_transform_calc_transformation_matrix_b_incorrect():
    """A transposed matrix_b (wrong orientation) must raise a RuntimeError."""
    matrix_m = np.load(os.path.join(TEST_DATA, TEST_MATRIX_M1), encoding="latin1")['arr_0']
    raw_b = np.load(os.path.join(TEST_DATA, TEST_MATRIX_B1), encoding="latin1")['arr_0']
    matrix_b = np.asmatrix(raw_b, float)
    with pytest.raises(RuntimeError):
        _, _ = pcv.transform.calc_transformation_matrix(matrix_m, matrix_b.T)
def test_plantcv_transform_calc_transformation_matrix_not_mult():
    """Matrices with incompatible row counts (truncated B) must raise a RuntimeError."""
    matrix_m = np.load(os.path.join(TEST_DATA, TEST_MATRIX_M1), encoding="latin1")['arr_0']
    matrix_b = np.load(os.path.join(TEST_DATA, TEST_MATRIX_B1), encoding="latin1")['arr_0']
    with pytest.raises(RuntimeError):
        _, _ = pcv.transform.calc_transformation_matrix(matrix_m, matrix_b[:3])
def test_plantcv_transform_calc_transformation_matrix_not_mat():
    """1-D column slices instead of 2-D matrices must raise a RuntimeError."""
    matrix_m = np.load(os.path.join(TEST_DATA, TEST_MATRIX_M1), encoding="latin1")['arr_0']
    matrix_b = np.load(os.path.join(TEST_DATA, TEST_MATRIX_B1), encoding="latin1")['arr_0']
    with pytest.raises(RuntimeError):
        _, _ = pcv.transform.calc_transformation_matrix(matrix_m[:, 1], matrix_b[:, 1])
def test_plantcv_transform_apply_transformation():
    """apply_transformation_matrix must reproduce the stored color-corrected image in all debug modes."""
    # load corrected image to compare
    corrected_compare = cv2.imread(os.path.join(TEST_DATA, TEST_S1_CORRECTED))
    # Per-test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_transform")
    os.mkdir(cache_dir)
    # Path for debug images inside the cache directory
    # NOTE(review): imgdir itself is never created with os.mkdir here — presumably the
    # debug writer tolerates or creates it; confirm against the library.
    imgdir = os.path.join(cache_dir, "images")
    # read in the transformation matrix
    matrix_t_file = np.load(os.path.join(TEST_DATA, TEST_TRANSFORM1), encoding="latin1")
    matrix_t = matrix_t_file['arr_0']
    # read in images
    target_img = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_IMG))
    source_img = cv2.imread(os.path.join(TEST_DATA, TEST_SOURCE1_IMG))
    # Test with debug = "print"
    pcv.params.debug = "print"
    pcv.params.debug_outdir = imgdir
    _ = pcv.transform.apply_transformation_matrix(source_img, target_img, matrix_t)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.transform.apply_transformation_matrix(source_img, target_img, matrix_t)
    # Test with debug = None
    pcv.params.debug = None
    corrected_img = pcv.transform.apply_transformation_matrix(source_img, target_img, matrix_t)
    # the corrected output must match the stored reference exactly
    assert np.array_equal(corrected_img, corrected_compare)
def test_plantcv_transform_apply_transformation_incorrect_t():
    """A matrix of the wrong shape (a B matrix, not a transformation matrix) must raise a RuntimeError."""
    bad_matrix = np.load(os.path.join(TEST_DATA, TEST_MATRIX_B1), encoding="latin1")['arr_0']
    target_img = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_IMG))
    source_img = cv2.imread(os.path.join(TEST_DATA, TEST_SOURCE1_IMG))
    with pytest.raises(RuntimeError):
        _ = pcv.transform.apply_transformation_matrix(source_img, target_img, bad_matrix)
def test_plantcv_transform_apply_transformation_incorrect_img():
    """A grayscale source image (color expected) must raise a RuntimeError."""
    matrix_t = np.load(os.path.join(TEST_DATA, TEST_TRANSFORM1), encoding="latin1")['arr_0']
    target_img = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_IMG))
    gray_source = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_MASK), -1)
    with pytest.raises(RuntimeError):
        _ = pcv.transform.apply_transformation_matrix(gray_source, target_img, matrix_t)
def test_plantcv_transform_save_matrix():
    """save_matrix writes the given matrix to an .npz file on disk."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_transform")
    os.mkdir(cache_dir)
    matrix_t = np.load(os.path.join(TEST_DATA, TEST_TRANSFORM1), encoding="latin1")['arr_0']
    out_file = os.path.join(cache_dir, 'test.npz')
    pcv.transform.save_matrix(matrix_t, out_file)
    assert os.path.exists(out_file) is True
def test_plantcv_transform_save_matrix_incorrect_filename():
    """A filename without the .npz extension must raise a RuntimeError."""
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_transform")
    os.mkdir(cache_dir)
    matrix_t = np.load(os.path.join(TEST_DATA, TEST_TRANSFORM1), encoding="latin1")['arr_0']
    with pytest.raises(RuntimeError):
        pcv.transform.save_matrix(matrix_t, "test")
def test_plantcv_transform_load_matrix():
    """load_matrix must round-trip the stored transformation matrix."""
    matrix_path = os.path.join(TEST_DATA, TEST_TRANSFORM1)
    expected = np.load(matrix_path, encoding="latin1")['arr_0']
    loaded = pcv.transform.load_matrix(matrix_path)
    assert np.array_equal(expected, loaded)
def test_plantcv_transform_correct_color():
    """End-to-end correct_color run: corrected image matches fixture and all three matrices are saved."""
    # Test cache directory
    # NOTE(review): this cache_dir is created and then immediately shadowed by the
    # reassignment below — the first directory is unused; confirm whether intentional.
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_transform")
    os.mkdir(cache_dir)
    # load corrected image to compare
    corrected_compare = cv2.imread(os.path.join(TEST_DATA, TEST_S1_CORRECTED))
    # load in comparison matrices
    matrix_file = np.load(os.path.join(TEST_DATA, TEST_TRANSFORM1), encoding="latin1")
    matrix_compare = matrix_file['arr_0']
    # Per-test cache directory (replaces the one above)
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_transform_correct_color")
    os.mkdir(cache_dir)
    # Paths for debug images and saved matrices inside the cache directory
    imgdir = os.path.join(cache_dir, "images")
    matdir = os.path.join(cache_dir, "saved_matrices")
    # Read in target, source, and gray-scale mask
    target_img = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_IMG))
    source_img = cv2.imread(os.path.join(TEST_DATA, TEST_SOURCE1_IMG))
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_MASK), -1)
    output_path = os.path.join(matdir)
    # Test with debug = "print" (matrices written to cache_dir)
    pcv.params.debug = "print"
    pcv.params.debug_outdir = imgdir
    _, _, _, _ = pcv.transform.correct_color(target_img, mask, source_img, mask, cache_dir)
    # Test with debug = "plot" (matrices written to output_path)
    pcv.params.debug = "plot"
    _, _, _, _ = pcv.transform.correct_color(target_img, mask, source_img, mask, output_path)
    # Test with debug = None
    pcv.params.debug = None
    _, _, matrix_t, corrected_img = pcv.transform.correct_color(target_img, mask, source_img, mask, output_path)
    # corrected image must match the fixture and all three matrix files must be saved
    assert np.array_equal(corrected_img, corrected_compare) and \
           os.path.exists(os.path.join(output_path, "target_matrix.npz")) is True and \
           os.path.exists(os.path.join(output_path, "source_matrix.npz")) is True and \
           os.path.exists(os.path.join(output_path, "transformation_matrix.npz")) is True
def test_plantcv_transform_correct_color_output_dne():
    """correct_color must create a non-existent output directory and still save all matrices."""
    # load corrected image to compare
    corrected_compare = cv2.imread(os.path.join(TEST_DATA, TEST_S1_CORRECTED))
    # load in comparison matrices
    matrix_file = np.load(os.path.join(TEST_DATA, TEST_TRANSFORM1), encoding="latin1")
    matrix_compare = matrix_file['arr_0']
    # Per-test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_transform_correct_color_output_dne")
    os.mkdir(cache_dir)
    # Path for debug images inside the cache directory
    imgdir = os.path.join(cache_dir, "images")
    # Read in target, source, and gray-scale mask
    target_img = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_IMG))
    source_img = cv2.imread(os.path.join(TEST_DATA, TEST_SOURCE1_IMG))
    mask = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_MASK), -1)
    output_path = os.path.join(cache_dir, "saved_matrices_1")  # output directory that does not currently exist
    # Test with debug = "print"
    pcv.params.debug = "print"
    pcv.params.debug_outdir = imgdir
    _, _, _, _ = pcv.transform.correct_color(target_img, mask, source_img, mask, output_path)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _, _, _, _ = pcv.transform.correct_color(target_img, mask, source_img, mask, output_path)
    # Test with debug = None
    pcv.params.debug = None
    _, _, matrix_t, corrected_img = pcv.transform.correct_color(target_img, mask, source_img, mask, output_path)
    # corrected image must match the fixture and all three matrix files must be saved
    assert np.array_equal(corrected_img, corrected_compare) and \
           os.path.exists(os.path.join(output_path, "target_matrix.npz")) is True and \
           os.path.exists(os.path.join(output_path, "source_matrix.npz")) is True and \
           os.path.exists(os.path.join(output_path, "transformation_matrix.npz")) is True
def test_plantcv_transform_create_color_card_mask():
    """create_color_card_mask labels 22 chips (plus background 0) with distinct gray levels."""
    # Load target image
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_IMG))
    # Per-test cache directory for debug output files
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_transform_create_color_card_mask")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.transform.create_color_card_mask(rgb_img=rgb_img, radius=6, start_coord=(166, 166),
                                             spacing=(21, 21), nrows=6, ncols=4, exclude=[20, 0])
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.transform.create_color_card_mask(rgb_img=rgb_img, radius=6, start_coord=(166, 166),
                                             spacing=(21, 21), nrows=6, ncols=4, exclude=[20, 0])
    # Test with debug = None
    pcv.params.debug = None
    mask = pcv.transform.create_color_card_mask(rgb_img=rgb_img, radius=6, start_coord=(166, 166),
                                                spacing=(21, 21), nrows=6, ncols=4, exclude=[20, 0])
    # BUG FIX: the original `all([i == j] for ...)` was always True (non-empty lists are
    # truthy). Compare the label arrays directly instead.
    assert np.array_equal(np.unique(mask), np.array([0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110,
                                                     120, 130, 140, 150, 160, 170, 180, 190, 200, 210,
                                                     220], dtype=np.uint8))
def test_plantcv_transform_quick_color_check():
    """quick_color_check runs in all debug modes and saves the comparison plot under debug_outdir."""
    # Load target and source chip matrices
    t_matrix = np.load(os.path.join(TEST_DATA, TEST_TARGET_MATRIX), encoding="latin1")
    target_matrix = t_matrix['arr_0']
    s_matrix = np.load(os.path.join(TEST_DATA, TEST_SOURCE1_MATRIX), encoding="latin1")
    source_matrix = s_matrix['arr_0']
    # Per-test cache directory for debug output files
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_transform_quick_color_check")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Test with debug = "print" (writes color_quick_check.png to cache_dir)
    pcv.params.debug = "print"
    pcv.transform.quick_color_check(target_matrix, source_matrix, num_chips=22)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    pcv.transform.quick_color_check(target_matrix, source_matrix, num_chips=22)
    # Test with debug = None
    pcv.params.debug = None
    pcv.transform.quick_color_check(target_matrix, source_matrix, num_chips=22)
    assert os.path.exists(os.path.join(cache_dir, "color_quick_check.png"))
def test_plantcv_transform_find_color_card():
    """Locate a color card, build a chip mask from it, and validate the mask labels."""
    # Load rgb image
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_IMG))
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_transform_find_color_card")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    df, start, space = pcv.transform.find_color_card(rgb_img=rgb_img, threshold='adaptgauss', blurry=False)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.transform.create_color_card_mask(rgb_img=rgb_img, radius=6, start_coord=start,
                                             spacing=space, nrows=6, ncols=4, exclude=[20, 0])
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.transform.create_color_card_mask(rgb_img=rgb_img, radius=6, start_coord=start,
                                             spacing=space, nrows=6, ncols=4, exclude=[20, 0])
    # Test with debug = None
    pcv.params.debug = None
    mask = pcv.transform.create_color_card_mask(rgb_img=rgb_img, radius=6, start_coord=start,
                                                spacing=space, nrows=6, ncols=4, exclude=[20, 0])
    # BUG FIX: the original used all([i == j] for ...) — a generator of non-empty
    # lists, which is always truthy — so this assertion could never fail.
    expected = np.array([0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140,
                         150, 160, 170, 180, 190, 200, 210, 220], dtype=np.uint8)
    assert all(i == j for i, j in zip(np.unique(mask), expected))
def test_plantcv_transform_find_color_card_optional_parameters():
    """Exercise find_color_card's optional threshold/background parameters."""
    # Load rgb image
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_IMG_COLOR_CARD))
    # Test cache directory
    # FIX: named after THIS test; the original reused the directory name of
    # test_plantcv_transform_find_color_card, which risks a collision (os.mkdir
    # raises FileExistsError) if both tests ever run without teardown in between.
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_transform_find_color_card_optional_parameters")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Test with threshold ='normal'
    df1, start1, space1 = pcv.transform.find_color_card(rgb_img=rgb_img, threshold='normal', blurry=True,
                                                        background='light')
    _ = pcv.transform.create_color_card_mask(rgb_img=rgb_img, radius=6, start_coord=start1,
                                             spacing=space1, nrows=6, ncols=4, exclude=[20, 0])
    # Test with threshold='otsu'
    df2, start2, space2 = pcv.transform.find_color_card(rgb_img=rgb_img, threshold='otsu', blurry=True)
    _ = pcv.transform.create_color_card_mask(rgb_img=rgb_img, radius=6, start_coord=start2,
                                             spacing=space2, nrows=6, ncols=4, exclude=[20, 0])
    # Test with debug = None
    pcv.params.debug = None
    mask = pcv.transform.create_color_card_mask(rgb_img=rgb_img, radius=6, start_coord=start2,
                                                spacing=space2, nrows=6, ncols=4, exclude=[20, 0])
    # BUG FIX: all([i == j] for ...) was always truthy; compare element-wise instead.
    expected = np.array([0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140,
                         150, 160, 170, 180, 190, 200, 210, 220], dtype=np.uint8)
    assert all(i == j for i, j in zip(np.unique(mask), expected))
def test_plantcv_transform_find_color_card_bad_thresh_input():
    """find_color_card must raise RuntimeError for an unsupported threshold name."""
    # Load rgb image
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_IMG))
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _, _, _ = pcv.transform.find_color_card(rgb_img=rgb_img, threshold='gaussian')
def test_plantcv_transform_find_color_card_bad_background_input():
    """find_color_card must raise RuntimeError for an unsupported background name."""
    # Load rgb image
    rgb_img = cv2.imread(os.path.join(TEST_DATA, TEST_TARGET_IMG))
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _, _, _ = pcv.transform.find_color_card(rgb_img=rgb_img, background='lite')
# ##############################
# Tests for the threshold subpackage
# ##############################
def test_plantcv_threshold_binary():
    """Exercise threshold.binary in every debug mode and validate the output mask."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_threshold_binary")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    # Test with object type = dark
    pcv.params.debug = None
    _ = pcv.threshold.binary(gray_img=gray_img, threshold=25, max_value=255, object_type="dark")
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.threshold.binary(gray_img=gray_img, threshold=25, max_value=255, object_type="light")
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.threshold.binary(gray_img=gray_img, threshold=25, max_value=255, object_type="light")
    # Test with debug = None
    pcv.params.debug = None
    binary_img = pcv.threshold.binary(gray_img=gray_img, threshold=25, max_value=255, object_type="light")
    # BUG FIX: the original wrapped each comparison in a one-element list
    # (all([i == j] for ...)), which is always truthy, so neither check could fail.
    # Assert the output has the input image's dimensions (what the comment claimed)
    assert all(i == j for i, j in zip(np.shape(binary_img), np.shape(gray_img)))
    # Assert the output is binary (only 0 and 255 present)
    assert all(i == j for i, j in zip(np.unique(binary_img), [0, 255]))
def test_plantcv_threshold_binary_incorrect_object_type():
    """threshold.binary must raise RuntimeError for an unknown object_type."""
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.threshold.binary(gray_img=gray_img, threshold=25, max_value=255, object_type="lite")
def test_plantcv_threshold_gaussian():
    """Exercise threshold.gaussian in every debug mode and validate the output mask."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_threshold_gaussian")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    # Test with object type = dark
    pcv.params.debug = None
    _ = pcv.threshold.gaussian(gray_img=gray_img, max_value=255, object_type="dark")
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.threshold.gaussian(gray_img=gray_img, max_value=255, object_type="light")
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.threshold.gaussian(gray_img=gray_img, max_value=255, object_type="light")
    # Test with debug = None
    pcv.params.debug = None
    binary_img = pcv.threshold.gaussian(gray_img=gray_img, max_value=255, object_type="light")
    # BUG FIX: all([i == j] for ...) was always truthy (generator of non-empty
    # lists), so the checks below never failed. Compare elements directly.
    # Assert the output has the input image's dimensions
    assert all(i == j for i, j in zip(np.shape(binary_img), np.shape(gray_img)))
    # Assert the output is binary (only 0 and 255 present)
    assert all(i == j for i, j in zip(np.unique(binary_img), [0, 255]))
def test_plantcv_threshold_gaussian_incorrect_object_type():
    """threshold.gaussian must raise RuntimeError for an unknown object_type."""
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.threshold.gaussian(gray_img=gray_img, max_value=255, object_type="lite")
def test_plantcv_threshold_mean():
    """Exercise threshold.mean in every debug mode and validate the output mask."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_threshold_mean")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    # Test with object type = dark
    pcv.params.debug = None
    _ = pcv.threshold.mean(gray_img=gray_img, max_value=255, object_type="dark")
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.threshold.mean(gray_img=gray_img, max_value=255, object_type="light")
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.threshold.mean(gray_img=gray_img, max_value=255, object_type="light")
    # Test with debug = None
    pcv.params.debug = None
    binary_img = pcv.threshold.mean(gray_img=gray_img, max_value=255, object_type="light")
    # BUG FIX: all([i == j] for ...) was always truthy (generator of non-empty
    # lists), so the checks below never failed. Compare elements directly.
    # Assert the output has the input image's dimensions
    assert all(i == j for i, j in zip(np.shape(binary_img), np.shape(gray_img)))
    # Assert the output is binary (only 0 and 255 present)
    assert all(i == j for i, j in zip(np.unique(binary_img), [0, 255]))
def test_plantcv_threshold_mean_incorrect_object_type():
    """threshold.mean must raise RuntimeError for an unknown object_type."""
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.threshold.mean(gray_img=gray_img, max_value=255, object_type="lite")
def test_plantcv_threshold_otsu():
    """Exercise threshold.otsu in every debug mode and validate the output mask."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_threshold_otsu")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data (constant name kept as defined elsewhere in this file)
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INTPUT_GREENMAG), -1)
    # Test with object set to light
    pcv.params.debug = None
    _ = pcv.threshold.otsu(gray_img=gray_img, max_value=255, object_type="light")
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.threshold.otsu(gray_img=gray_img, max_value=255, object_type='dark')
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.threshold.otsu(gray_img=gray_img, max_value=255, object_type='dark')
    # Test with debug = None
    pcv.params.debug = None
    binary_img = pcv.threshold.otsu(gray_img=gray_img, max_value=255, object_type='dark')
    # BUG FIX: all([i == j] for ...) was always truthy (generator of non-empty
    # lists), so the checks below never failed. Compare against the input image's
    # own shape — which is what the original comment claimed to verify (the old
    # inert check also compared a green-magenta image against TEST_GRAY_DIM).
    assert all(i == j for i, j in zip(np.shape(binary_img), np.shape(gray_img)))
    # Assert the output is binary (only 0 and 255 present)
    assert all(i == j for i, j in zip(np.unique(binary_img), [0, 255]))
def test_plantcv_threshold_otsu_incorrect_object_type():
    """threshold.otsu must raise RuntimeError for an unknown object_type."""
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.threshold.otsu(gray_img=gray_img, max_value=255, object_type="lite")
def test_plantcv_threshold_triangle():
    """Exercise threshold.triangle in every debug mode and validate the output mask."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_threshold_triangle")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    # Read in test data
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    # Test with debug = "print"
    pcv.params.debug = "print"
    _ = pcv.threshold.triangle(gray_img=gray_img, max_value=255, object_type="dark", xstep=10)
    # Test with debug = "plot"
    pcv.params.debug = "plot"
    _ = pcv.threshold.triangle(gray_img=gray_img, max_value=255, object_type="light", xstep=10)
    # Test with debug = None
    pcv.params.debug = None
    binary_img = pcv.threshold.triangle(gray_img=gray_img, max_value=255, object_type="light", xstep=10)
    # BUG FIX: all([i == j] for ...) was always truthy (generator of non-empty
    # lists), so the checks below never failed. Compare elements directly.
    # Assert the output has the input image's dimensions
    assert all(i == j for i, j in zip(np.shape(binary_img), np.shape(gray_img)))
    # Assert the output is binary (only 0 and 255 present)
    assert all(i == j for i, j in zip(np.unique(binary_img), [0, 255]))
def test_plantcv_threshold_triangle_incorrect_object_type():
    """threshold.triangle must raise RuntimeError for an unknown object_type."""
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY), -1)
    pcv.params.debug = None
    with pytest.raises(RuntimeError):
        _ = pcv.threshold.triangle(gray_img=gray_img, max_value=255, object_type="lite", xstep=10)
def test_plantcv_threshold_texture():
    """Exercise threshold.texture and validate the output mask."""
    # Test cache directory
    cache_dir = os.path.join(TEST_TMPDIR, "test_plantcv_threshold_texture")
    os.mkdir(cache_dir)
    pcv.params.debug_outdir = cache_dir
    gray_img = cv2.imread(os.path.join(TEST_DATA, TEST_INPUT_GRAY_SMALL), -1)
    binary_img = pcv.threshold.texture(gray_img, ksize=6, threshold=7, offset=3,
                                       texture_method='dissimilarity', borders='nearest',
                                       max_value=255)
    # BUG FIX: all([i == j] for ...) was always truthy (generator of non-empty
    # lists), so the checks below never failed. Note the old inert check also
    # compared a SMALL input image against full-size TEST_GRAY_DIM; compare
    # against the input's own shape, which is what the comment intended.
    assert all(i == j for i, j in zip(np.shape(binary_img), np.shape(gray_img)))
    # Assert the output is binary (only 0 and 255 present)
    assert all(i == j for i, j in zip(np.unique(binary_img), [0, 255]))
# ##############################
# Clean up test files
# ##############################
def teardown_function():
    """Pytest per-test hook: delete the shared temporary directory and everything in it."""
    # Removes the whole TEST_TMPDIR tree so each test's os.mkdir(cache_dir) starts clean
    shutil.rmtree(TEST_TMPDIR)
| 44.423007
| 120
| 0.672444
| 21,966
| 147,129
| 4.252162
| 0.035646
| 0.031798
| 0.050641
| 0.064752
| 0.908301
| 0.886438
| 0.867092
| 0.846547
| 0.828903
| 0.808325
| 0
| 0.032679
| 0.198418
| 147,129
| 3,311
| 121
| 44.436424
| 0.759302
| 0.126127
| 0
| 0.645031
| 0
| 0
| 0.058348
| 0.030206
| 0
| 0
| 0
| 0
| 0.073859
| 1
| 0.087287
| false
| 0
| 0.003581
| 0
| 0.090868
| 0.043868
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c91c68395977614b7ef02aa13e0392268f147a7e
| 33
|
py
|
Python
|
pycachecleaner/__init__.py
|
mikk357/pycachecleaner
|
71079a6a68bf04476dbf27495374171606a8b02d
|
[
"MIT"
] | 2
|
2021-01-20T05:45:46.000Z
|
2021-01-20T05:47:08.000Z
|
pycachecleaner/__init__.py
|
mikk357/pycachecleaner
|
71079a6a68bf04476dbf27495374171606a8b02d
|
[
"MIT"
] | null | null | null |
pycachecleaner/__init__.py
|
mikk357/pycachecleaner
|
71079a6a68bf04476dbf27495374171606a8b02d
|
[
"MIT"
] | null | null | null |
from .pycachecleaner import clean
| 33
| 33
| 0.878788
| 4
| 33
| 7.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 33
| 1
| 33
| 33
| 0.966667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a310ff1c2be1b6176566a19e6701acb2ed16b36c
| 106
|
py
|
Python
|
tests/tests/__init__.py
|
aptivate/django-organizations
|
3ac867493508612370066c00ca7bd8d55632e116
|
[
"BSD-2-Clause"
] | 1
|
2017-09-06T08:19:18.000Z
|
2017-09-06T08:19:18.000Z
|
tests/tests/__init__.py
|
philippeowagner/django-organizations
|
0c2dd98b5c5af0e3de7cbd4a23567213c5222ac6
|
[
"BSD-2-Clause"
] | null | null | null |
tests/tests/__init__.py
|
philippeowagner/django-organizations
|
0c2dd98b5c5af0e3de7cbd4a23567213c5222ac6
|
[
"BSD-2-Clause"
] | null | null | null |
from .models import *
from .urls import *
from .forms import *
##from .views import *
from .base import *
| 17.666667
| 22
| 0.698113
| 15
| 106
| 4.933333
| 0.466667
| 0.540541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188679
| 106
| 5
| 23
| 21.2
| 0.860465
| 0.188679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a33439bb4072b1be85d7c0c01bf680d5fd488d33
| 129
|
py
|
Python
|
microproxy/event/__init__.py
|
mike820324/microProxy
|
64c7c5add4759c6e105b9438cd18c0f8c930c7a3
|
[
"MIT"
] | 20
|
2016-04-17T08:43:26.000Z
|
2021-05-31T04:01:27.000Z
|
microproxy/event/__init__.py
|
mike820324/microProxy
|
64c7c5add4759c6e105b9438cd18c0f8c930c7a3
|
[
"MIT"
] | 237
|
2016-04-17T07:07:08.000Z
|
2017-01-26T09:15:52.000Z
|
microproxy/event/__init__.py
|
mike820324/microProxy
|
64c7c5add4759c6e105b9438cd18c0f8c930c7a3
|
[
"MIT"
] | 5
|
2016-04-16T14:22:45.000Z
|
2019-11-27T04:41:55.000Z
|
from manager import EventManager
from client import EventClient
from manager import start_events_server
from types import REPLAY
| 25.8
| 39
| 0.875969
| 18
| 129
| 6.166667
| 0.611111
| 0.198198
| 0.306306
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124031
| 129
| 4
| 40
| 32.25
| 0.982301
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a34679d8c86c8c4720cfd202136b00c7d12e5a23
| 50,318
|
py
|
Python
|
tests/legacy_unittest/test_fee_engine.py
|
bayeshack2016/icon-service
|
36cab484d2e41548d7f2f74526f127ee3a4423fc
|
[
"Apache-2.0"
] | 52
|
2018-08-24T02:28:43.000Z
|
2021-07-06T04:44:22.000Z
|
tests/legacy_unittest/test_fee_engine.py
|
bayeshack2016/icon-service
|
36cab484d2e41548d7f2f74526f127ee3a4423fc
|
[
"Apache-2.0"
] | 62
|
2018-09-17T06:59:16.000Z
|
2021-12-15T06:02:51.000Z
|
tests/legacy_unittest/test_fee_engine.py
|
bayeshack2016/icon-service
|
36cab484d2e41548d7f2f74526f127ee3a4423fc
|
[
"Apache-2.0"
] | 35
|
2018-09-14T02:42:10.000Z
|
2022-02-05T10:34:46.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2019 ICON Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
from random import randrange
from unittest.mock import Mock
from iconservice.base.address import AddressPrefix, Address
from iconservice.base.block import Block
from iconservice.base.exception import InvalidRequestException, OutOfBalanceException
from iconservice.base.transaction import Transaction
from iconservice.database.db import ContextDatabase
from iconservice.deploy import DeployStorage
from iconservice.deploy.storage import IconScoreDeployInfo
from iconservice.fee import FeeEngine, FeeStorage
from iconservice.fee.engine import VirtualStepCalculator, FIXED_TERM
from iconservice.icon_constant import IconScoreContextType, DeployState
from iconservice.iconscore.icon_score_context import IconScoreContext
from iconservice.iconscore.context.context import ContextContainer
from iconservice.iconscore.icon_score_step import IconScoreStepCounter
from iconservice.icx import IcxEngine
from iconservice.icx import IcxStorage
from iconservice.icx.coin_part import CoinPartType
from iconservice.utils import ContextStorage, ContextEngine
from tests.legacy_unittest.mock_generator import clear_inner_task
def create_context_db():
    """
    Build a Mock ContextDatabase whose get/put/delete operate on an in-memory dict.

    :return: Mock object with ContextDatabase's spec
    """
    store = {}

    # noinspection PyUnusedLocal
    def _put(context, key, value):
        store[key] = value

    # noinspection PyUnusedLocal
    def _get(context, key):
        return store.get(key)

    # noinspection PyUnusedLocal
    def _delete(context, key):
        del store[key]

    mock_db = Mock(spec=ContextDatabase)
    mock_db.get = _get
    mock_db.put = _put
    mock_db.delete = _delete
    return mock_db
def patch_fee_storage(fee_storage: FeeStorage):
    """Replace fee_storage's persistence methods in place with dict-backed fakes.

    Deposit metas are stored under their key; deposits are stored under deposit.id,
    so one getter/deleter pair serves both kinds of record.
    """
    store = {}

    # noinspection PyUnusedLocal
    def _put(context, key, value):
        store[key] = value

    # noinspection PyUnusedLocal
    def _put_deposit(context, deposit):
        store[deposit.id] = deposit

    # noinspection PyUnusedLocal
    def _get(context, key):
        return store.get(key)

    # noinspection PyUnusedLocal
    def _delete(context, key):
        del store[key]

    fee_storage.put_deposit_meta = _put
    fee_storage.get_deposit_meta = _get
    fee_storage.delete_deposit_meta = _delete
    fee_storage.put_deposit = _put_deposit
    fee_storage.get_deposit = _get
    fee_storage.delete_deposit = _delete
def get_rand_term():
    """Return a deposit term: the minimum when FIXED_TERM is set, otherwise a random
    value in [_MIN_DEPOSIT_TERM, _MAX_DEPOSIT_TERM)."""
    if not FIXED_TERM:
        return randrange(FeeEngine._MIN_DEPOSIT_TERM, FeeEngine._MAX_DEPOSIT_TERM)
    return FeeEngine._MIN_DEPOSIT_TERM
calculate_virtual_step = VirtualStepCalculator.calculate_virtual_step
class TestFeeEngine(unittest.TestCase):
def setUp(self):
context = IconScoreContext(IconScoreContextType.DIRECT)
block = Mock(spec=Block)
block.attach_mock(Mock(return_value=0), 'height')
context.block = block
self._sender = Address.from_data(AddressPrefix.EOA, os.urandom(20))
self._score_address = Address.from_data(AddressPrefix.CONTRACT, os.urandom(20))
context_db = create_context_db()
self.deploy_storage = DeployStorage(context_db)
deploy_info = IconScoreDeployInfo(self._score_address,
DeployState.ACTIVE,
self._sender,
os.urandom(32),
os.urandom(32))
self.icx_storage = IcxStorage(context_db)
self._icx_engine = IcxEngine()
self.fee_storage = FeeStorage(context_db)
patch_fee_storage(self.fee_storage)
self.deploy_storage.put_deploy_info(context, deploy_info)
context.storage = ContextStorage(deploy=self.deploy_storage, fee=self.fee_storage, icx=self.icx_storage,
iiss=None, prep=None, issue=None, rc=None, meta=None)
context.engine = ContextEngine(deploy=None, fee=None, icx=self._icx_engine, iiss=None, prep=None, issue=None)
self._icx_engine.open(self.icx_storage)
self.icx_storage._put_genesis_data_account(context,
CoinPartType.GENERAL,
self._sender,
100000000 * 10 ** 18)
self.icx_storage._put_genesis_data_account(context, CoinPartType.TREASURY,
Address.from_data(AddressPrefix.EOA, os.urandom(20)), 0)
self._engine = FeeEngine()
def tearDown(self):
ContextContainer._clear_context()
clear_inner_task()
VirtualStepCalculator.calculate_virtual_step = calculate_virtual_step
def get_context(self):
context = IconScoreContext(IconScoreContextType.INVOKE)
context.step_counter = Mock(spec=IconScoreStepCounter)
context.step_counter.step_price = 10 ** 10
context.tx = Mock(spec=Transaction)
context.tx.to = self._score_address
block = Mock(spec=Block)
block.attach_mock(Mock(return_value=0), 'height')
context.block = block
context.storage = ContextStorage(deploy=self.deploy_storage,fee=self.fee_storage, icx=self.icx_storage,
iiss=None, prep=None, issue=None, rc=None, meta=None)
context.engine = ContextEngine(deploy=None, fee=None, icx=self._icx_engine, iiss=None, prep=None, issue=None)
return context
def _deposit_bulk(self, count):
self.context = self.get_context()
self.block_height = 0
input_params = []
for i in range(count):
tx_hash = os.urandom(32)
amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
block_height = randrange(100, 10000)
term = get_rand_term()
before_sender_balance = self._icx_engine.get_balance(self.context, self._sender)
self._engine.add_deposit(
self.context, tx_hash, self._sender, self._score_address, amount, block_height, term)
after_sender_balance = self._icx_engine.get_balance(self.context, self._sender)
self.assertEqual(amount, before_sender_balance - after_sender_balance)
input_params.append((tx_hash, amount, block_height, term))
return input_params
def test_deposit_fee(self):
context = self.get_context()
block_height = 0
size = randrange(10, 100)
input_param = self._deposit_bulk(size)
deposit_info = self._engine.get_deposit_info(context, self._score_address, block_height)
self.assertEqual(size, len(deposit_info.deposits))
for i in range(size):
tx_hash, amount, block_height, term = input_param[i]
deposit = deposit_info.deposits[i]
self.assertEqual(tx_hash, deposit.id)
self.assertEqual(self._sender, deposit.sender)
self.assertEqual(self._score_address, deposit.score_address)
self.assertEqual(amount, deposit.deposit_amount)
self.assertEqual(block_height, deposit.created)
self.assertEqual(block_height + term, deposit.expires)
def test_deposit_append_and_delete(self):
size = randrange(10, 100)
deposit_list = self._deposit_bulk(size)
for i in range(size):
index = randrange(0, size)
size -= 1
withdrawal_deposit_id = deposit_list.pop(index)[0]
self._engine.withdraw_deposit(self.context, self._sender, withdrawal_deposit_id, 1)
deposit_info = self._engine.get_deposit_info(self.context, self._score_address, 1)
for j in range(size):
deposit = deposit_info.deposits[j]
self.assertEqual(deposit.id, deposit_list[j][0])
self.assertEqual(self._sender, deposit.sender)
self.assertEqual(self._score_address, deposit.score_address)
self.assertEqual(deposit.deposit_amount, deposit_list[j][1])
self.assertEqual(deposit.created, deposit_list[j][2])
self.assertEqual(deposit.expires, deposit_list[j][2] + deposit_list[j][3])
input_param = self._deposit_bulk(100)
deposit_info = self._engine.get_deposit_info(self.context, self._score_address, self.block_height)
self.assertEqual(100, len(deposit_info.deposits))
for i in range(size):
tx_hash, amount, block_height, term = input_param[i]
deposit = deposit_info.deposits[i]
self.assertEqual(tx_hash, deposit.id)
self.assertEqual(self._sender, deposit.sender)
self.assertEqual(self._score_address, deposit.score_address)
self.assertEqual(amount, deposit.deposit_amount)
self.assertEqual(block_height, deposit.created)
self.assertEqual(block_height + term, deposit.expires)
def test_deposit_fee_invalid_param(self):
context = self.get_context()
tx_hash = os.urandom(32)
amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
block_height = randrange(100, 10000)
term = get_rand_term()
# invalid amount (underflow)
# noinspection PyTypeChecker
with self.assertRaises(InvalidRequestException) as e:
inv_amount = randrange(0, FeeEngine._MIN_DEPOSIT_AMOUNT - 1)
self._engine.add_deposit(context, tx_hash, self._sender, self._score_address,
inv_amount, block_height, term)
# noinspection PyUnresolvedReferences
self.assertEqual('Invalid deposit amount', e.exception.message)
# invalid amount (overflow)
# noinspection PyTypeChecker
with self.assertRaises(InvalidRequestException) as e:
inv_amount = \
randrange(FeeEngine._MAX_DEPOSIT_AMOUNT + 1, FeeEngine._MAX_DEPOSIT_AMOUNT * 10)
self._engine.add_deposit(context, tx_hash, self._sender, self._score_address,
inv_amount, block_height, term)
# noinspection PyUnresolvedReferences
self.assertEqual('Invalid deposit amount', e.exception.message)
# invalid term (underflow)
# noinspection PyTypeChecker
with self.assertRaises(InvalidRequestException) as e:
inv_term = randrange(0, FeeEngine._MIN_DEPOSIT_TERM - 1)
self._engine.add_deposit(context, tx_hash, self._sender, self._score_address,
amount, block_height, inv_term)
# noinspection PyUnresolvedReferences
self.assertEqual('Invalid deposit term', e.exception.message)
# invalid term (overflow)
# noinspection PyTypeChecker
with self.assertRaises(InvalidRequestException) as e:
inv_term = \
randrange(FeeEngine._MAX_DEPOSIT_TERM + 1, FeeEngine._MAX_DEPOSIT_TERM * 10)
self._engine.add_deposit(context, tx_hash, self._sender, self._score_address,
amount, block_height, inv_term)
# noinspection PyUnresolvedReferences
self.assertEqual('Invalid deposit term', e.exception.message)
# invalid owner
# noinspection PyTypeChecker
with self.assertRaises(InvalidRequestException) as e:
inv_sender = Address.from_data(AddressPrefix.EOA, os.urandom(20))
self._engine.add_deposit(context, tx_hash, inv_sender, self._score_address,
amount, block_height, term)
# noinspection PyUnresolvedReferences
self.assertEqual('Invalid SCORE owner', e.exception.message)
def test_deposit_fee_out_of_balance(self):
context = self.get_context()
self.icx_storage._put_genesis_data_account(
context, CoinPartType.GENERAL, self._sender, 10000 * 10 ** 18)
tx_hash = os.urandom(32)
amount = 10001 * 10 ** 18
block_height = randrange(100, 10000)
term = get_rand_term()
# out of balance
# noinspection PyTypeChecker
with self.assertRaises(OutOfBalanceException) as e:
self._engine.add_deposit(context, tx_hash, self._sender, self._score_address,
amount, block_height, term)
# noinspection PyUnresolvedReferences
self.assertEqual('Out of balance', e.exception.message)
def test_deposit_fee_available_head_ids(self):
context = self.get_context()
tx_hash = os.urandom(32)
amount = 10000 * 10 ** 18
block_height = 1000
self.icx_storage._put_genesis_data_account(context,
CoinPartType.GENERAL,
self._sender,
amount)
deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
self.assertEqual(deposit_meta.available_head_id_of_virtual_step, None)
self.assertEqual(deposit_meta.available_head_id_of_deposit, None)
self._engine.add_deposit(context, tx_hash, self._sender, self._score_address, amount, block_height,
FeeEngine._MIN_DEPOSIT_TERM)
deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
self.assertEqual(deposit_meta.available_head_id_of_virtual_step, tx_hash)
self.assertEqual(deposit_meta.available_head_id_of_deposit, tx_hash)
def test_deposit_fee_expires_updated(self):
context = self.get_context()
tx_hash = os.urandom(32)
amount = 10000 * 10 ** 18
block_height = 1000
term = FeeEngine._MIN_DEPOSIT_TERM
self.icx_storage._put_genesis_data_account(context,
CoinPartType.GENERAL,
self._sender,
amount)
deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
self.assertEqual(deposit_meta.expires_of_virtual_step, -1)
self.assertEqual(deposit_meta.expires_of_deposit, -1)
self._engine.add_deposit(context, tx_hash, self._sender, self._score_address, amount, block_height, term)
deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
self.assertEqual(deposit_meta.expires_of_virtual_step, block_height + term)
self.assertEqual(deposit_meta.expires_of_deposit, block_height + term)
def test_withdraw_fee_without_penalty(self):
context = self.get_context()
tx_hash = os.urandom(32)
amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
block_height = randrange(100, 10000)
term = get_rand_term()
self._engine.add_deposit(
context, tx_hash, self._sender, self._score_address, amount, block_height, term)
before_sender_balance = self._icx_engine.get_balance(context, self._sender)
self._engine.withdraw_deposit(context, self._sender, tx_hash, block_height + term + 1)
after_sender_balance = self._icx_engine.get_balance(context, self._sender)
deposit_info = self._engine.get_deposit_info(context, self._score_address, block_height)
self.assertIsNone(deposit_info)
self.assertEqual(amount, after_sender_balance - before_sender_balance)
def test_withdraw_fee_with_penalty(self):
context = self.get_context()
tx_hash = os.urandom(32)
amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
block_height = randrange(100, 10000)
term = get_rand_term()
self._engine.add_deposit(
context, tx_hash, self._sender, self._score_address, amount, block_height, term)
before_sender_balance = self._icx_engine.get_balance(context, self._sender)
self._engine.withdraw_deposit(context, self._sender, tx_hash, block_height + term - 1)
after_sender_balance = self._icx_engine.get_balance(context, self._sender)
deposit_info = self._engine.get_deposit_info(context, self._score_address, block_height)
self.assertIsNone(deposit_info)
self.assertGreater(after_sender_balance - before_sender_balance, 0)
self.assertLessEqual(after_sender_balance - before_sender_balance, amount)
def test_withdraw_fee_and_updates_previous_and_next_link_ascending(self):
"""
Given: There are four deposits.
When : Withdraws all of them sequentially(ascending).
Then : Checks if the previous and next link update correctly.
"""
context = self.get_context()
cnt_deposit = 4
block_height = randrange(100, 10000)
arr_tx_hash = []
for i in range(cnt_deposit):
arr_tx_hash.append(os.urandom(32))
amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
term = get_rand_term()
block_height += 1
self._engine.add_deposit(
context, arr_tx_hash[i], self._sender, self._score_address, amount, block_height, term)
for i in range(cnt_deposit):
target_deposit = self._engine.get_deposit(context, arr_tx_hash[i])
self._engine.withdraw_deposit(context, self._sender, arr_tx_hash[i], block_height + term // 2)
if cnt_deposit - 1 == i:
self.assertIsNone(target_deposit.next_id)
break
next_deposit = self._engine.get_deposit(context, target_deposit.next_id)
self.assertEqual(next_deposit.prev_id, None)
deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
self.assertEqual(next_deposit.id, deposit_meta.head_id)
def test_withdraw_fee_and_updates_previous_and_next_link_descending(self):
"""
Given: There are four deposits.
When : Withdraws all of them sequentially(descending).
Then : Checks if the previous and next link update correctly.
"""
context = self.get_context()
cnt_deposit = 4
block_height = randrange(100, 10000)
arr_tx_hash = []
for i in range(cnt_deposit):
arr_tx_hash.append(os.urandom(32))
amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
term = get_rand_term()
block_height += 1
self._engine.add_deposit(
context, arr_tx_hash[i], self._sender, self._score_address, amount, block_height, term)
for i in range(cnt_deposit - 1, -1, -1):
target_deposit = self._engine.get_deposit(context, arr_tx_hash[i])
self._engine.withdraw_deposit(context, self._sender, arr_tx_hash[i], block_height + term // 2)
if i == 0:
self.assertIsNone(target_deposit.prev_id)
break
prev_deposit = self._engine.get_deposit(context, target_deposit.prev_id)
self.assertEqual(prev_deposit.next_id, None)
deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
self.assertEqual(prev_deposit.id, deposit_meta.tail_id)
@unittest.skipIf(FIXED_TERM is True, "FIXED_TERM is true")
def test_withdraw_fee_when_available_head_id_of_virtual_step_is_same_as_deposit_id(self):
    """
    Given: There are four deposits. Only the last deposit has a long enough term.
    When : Available head id of the virtual step is same as deposit id.
    Then : Searches for next deposit id which is available to use virtual step
           and where expires of the deposit is more than current block height.
           In the test, only the last deposit is available.
    """
    context = self.get_context()
    cnt_deposit = 4
    block_height = randrange(100, 10000)
    arr_tx_hash = []
    for i in range(cnt_deposit):
        arr_tx_hash.append(os.urandom(32))
        amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
        block_height += 1
        # Only the last deposit gets the maximum term; the others expire soon.
        if i != cnt_deposit - 1:
            term = FeeEngine._MIN_DEPOSIT_TERM
        else:
            term = FeeEngine._MAX_DEPOSIT_TERM
        self._engine.add_deposit(
            context, arr_tx_hash[i], self._sender, self._score_address, amount, block_height, term)
    deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
    self.assertEqual(deposit_meta.available_head_id_of_virtual_step, arr_tx_hash[0])
    # Withdraw the head deposit at a point where the short-term deposits
    # have already expired.
    self._engine.withdraw_deposit(context, self._sender, arr_tx_hash[0],
                                  block_height + FeeEngine._MAX_DEPOSIT_TERM // 2)
    deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
    # Fix: idiomatic negative index (was arr_tx_hash[len(arr_tx_hash) - 1]).
    self.assertEqual(deposit_meta.available_head_id_of_virtual_step, arr_tx_hash[-1])
@unittest.skipIf(FIXED_TERM is True, "FIXED_TERM is true")
def test_withdraw_fee_when_available_head_id_of_deposit_is_same_as_deposit_id(self):
    """
    Given: There are four deposits. Only the third deposit has a long enough term.
    When : Available head id of deposit is same as deposit id.
    Then : Searches for next deposit id which is available to use deposit
           and where expires of the deposit is more than current block height.
           In the test, only the third deposit is available.
    """
    context = self.get_context()
    cnt_deposit = 4
    block_height = randrange(100, 10000)
    arr_tx_hash = []
    for i in range(cnt_deposit):
        arr_tx_hash.append(os.urandom(32))
        amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
        block_height += 1
        # Only the third deposit (index cnt_deposit - 2) gets the maximum term.
        if i != cnt_deposit - 2:
            term = FeeEngine._MIN_DEPOSIT_TERM
        else:
            term = FeeEngine._MAX_DEPOSIT_TERM
        self._engine.add_deposit(
            context, arr_tx_hash[i], self._sender, self._score_address, amount, block_height, term)
    deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
    self.assertEqual(deposit_meta.available_head_id_of_deposit, arr_tx_hash[0])
    # Withdraw the head deposit at a point where the short-term deposits
    # have already expired.
    self._engine.withdraw_deposit(context, self._sender, arr_tx_hash[0],
                                  block_height + FeeEngine._MAX_DEPOSIT_TERM // 2)
    deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
    # Fix: idiomatic negative index (was arr_tx_hash[len(arr_tx_hash) - 2]).
    self.assertEqual(deposit_meta.available_head_id_of_deposit, arr_tx_hash[-2])
@unittest.skipIf(FIXED_TERM is True, "FIXED_TERM is true")
def test_withdraw_fee_to_check_setting_on_next_max_expires(self):
    """
    Given: There are four deposits; the first one has the maximum term.
    When : The withdrawn deposit's expires equals the SCORE's max expires.
    Then : The engine finds the next max expires above the current block height.
    """
    context = self.get_context()
    deposit_count = 4
    block_height = randrange(100, 10000)
    tx_hashes = []
    remaining_max_expires = 0   # max expires of the short-term deposits
    initial_max_expires = 0     # max expires while the long-term deposit exists
    for idx in range(deposit_count):
        tx_hashes.append(os.urandom(32))
        deposit_amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
        block_height += 1
        if idx == 0:
            # Only the first deposit carries the maximum term, so it alone
            # determines the SCORE's initial max expires.
            deposit_term = FeeEngine._MAX_DEPOSIT_TERM
            initial_max_expires = block_height + deposit_term
        else:
            deposit_term = FeeEngine._MIN_DEPOSIT_TERM
            remaining_max_expires = max(remaining_max_expires, block_height + deposit_term)
        self._engine.add_deposit(
            context, tx_hashes[idx], self._sender, self._score_address,
            deposit_amount, block_height, deposit_term)
    deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
    self.assertEqual(deposit_meta.available_head_id_of_virtual_step, tx_hashes[0])
    self.assertEqual(deposit_meta.expires_of_virtual_step, initial_max_expires)
    self.assertEqual(deposit_meta.expires_of_deposit, initial_max_expires)
    # Withdraw the long-term deposit; the max expires must fall back to the
    # best of the remaining short-term deposits.
    self._engine.withdraw_deposit(context, self._sender, tx_hashes[0],
                                  block_height + FeeEngine._MIN_DEPOSIT_TERM // 2)
    deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
    self.assertEqual(deposit_meta.expires_of_virtual_step, remaining_max_expires)
    self.assertEqual(deposit_meta.expires_of_deposit, remaining_max_expires)
@unittest.skipIf(FIXED_TERM is True, "FIXED_TERM is true")
def test_withdraw_fee_of_last_deposit_to_check_setting_on_next_max_expires(self):
    """
    Given: There are four deposits; only the last one has the maximum term.
    When : The withdrawn deposit (the last one) carries the SCORE's max expires.
    Then : The engine searches for the next max expires above the current block height.
    """
    context = self.get_context()
    cnt_deposit = 4
    block_height = randrange(100, 10000)
    arr_tx_hash = []
    # Max expires among the short-term deposits (expected after withdrawal).
    last_expires = 0
    # Max expires while all deposits exist (driven by the long-term deposit).
    org_last_expires = 0
    for i in range(cnt_deposit):
        arr_tx_hash.append(os.urandom(32))
        amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
        block_height += 1
        if i != cnt_deposit-1:
            term = FeeEngine._MIN_DEPOSIT_TERM
            if block_height + term > last_expires:
                last_expires = block_height + term
        else:
            # Only the last deposit gets the maximum term.
            term = FeeEngine._MAX_DEPOSIT_TERM
            org_last_expires = block_height + term
        self._engine.add_deposit(
            context, arr_tx_hash[i], self._sender, self._score_address, amount, block_height, term)
    deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
    self.assertEqual(deposit_meta.available_head_id_of_virtual_step, arr_tx_hash[0])
    self.assertEqual(deposit_meta.available_head_id_of_deposit, arr_tx_hash[0])
    self.assertEqual(deposit_meta.expires_of_virtual_step, org_last_expires)
    self.assertEqual(deposit_meta.expires_of_deposit, org_last_expires)
    # Withdraws the last one
    self._engine.withdraw_deposit(context, self._sender, arr_tx_hash[cnt_deposit - 1],
                                  block_height + FeeEngine._MIN_DEPOSIT_TERM // 2)
    deposit_meta = self._engine._get_or_create_deposit_meta(context, self._score_address)
    # After removing the long-term deposit, the max expires must fall back
    # to the best of the remaining short-term deposits.
    self.assertEqual(deposit_meta.expires_of_virtual_step, last_expires)
    self.assertEqual(deposit_meta.expires_of_deposit, last_expires)
def test_get_deposit_info(self):
    """Deposit fields must round-trip through add_deposit/get_deposit,
    and the deposit amount must be taken from the sender's balance."""
    context = self.get_context()
    tx_hash = os.urandom(32)
    term = get_rand_term()
    block_height = randrange(100, 10000)
    amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
    balance_before = self._icx_engine.get_balance(context, self._sender)
    self._engine.add_deposit(
        context, tx_hash, self._sender, self._score_address, amount, block_height, term)
    balance_after = self._icx_engine.get_balance(context, self._sender)
    # The deposited amount left the sender's account.
    self.assertEqual(amount, balance_before - balance_after)
    deposit = self._engine.get_deposit(context, tx_hash)
    # Every stored field matches what was passed to add_deposit.
    self.assertEqual(tx_hash, deposit.id)
    self.assertEqual(self._score_address, deposit.score_address)
    self.assertEqual(self._sender, deposit.sender)
    self.assertEqual(amount, deposit.deposit_amount)
    self.assertEqual(block_height, deposit.created)
    self.assertEqual(block_height + term, deposit.expires)
def test_charge_transaction_fee_without_sharing(self):
    """Without fee sharing, the sender pays the whole transaction fee."""
    context = self.get_context()
    step_price = 10 ** 10
    used_step = 10 ** 10
    tx_hash = os.urandom(32)
    block_height = randrange(100, 10000)
    deposit_amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
    self._engine.add_deposit(
        context, tx_hash, self._sender, self._score_address,
        deposit_amount, block_height, get_rand_term())
    balance_before = self._icx_engine.get_balance(context, self._sender)
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, block_height)
    balance_after = self._icx_engine.get_balance(context, self._sender)
    # The full fee (price * steps) comes out of the sender's balance.
    self.assertEqual(step_price * used_step, balance_before - balance_after)
def test_charge_transaction_fee_sharing_deposit(self):
    """With 50% fee sharing, the sender pays only its share of the fee."""
    context = self.get_context()
    step_price = 10 ** 10
    used_step = 10 ** 10
    tx_hash = os.urandom(32)
    block_height = randrange(100, 10000)
    deposit_amount = randrange(FeeEngine._MIN_DEPOSIT_AMOUNT, FeeEngine._MAX_DEPOSIT_AMOUNT)
    self._engine.add_deposit(
        context, tx_hash, self._sender, self._score_address,
        deposit_amount, block_height, get_rand_term())
    proportion = 50
    context.fee_sharing_proportion = proportion
    balance_before = self._icx_engine.get_balance(context, self._sender)
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, block_height)
    balance_after = self._icx_engine.get_balance(context, self._sender)
    # The SCORE covers `proportion`% of the steps; the sender pays the rest.
    steps_paid_by_score = used_step * proportion // 100
    steps_paid_by_sender = used_step - steps_paid_by_score
    self.assertEqual(
        step_price * steps_paid_by_sender, balance_before - balance_after)
def test_charge_fee_from_score_by_virtual_step_single_deposit(self):
    """
    Given: Five deposits. The fourth deposit is the max expire.
    When : Current block is 120 so 1st deposit is unavailable.
    Then : Pays fee by virtual step of 2nd; updates indices to 2nd.
    """
    context = self.get_context()
    # tx_hash, from_block, to_block, deposit_amount, virtual_step_amount
    deposits = [
        (os.urandom(32), 10, 100, 100, 100),
        (os.urandom(32), 50, 180, 100, 100),
        (os.urandom(32), 70, 150, 100, 100),
        (os.urandom(32), 90, 250, 100, 100),
        (os.urandom(32), 110, 200, 100, 100)
    ]
    self._set_up_deposits(context, deposits)
    step_price = 1
    current_block = 120
    # 80 < 100 virtual steps in the 2nd deposit, so it is only partially used.
    used_step = 80
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    before_virtual_step = deposit_info.available_virtual_step
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, current_block)
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    after_virtual_step = deposit_info.available_virtual_step
    # The fee must have been paid entirely from virtual steps.
    self.assertEqual(used_step, before_virtual_step - after_virtual_step)
    deposit_meta = self.fee_storage.get_deposit_meta(context, self._score_address)
    # Head index moves to the 2nd deposit (the 1st expired at block 100).
    self.assertEqual(deposits[1][0], deposit_meta.available_head_id_of_virtual_step)
def test_charge_fee_from_score_by_virtual_step_single_deposit_next_head(self):
    """
    Given: Five deposits. The fourth deposit is the max expire.
    When : Current block is 120 so 1st deposit is unavailable.
    Then : Pays fee by virtual step of 2nd; the virtual steps in 2nd are
           fully consumed, so indices update to 3rd.
    """
    context = self.get_context()
    # tx_hash, from_block, to_block, deposit_amount, virtual_step_amount
    deposits = [
        (os.urandom(32), 10, 100, 100, 100),
        (os.urandom(32), 50, 180, 100, 100),
        (os.urandom(32), 70, 150, 100, 100),
        (os.urandom(32), 90, 250, 100, 100),
        (os.urandom(32), 110, 200, 100, 100)
    ]
    self._set_up_deposits(context, deposits)
    step_price = 1
    current_block = 120
    # Exactly drains the 100 virtual steps of the 2nd deposit.
    used_step = 100
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    before_virtual_step = deposit_info.available_virtual_step
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, current_block)
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    after_virtual_step = deposit_info.available_virtual_step
    self.assertEqual(used_step, before_virtual_step - after_virtual_step)
    deposit_meta = self.fee_storage.get_deposit_meta(context, self._score_address)
    # Head index skips the drained 2nd deposit and points at the 3rd.
    self.assertEqual(deposits[2][0], deposit_meta.available_head_id_of_virtual_step)
def test_charge_fee_from_score_by_virtual_step__single_deposit_next_head_next_expire(self):
    """
    Given: Five deposits. The fourth deposit is the max expire.
    When : Current block is 190 so only 4th and 5th deposits are available.
    Then : Pays fee by virtual step of 4th; the virtual steps in 4th are
           fully consumed, so indices update to 5th.
    """
    context = self.get_context()
    # tx_hash, from_block, to_block, deposit_amount, virtual_step_amount
    deposits = [
        (os.urandom(32), 10, 100, 100, 100),
        (os.urandom(32), 50, 180, 100, 100),
        (os.urandom(32), 70, 150, 100, 100),
        (os.urandom(32), 90, 250, 100, 100),
        (os.urandom(32), 110, 200, 100, 100)
    ]
    self._set_up_deposits(context, deposits)
    step_price = 1
    current_block = 190
    # Exactly drains the 100 virtual steps of the 4th deposit.
    used_step = 100
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    before_virtual_step = deposit_info.available_virtual_step
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, current_block)
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    after_virtual_step = deposit_info.available_virtual_step
    self.assertEqual(used_step, before_virtual_step - after_virtual_step)
    deposit_meta = self.fee_storage.get_deposit_meta(context, self._score_address)
    # Both the head index and the expires must advance to the 5th deposit.
    self.assertEqual(deposits[4][0], deposit_meta.available_head_id_of_virtual_step)
    self.assertEqual(deposits[4][2], deposit_meta.expires_of_virtual_step)
def test_charge_fee_from_score_by_virtual_step__single_deposit_next_head_next_expire_none(self):
    """
    Given: Five deposits. The fourth deposit is the max expire.
    When : Current block is 210 so only the 4th deposit is available.
    Then : Pays fee by virtual step of 4th; the virtual steps in 4th are
           fully consumed, but there are no more available deposits so the
           indices are cleared.
    """
    context = self.get_context()
    # tx_hash, from_block, to_block, deposit_amount, virtual_step_amount
    deposits = [
        (os.urandom(32), 10, 100, 100, 100),
        (os.urandom(32), 50, 180, 100, 100),
        (os.urandom(32), 70, 150, 100, 100),
        (os.urandom(32), 90, 250, 100, 100),
        (os.urandom(32), 110, 200, 100, 100)
    ]
    self._set_up_deposits(context, deposits)
    step_price = 1
    current_block = 210
    used_step = 100
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    before_virtual_step = deposit_info.available_virtual_step
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, current_block)
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    after_virtual_step = deposit_info.available_virtual_step
    self.assertEqual(used_step, before_virtual_step - after_virtual_step)
    deposit_meta = self.fee_storage.get_deposit_meta(context, self._score_address)
    # No available deposit remains: head id is None and expires is the
    # sentinel value -1.
    self.assertEqual(None, deposit_meta.available_head_id_of_virtual_step)
    self.assertEqual(-1, deposit_meta.expires_of_virtual_step)
def test_charge_fee_from_score_by_virtual_step_multiple_deposit(self):
    """
    Given: Five deposits. The fourth deposit is the max expire.
    When : Current block is 120 so 1st deposit is unavailable.
    Then : Pays fee by virtual step through 2nd, 3rd, 4th; the virtual steps
           in 2nd and 3rd are fully consumed, so indices update to 4th.
    """
    context = self.get_context()
    # tx_hash, from_block, to_block, deposit_amount, virtual_step_amount
    deposits = [
        (os.urandom(32), 10, 100, 100, 100),
        (os.urandom(32), 50, 180, 100, 100),
        (os.urandom(32), 70, 150, 100, 100),
        (os.urandom(32), 90, 250, 100, 100),
        (os.urandom(32), 110, 200, 100, 100)
    ]
    self._set_up_deposits(context, deposits)
    step_price = 1
    current_block = 120
    # 250 steps drain the 2nd and 3rd (100 each) and dip into the 4th.
    used_step = 250
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    before_virtual_step = deposit_info.available_virtual_step
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, current_block)
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    after_virtual_step = deposit_info.available_virtual_step
    self.assertEqual(used_step, before_virtual_step - after_virtual_step)
    deposit_meta = self.fee_storage.get_deposit_meta(context, self._score_address)
    # The head index lands on the partially used 4th deposit.
    self.assertEqual(deposits[3][0], deposit_meta.available_head_id_of_virtual_step)
def test_charge_fee_from_score_by_combine_by_single_deposit(self):
    """
    Given: Five deposits. The fourth deposit is the max expire.
           Remaining virtual steps are in the 5th deposit.
    When : Current block is 120 so 1st deposit is unavailable.
           Remaining virtual steps are not enough to pay fees.
    Then : Pays fee by virtual step first, remaining fee by deposit of 2nd;
           updates the deposit index to 2nd.
    """
    context = self.get_context()
    # tx_hash, from_block, to_block, deposit_amount, virtual_step_amount
    deposits = [
        (os.urandom(32), 10, 100, 100, 0),
        (os.urandom(32), 50, 180, 100, 0),
        (os.urandom(32), 70, 150, 100, 0),
        (os.urandom(32), 90, 250, 100, 0),
        (os.urandom(32), 110, 200, 100, 50)
    ]
    self._set_up_deposits(context, deposits)
    step_price = 1
    current_block = 120
    # 70 steps: 50 covered by virtual steps, 20 charged to the 2nd deposit.
    used_step = 70
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, current_block)
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    after_virtual_step = deposit_info.available_virtual_step
    # All virtual steps have been consumed.
    self.assertEqual(0, after_virtual_step)
    deposit_meta = self.fee_storage.get_deposit_meta(context, self._score_address)
    # Virtual-step index is exhausted; deposit index points at the 2nd.
    self.assertEqual(None, deposit_meta.available_head_id_of_virtual_step)
    self.assertEqual(-1, deposit_meta.expires_of_virtual_step)
    self.assertEqual(deposits[1][0], deposit_meta.available_head_id_of_deposit)
def test_charge_fee_from_score_by_combine_next_head(self):
    """
    Given: Five deposits. The fourth deposit is the max expire.
           Remaining virtual steps are in the 5th deposit.
    When : Current block is 120 so 1st deposit is unavailable.
           Remaining virtual steps are not enough to pay fees.
    Then : Pays fee by virtual step first, remaining fee by deposit of 2nd;
           the 2nd deposit is fully consumed so indices update to 3rd.
    """
    context = self.get_context()
    # tx_hash, from_block, to_block, deposit_amount, virtual_step_amount
    deposits = [
        (os.urandom(32), 10, 100, 100, 0),
        (os.urandom(32), 50, 180, 100, 0),
        (os.urandom(32), 70, 150, 100, 0),
        (os.urandom(32), 90, 250, 100, 0),
        (os.urandom(32), 110, 200, 100, 50)
    ]
    self._set_up_deposits(context, deposits)
    step_price = 1
    current_block = 120
    # 140 steps: 50 covered by virtual steps, 90 charged against the 2nd
    # deposit, which empties its usable balance.
    used_step = 140
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, current_block)
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    after_virtual_step = deposit_info.available_virtual_step
    self.assertEqual(0, after_virtual_step)
    deposit_meta = self.fee_storage.get_deposit_meta(context, self._score_address)
    self.assertEqual(None, deposit_meta.available_head_id_of_virtual_step)
    self.assertEqual(-1, deposit_meta.expires_of_virtual_step)
    # The deposit index has advanced past the drained 2nd deposit.
    self.assertEqual(deposits[2][0], deposit_meta.available_head_id_of_deposit)
def test_charge_fee_from_score_by_combine_next_head_next_expire(self):
    """
    Given: Five deposits. The fourth deposit is the max expire.
           Remaining virtual steps are in the 5th deposit.
    When : Current block is 190 so 1st, 2nd, 3rd deposits are unavailable.
           Remaining virtual steps are not enough to pay fees.
    Then : Pays fee by virtual step first, remaining fee by deposit of 4th;
           the 4th deposit is fully consumed so indices update to 5th.
    """
    context = self.get_context()
    # tx_hash, from_block, to_block, deposit_amount, virtual_step_amount
    deposits = [
        (os.urandom(32), 10, 100, 100, 0),
        (os.urandom(32), 50, 180, 100, 0),
        (os.urandom(32), 70, 150, 100, 0),
        (os.urandom(32), 90, 250, 100, 0),
        (os.urandom(32), 110, 200, 100, 50)
    ]
    self._set_up_deposits(context, deposits)
    step_price = 1
    current_block = 190
    used_step = 140
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, current_block)
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    after_virtual_step = deposit_info.available_virtual_step
    self.assertEqual(0, after_virtual_step)
    deposit_meta = self.fee_storage.get_deposit_meta(context, self._score_address)
    self.assertEqual(None, deposit_meta.available_head_id_of_virtual_step)
    self.assertEqual(-1, deposit_meta.expires_of_virtual_step)
    # Both the deposit head index and its expires advance to the 5th deposit.
    self.assertEqual(deposits[4][0], deposit_meta.available_head_id_of_deposit)
    self.assertEqual(deposits[4][2], deposit_meta.expires_of_deposit)
def test_charge_fee_from_score_by_combine_next_head_next_expire_none(self):
    """
    Given: Five deposits. The fourth deposit is the max expire.
           Remaining virtual steps are in the 4th and 5th deposits.
    When : Current block is 220 so only the 4th deposit is still valid.
           Remaining virtual steps are not enough to pay fees.
    Then : Pays fee by virtual step first, remaining fee by deposit of 4th;
           all available deposits are consumed, so the SCORE is disabled.
    """
    context = self.get_context()
    # tx_hash, from_block, to_block, deposit_amount, virtual_step_amount
    deposits = [
        (os.urandom(32), 10, 100, 100, 0),
        (os.urandom(32), 50, 180, 100, 0),
        (os.urandom(32), 70, 150, 100, 0),
        (os.urandom(32), 90, 250, 100, 50),
        (os.urandom(32), 110, 200, 100, 100)
    ]
    self._set_up_deposits(context, deposits)
    step_price = 1
    current_block = 220
    used_step = 140
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, current_block)
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    after_virtual_step = deposit_info.available_virtual_step
    self.assertEqual(0, after_virtual_step)
    deposit_meta = self.fee_storage.get_deposit_meta(context, self._score_address)
    # Both virtual-step and deposit indices are cleared (sentinel -1 expires),
    # meaning no payment source is left for this SCORE.
    self.assertEqual(None, deposit_meta.available_head_id_of_virtual_step)
    self.assertEqual(-1, deposit_meta.expires_of_virtual_step)
    self.assertEqual(None, deposit_meta.available_head_id_of_deposit)
    self.assertEqual(-1, deposit_meta.expires_of_deposit)
def test_charge_fee_from_score_by_combine_multiple_deposit(self):
    """
    Given: Five deposits. The fourth deposit is the max expire.
           Remaining virtual steps are in the 5th deposit.
    When : Current block is 120 so 1st deposit is unavailable.
           Remaining virtual steps are not enough to pay fees.
    Then : Pays fee by virtual step first, then by deposit through the
           2nd and 3rd deposits.
    """
    context = self.get_context()
    # tx_hash, from_block, to_block, deposit_amount, virtual_step_amount
    deposits = [
        (os.urandom(32), 10, 100, 100, 0),
        (os.urandom(32), 50, 180, 100, 0),
        (os.urandom(32), 70, 150, 100, 0),
        (os.urandom(32), 90, 250, 100, 0),
        (os.urandom(32), 110, 200, 100, 50)
    ]
    self._set_up_deposits(context, deposits)
    step_price = 1
    current_block = 120
    # 230 steps: 50 from virtual steps, the rest spans multiple deposits.
    used_step = 230
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, current_block)
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    after_virtual_step = deposit_info.available_virtual_step
    self.assertEqual(0, after_virtual_step)
    deposit_meta = self.fee_storage.get_deposit_meta(context, self._score_address)
    self.assertEqual(None, deposit_meta.available_head_id_of_virtual_step)
    self.assertEqual(-1, deposit_meta.expires_of_virtual_step)
    # Asserts indices are updated
    self.assertEqual(deposits[3][0], deposit_meta.available_head_id_of_deposit)
    self.assertEqual(deposits[3][2], deposit_meta.expires_of_deposit)
def test_charge_fee_from_score_by_combine_additional_pay(self):
    """
    Given: Five deposits. The fourth deposit is the max expire.
           Remaining virtual steps are in the 4th and 5th deposits.
    When : Current block is 220 so the 5th deposit is unavailable.
           Neither the remaining virtual steps nor the available deposits
           are enough to pay the fee.
    Then : Pays the fee regardless of the minimum remaining amount and
           disables the SCORE.
    """
    context = self.get_context()
    # tx_hash, from_block, to_block, deposit_amount, virtual_step_amount
    deposits = [
        (os.urandom(32), 10, 100, 100, 0),
        (os.urandom(32), 50, 180, 100, 0),
        (os.urandom(32), 70, 150, 100, 0),
        (os.urandom(32), 90, 250, 100, 50),
        (os.urandom(32), 110, 200, 100, 100)
    ]
    self._set_up_deposits(context, deposits)
    step_price = 1
    current_block = 220
    # Deliberately exceeds what virtual steps plus deposits can cover.
    used_step = 150
    self._engine.charge_transaction_fee(
        context, self._sender, self._score_address, step_price, used_step, current_block)
    deposit_info = self._engine.get_deposit_info(context, self._score_address, current_block)
    after_virtual_step = deposit_info.available_virtual_step
    self.assertEqual(0, after_virtual_step)
    deposit_meta = self.fee_storage.get_deposit_meta(context, self._score_address)
    # Asserts virtual step disabled
    self.assertEqual(None, deposit_meta.available_head_id_of_virtual_step)
    self.assertEqual(-1, deposit_meta.expires_of_virtual_step)
    # Asserts deposit disabled
    self.assertEqual(None, deposit_meta.available_head_id_of_deposit)
    self.assertEqual(-1, deposit_meta.expires_of_deposit)
def _set_up_deposits(self, context, deposits):
    """Register the given fixture deposits against the SCORE under test.

    :param context: invocation context passed through to the engine
    :param deposits: iterable of tuples
        (tx_hash, from_block, to_block, deposit_amount, virtual_step_amount)
    """
    # Charge 100% of fees to the SCORE so the tests exercise the deposit path.
    context.fee_sharing_proportion = 100
    # Loosen the engine limits so the small fixture values are accepted.
    self._engine._MIN_DEPOSIT_TERM = 50
    self._engine._MIN_DEPOSIT_AMOUNT = 10
    # Fix: unpack the fixture tuple by name instead of positional indexing
    # (deposit[0], deposit[3], ...) and drop the stale commented-out mock.
    for tx_hash, from_block, to_block, amount, virtual_step in deposits:
        # Force the issued virtual steps to the fixture value for this deposit.
        VirtualStepCalculator.calculate_virtual_step = Mock(return_value=virtual_step)
        self._engine.add_deposit(
            context, tx_hash, self._sender, self._score_address, amount,
            from_block, to_block - from_block)
| 43.265692
| 117
| 0.666759
| 6,251
| 50,318
| 5.029755
| 0.053271
| 0.040234
| 0.044273
| 0.03365
| 0.840877
| 0.822684
| 0.811902
| 0.802169
| 0.795267
| 0.782513
| 0
| 0.032482
| 0.25478
| 50,318
| 1,162
| 118
| 43.302926
| 0.80599
| 0.140546
| 0
| 0.695652
| 0
| 0
| 0.004761
| 0
| 0
| 0
| 0
| 0
| 0.157609
| 1
| 0.058424
| false
| 0
| 0.029891
| 0.002717
| 0.099185
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a35cbc9a1ccb3c1724b95083e1001a7a1ddf7fe8
| 6,563
|
py
|
Python
|
identify_shapes/tests/center_of_shape.py
|
SPLAYER-HD/ImageDetect-Colors-Shapes
|
eddea8189760b7326a2989cb4a90fa1b183ff2ee
|
[
"MIT"
] | null | null | null |
identify_shapes/tests/center_of_shape.py
|
SPLAYER-HD/ImageDetect-Colors-Shapes
|
eddea8189760b7326a2989cb4a90fa1b183ff2ee
|
[
"MIT"
] | 1
|
2020-02-17T10:34:05.000Z
|
2020-02-17T10:34:05.000Z
|
identify_shapes/tests/center_of_shape.py
|
SPLAYER-HD/ImageDetect-Colors-Shapes
|
eddea8189760b7326a2989cb4a90fa1b183ff2ee
|
[
"MIT"
] | null | null | null |
# import the necessary packages
import argparse
import imutils
import cv2
from PIL import Image
import numpy as np
from random import randint

# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=True,
                help="path to the input image")
args = vars(ap.parse_args())

print('cv2.__version__')
print(cv2.__version__)


def draw_contour_centers(target, contours):
    """Draw each contour, its centroid and a 'center' label on *target* in place."""
    for c in contours:
        # compute the center of the contour
        M = cv2.moments(c)
        print('M')
        # Fix: a degenerate contour has zero area (m00 == 0); the original
        # divided by it unconditionally and could raise ZeroDivisionError.
        if M["m00"] == 0:
            continue
        cX = int(M["m10"] / M["m00"])
        cY = int(M["m01"] / M["m00"])
        # draw the contour and center of the shape on the image
        cv2.drawContours(target, [c], -1, (0, 255, 0), 2)
        cv2.circle(target, (cX, cY), 7, (255, 255, 255), -1)
        cv2.putText(target, "center", (cX - 20, cY - 20),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)


# load the image, convert it to grayscale, blur it slightly, and threshold it
image = cv2.imread(args["image"])
img = Image.open(args["image"])
array = np.array(img)

gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
Image.fromarray(gray).save('shades-of-grey_gray.png')

blurred = cv2.GaussianBlur(gray, (5, 5), 0)
Image.fromarray(blurred).save('shades-of-grey_blurred.png')

thresh = cv2.threshold(gray, 60, 80, cv2.THRESH_BINARY_INV)[1]
Image.fromarray(thresh).save('shades-of-grey_thresh.png')

# find contours in the thresholded image and annotate their centers
(cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
print('cnts')
print(len(cnts))
print(len(cnts[0]))
draw_contour_centers(image, cnts)

# Recolor the source image: every pixel whose first channel is pure white
# (255) becomes the highlight color.
array = np.array(img)
for row_index, line in enumerate(array):
    for column_index, pixel in enumerate(line):
        if pixel[0] == 255:
            array[row_index][column_index] = [21, 214, 234]

# Whiten the image border.
# Fix: in the original every `or len(array[0])-k` operand was a bare,
# always-truthy expression, so entire rows were whitened regardless of the
# column. The explicit column comparisons below are the evident intent.
last_row = len(array) - 1
last_col = len(array[0]) - 1
for row_index, line in enumerate(array):
    for column_index, pixel in enumerate(line):
        if (row_index == 0 or row_index == last_row) and \
                (column_index == 1 or column_index == last_col):
            array[row_index][column_index] = [255, 255, 255]
        if (row_index == 0 or row_index == last_row) and \
                (column_index == 0 or column_index == last_col):
            array[row_index][column_index] = [255, 255, 255]
        if (row_index == 1 or row_index == last_row - 1) and \
                (column_index == 1 or column_index == last_col - 1):
            array[row_index][column_index] = [255, 255, 255]
        if row_index == last_row - 2 and column_index == last_col - 2:
            array[row_index][column_index] = [255, 255, 255]

invimg = Image.fromarray(array)
invimg.save('shades-of-grey_red.png')

image = cv2.imread('shades-of-grey_red.png')
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# Fix: the original re-saved the *red* image under the "gray" filename here;
# save the freshly computed grayscale instead.
Image.fromarray(gray).save('shades-of-grey_gray.png')

blurred = cv2.GaussianBlur(gray, (5, 5), 0)
Image.fromarray(blurred).save('shades-of-grey_blurred.png')

# NOTE(review): cv2.RETR_FLOODFILL is a contour-retrieval flag, not a
# threshold type. Its numeric value (4) made the original call behave as
# THRESH_TOZERO_INV, so the equivalent correct constant is used explicitly.
thresh = cv2.threshold(gray, 210, 255, cv2.THRESH_TOZERO_INV)[1]
invimg = Image.fromarray(thresh)
invimg.save('shades-of-grey_thresh.png')

# find contours in the thresholded image and annotate their centers
(cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE)
print('cnts')
print(len(cnts))
print(len(cnts))
draw_contour_centers(image, cnts)

# show the image
cv2.imshow("Image", image)
cv2.waitKey(0)

####################################
# NOTE(review): with a threshold of 255, THRESH_TRUNC truncates nothing and
# returns the grayscale unchanged — confirm this pass is intentional.
thresh = cv2.threshold(gray, 255, 0, cv2.THRESH_TRUNC)[1]
invimg = Image.fromarray(thresh)
invimg.save('shades-of-grey_thresh.png')

# find contours in the thresholded image and annotate their centers
(cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
print('cnts')
print(len(cnts))
print(len(cnts))
draw_contour_centers(image, cnts)

# show the image
cv2.imshow("Image", image)
cv2.waitKey(0)
| 30.957547
| 100
| 0.655341
| 1,043
| 6,563
| 4.032598
| 0.134228
| 0.034237
| 0.03709
| 0.042796
| 0.847836
| 0.804327
| 0.798859
| 0.785069
| 0.749643
| 0.749643
| 0
| 0.073874
| 0.15435
| 6,563
| 211
| 101
| 31.104265
| 0.683964
| 0.219564
| 0
| 0.677083
| 0
| 0
| 0.092408
| 0.056806
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.0625
| 0
| 0.0625
| 0.145833
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a36e74443a1e748ceabf1db23e8b7644ebc230db
| 81
|
py
|
Python
|
destructify/parsing/__init__.py
|
mvdnes/destructify
|
eb37ee3465da429685a8301ec00b4a63cd375561
|
[
"MIT"
] | 7
|
2018-06-04T13:47:59.000Z
|
2021-01-13T19:40:32.000Z
|
destructify/parsing/__init__.py
|
mvdnes/destructify
|
eb37ee3465da429685a8301ec00b4a63cd375561
|
[
"MIT"
] | 1
|
2021-02-08T10:35:14.000Z
|
2021-02-08T10:35:14.000Z
|
destructify/parsing/__init__.py
|
mvdnes/destructify
|
eb37ee3465da429685a8301ec00b4a63cd375561
|
[
"MIT"
] | 2
|
2020-11-30T22:00:16.000Z
|
2021-07-10T09:45:49.000Z
|
from .context import *
from .streams import *
from .expression import this, len_
| 20.25
| 34
| 0.765432
| 11
| 81
| 5.545455
| 0.636364
| 0.327869
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160494
| 81
| 3
| 35
| 27
| 0.897059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a383c5af1a3c373717d86976e54e385145c82f87
| 93
|
py
|
Python
|
pwlf/__init__.py
|
alexlib/piecewise_linear_fit_py
|
cb32e331690e668b374a54f890eac549d884b2fe
|
[
"MIT"
] | 199
|
2017-10-31T10:26:15.000Z
|
2022-03-30T09:16:52.000Z
|
pwlf/__init__.py
|
alexlib/piecewise_linear_fit_py
|
cb32e331690e668b374a54f890eac549d884b2fe
|
[
"MIT"
] | 79
|
2017-10-31T10:26:12.000Z
|
2022-03-31T18:46:24.000Z
|
pwlf/__init__.py
|
alexlib/piecewise_linear_fit_py
|
cb32e331690e668b374a54f890eac549d884b2fe
|
[
"MIT"
] | 54
|
2017-11-09T06:50:34.000Z
|
2022-03-09T06:15:54.000Z
|
from .pwlf import PiecewiseLinFit # noqa F401
from .version import __version__ # noqa F401
| 31
| 46
| 0.784946
| 12
| 93
| 5.75
| 0.583333
| 0.231884
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077922
| 0.172043
| 93
| 2
| 47
| 46.5
| 0.818182
| 0.204301
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6ea411ff7d905a647821002b7f1011156a384d50
| 38,085
|
py
|
Python
|
instances/passenger_demand/pas-20210421-2109-int16e/96.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int16e/96.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int16e/96.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
"""
PASSENGERS
"""
numPassengers = 3683
passenger_arriving = (
(3, 11, 2, 7, 3, 0, 6, 8, 4, 4, 1, 0), # 0
(4, 9, 10, 6, 2, 0, 13, 9, 4, 6, 2, 0), # 1
(7, 8, 11, 3, 5, 0, 6, 4, 8, 9, 6, 0), # 2
(7, 7, 10, 2, 2, 0, 9, 7, 7, 5, 2, 0), # 3
(1, 8, 8, 4, 3, 0, 13, 7, 6, 4, 2, 0), # 4
(3, 4, 9, 2, 0, 0, 5, 9, 6, 1, 0, 0), # 5
(4, 11, 14, 2, 1, 0, 9, 9, 10, 8, 3, 0), # 6
(7, 9, 11, 3, 0, 0, 12, 10, 6, 8, 0, 0), # 7
(6, 11, 11, 6, 1, 0, 5, 11, 6, 2, 5, 0), # 8
(5, 7, 9, 9, 0, 0, 8, 13, 8, 2, 3, 0), # 9
(4, 12, 7, 7, 2, 0, 6, 10, 7, 5, 3, 0), # 10
(4, 8, 9, 7, 1, 0, 6, 10, 9, 8, 2, 0), # 11
(4, 7, 8, 8, 4, 0, 6, 13, 13, 1, 3, 0), # 12
(4, 6, 5, 4, 1, 0, 9, 13, 10, 4, 3, 0), # 13
(3, 5, 8, 4, 1, 0, 9, 12, 5, 10, 3, 0), # 14
(5, 9, 12, 2, 4, 0, 8, 9, 6, 7, 2, 0), # 15
(7, 7, 6, 7, 2, 0, 7, 15, 8, 3, 3, 0), # 16
(5, 11, 13, 2, 5, 0, 11, 9, 8, 6, 3, 0), # 17
(3, 16, 11, 3, 4, 0, 8, 13, 6, 3, 2, 0), # 18
(5, 12, 15, 4, 0, 0, 10, 10, 11, 4, 3, 0), # 19
(3, 9, 15, 11, 3, 0, 8, 12, 4, 4, 2, 0), # 20
(3, 7, 6, 4, 2, 0, 7, 8, 12, 6, 2, 0), # 21
(6, 12, 10, 2, 2, 0, 10, 6, 8, 8, 3, 0), # 22
(4, 9, 6, 4, 1, 0, 4, 10, 7, 8, 1, 0), # 23
(2, 10, 13, 7, 1, 0, 12, 8, 7, 4, 3, 0), # 24
(6, 9, 14, 4, 3, 0, 5, 15, 4, 5, 1, 0), # 25
(5, 17, 6, 5, 2, 0, 6, 10, 8, 9, 3, 0), # 26
(5, 8, 13, 6, 3, 0, 5, 11, 5, 6, 3, 0), # 27
(3, 13, 9, 7, 2, 0, 10, 14, 4, 2, 3, 0), # 28
(6, 12, 4, 3, 3, 0, 9, 8, 6, 9, 2, 0), # 29
(4, 15, 6, 6, 5, 0, 10, 8, 4, 5, 0, 0), # 30
(2, 9, 5, 1, 1, 0, 8, 15, 7, 3, 3, 0), # 31
(3, 9, 7, 6, 4, 0, 8, 9, 7, 4, 2, 0), # 32
(6, 18, 6, 3, 3, 0, 5, 11, 3, 5, 1, 0), # 33
(4, 6, 7, 3, 2, 0, 14, 11, 7, 6, 3, 0), # 34
(6, 8, 13, 6, 3, 0, 5, 7, 4, 2, 3, 0), # 35
(4, 8, 5, 2, 2, 0, 7, 9, 6, 6, 3, 0), # 36
(8, 7, 6, 2, 4, 0, 16, 13, 6, 4, 3, 0), # 37
(6, 13, 4, 6, 1, 0, 11, 9, 10, 4, 3, 0), # 38
(2, 14, 10, 5, 3, 0, 6, 11, 10, 6, 2, 0), # 39
(6, 9, 9, 7, 1, 0, 7, 9, 8, 4, 0, 0), # 40
(5, 9, 9, 2, 5, 0, 8, 4, 9, 4, 0, 0), # 41
(2, 17, 5, 1, 1, 0, 6, 13, 6, 9, 1, 0), # 42
(7, 16, 8, 3, 4, 0, 4, 10, 5, 9, 2, 0), # 43
(2, 14, 13, 2, 0, 0, 11, 13, 2, 3, 3, 0), # 44
(8, 11, 13, 3, 2, 0, 4, 12, 6, 9, 2, 0), # 45
(10, 12, 8, 6, 2, 0, 6, 7, 4, 3, 2, 0), # 46
(3, 8, 11, 6, 0, 0, 9, 4, 7, 7, 4, 0), # 47
(6, 11, 11, 5, 2, 0, 8, 9, 8, 6, 2, 0), # 48
(1, 9, 10, 6, 1, 0, 6, 13, 8, 9, 3, 0), # 49
(2, 9, 6, 1, 4, 0, 6, 10, 3, 3, 1, 0), # 50
(7, 16, 8, 7, 2, 0, 4, 6, 6, 6, 5, 0), # 51
(3, 10, 11, 3, 2, 0, 6, 7, 10, 4, 2, 0), # 52
(5, 13, 8, 3, 2, 0, 8, 9, 7, 3, 3, 0), # 53
(6, 13, 6, 8, 3, 0, 6, 9, 11, 6, 2, 0), # 54
(1, 16, 5, 7, 1, 0, 8, 8, 5, 4, 4, 0), # 55
(7, 5, 15, 5, 2, 0, 4, 10, 4, 4, 3, 0), # 56
(8, 16, 5, 4, 0, 0, 3, 6, 10, 6, 3, 0), # 57
(1, 6, 8, 1, 2, 0, 10, 11, 5, 8, 3, 0), # 58
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # 59
)
station_arriving_intensity = (
(4.239442493415277, 10.874337121212122, 12.79077763496144, 10.138043478260869, 11.428846153846154, 7.610869565217392), # 0
(4.27923521607648, 10.995266557940518, 12.859864860039991, 10.194503019323673, 11.51450641025641, 7.608275422705315), # 1
(4.318573563554774, 11.114402244668911, 12.927312196515281, 10.249719806763286, 11.598358974358975, 7.60560193236715), # 2
(4.357424143985952, 11.231615625000002, 12.993070372750644, 10.303646739130434, 11.680326923076926, 7.60284945652174), # 3
(4.395753565505805, 11.346778142536477, 13.057090117109396, 10.356236714975847, 11.760333333333335, 7.600018357487922), # 4
(4.433528436250122, 11.459761240881035, 13.11932215795487, 10.407442632850241, 11.838301282051281, 7.597108997584541), # 5
(4.470715364354698, 11.570436363636365, 13.179717223650389, 10.457217391304349, 11.914153846153846, 7.594121739130435), # 6
(4.507280957955322, 11.678674954405162, 13.238226042559269, 10.50551388888889, 11.987814102564105, 7.591056944444445), # 7
(4.543191825187787, 11.784348456790122, 13.294799343044847, 10.552285024154589, 12.059205128205129, 7.587914975845411), # 8
(4.578414574187884, 11.88732831439394, 13.34938785347044, 10.597483695652175, 12.12825, 7.584696195652175), # 9
(4.612915813091406, 11.987485970819305, 13.401942302199371, 10.64106280193237, 12.194871794871796, 7.581400966183574), # 10
(4.646662150034143, 12.084692869668913, 13.452413417594972, 10.682975241545895, 12.25899358974359, 7.578029649758455), # 11
(4.679620193151888, 12.178820454545454, 13.500751928020566, 10.723173913043478, 12.320538461538462, 7.574582608695652), # 12
(4.71175655058043, 12.26974016905163, 13.546908561839473, 10.761611714975846, 12.37942948717949, 7.5710602053140095), # 13
(4.743037830455566, 12.357323456790127, 13.590834047415022, 10.798241545893719, 12.435589743589743, 7.567462801932367), # 14
(4.773430640913081, 12.441441761363635, 13.632479113110538, 10.833016304347826, 12.488942307692309, 7.563790760869566), # 15
(4.802901590088772, 12.521966526374861, 13.671794487289347, 10.86588888888889, 12.539410256410257, 7.560044444444445), # 16
(4.831417286118428, 12.598769195426486, 13.708730898314768, 10.896812198067634, 12.586916666666667, 7.556224214975846), # 17
(4.8589443371378405, 12.671721212121213, 13.74323907455013, 10.925739130434785, 12.631384615384619, 7.552330434782609), # 18
(4.8854493512828014, 12.740694020061728, 13.775269744358756, 10.952622584541063, 12.67273717948718, 7.5483634661835755), # 19
(4.910898936689104, 12.805559062850728, 13.804773636103969, 10.9774154589372, 12.710897435897436, 7.544323671497584), # 20
(4.935259701492538, 12.866187784090906, 13.831701478149103, 11.000070652173914, 12.74578846153846, 7.540211413043479), # 21
(4.958498253828894, 12.922451627384962, 13.856003998857469, 11.020541062801932, 12.777333333333331, 7.5360270531400975), # 22
(4.980581201833967, 12.97422203633558, 13.877631926592404, 11.038779589371982, 12.805455128205129, 7.531770954106282), # 23
(5.001475153643547, 13.021370454545455, 13.896535989717222, 11.054739130434783, 12.830076923076923, 7.52744347826087), # 24
(5.0211467173934246, 13.063768325617284, 13.91266691659526, 11.068372584541065, 12.851121794871794, 7.523044987922706), # 25
(5.039562501219393, 13.101287093153758, 13.925975435589832, 11.079632850241545, 12.86851282051282, 7.518575845410628), # 26
(5.056689113257243, 13.133798200757575, 13.936412275064265, 11.088472826086958, 12.88217307692308, 7.514036413043479), # 27
(5.072493161642767, 13.161173092031426, 13.943928163381893, 11.09484541062802, 12.89202564102564, 7.509427053140097), # 28
(5.086941254511755, 13.183283210578004, 13.948473828906026, 11.09870350241546, 12.89799358974359, 7.504748128019324), # 29
(5.1000000000000005, 13.200000000000001, 13.950000000000001, 11.100000000000001, 12.9, 7.5), # 30
(5.112219245524297, 13.213886079545453, 13.948855917874395, 11.099765849673204, 12.89926985815603, 7.4934020156588375), # 31
(5.124174680306906, 13.227588636363638, 13.945456038647343, 11.099067973856208, 12.897095035460993, 7.483239613526571), # 32
(5.135871675191815, 13.241105965909092, 13.93984891304348, 11.097913235294119, 12.893498936170213, 7.469612293853072), # 33
(5.147315601023018, 13.254436363636366, 13.93208309178744, 11.096308496732028, 12.888504964539008, 7.452619556888223), # 34
(5.158511828644501, 13.267578124999998, 13.922207125603865, 11.094260620915033, 12.882136524822696, 7.432360902881893), # 35
(5.169465728900256, 13.280529545454549, 13.91026956521739, 11.091776470588236, 12.874417021276598, 7.408935832083959), # 36
(5.180182672634271, 13.293288920454547, 13.896318961352657, 11.088862908496733, 12.865369858156027, 7.382443844744294), # 37
(5.190668030690537, 13.305854545454546, 13.8804038647343, 11.08552679738562, 12.855018439716313, 7.352984441112776), # 38
(5.200927173913044, 13.318224715909091, 13.862572826086955, 11.081775, 12.843386170212765, 7.32065712143928), # 39
(5.21096547314578, 13.330397727272729, 13.842874396135267, 11.077614379084968, 12.830496453900707, 7.285561385973679), # 40
(5.220788299232737, 13.342371874999998, 13.821357125603866, 11.073051797385622, 12.816372695035462, 7.247796734965852), # 41
(5.230401023017903, 13.354145454545458, 13.798069565217393, 11.068094117647059, 12.801038297872342, 7.207462668665667), # 42
(5.239809015345269, 13.365716761363636, 13.773060265700483, 11.06274820261438, 12.784516666666667, 7.164658687323005), # 43
(5.249017647058824, 13.377084090909092, 13.746377777777779, 11.05702091503268, 12.76683120567376, 7.119484291187739), # 44
(5.258032289002557, 13.388245738636364, 13.718070652173916, 11.050919117647059, 12.748005319148938, 7.072038980509745), # 45
(5.266858312020461, 13.399200000000002, 13.688187439613529, 11.044449673202614, 12.72806241134752, 7.022422255538898), # 46
(5.275501086956522, 13.409945170454547, 13.656776690821255, 11.037619444444445, 12.707025886524825, 6.970733616525071), # 47
(5.283965984654732, 13.420479545454548, 13.623886956521739, 11.030435294117646, 12.68491914893617, 6.9170725637181425), # 48
(5.292258375959079, 13.430801420454543, 13.589566787439615, 11.022904084967323, 12.66176560283688, 6.861538597367982), # 49
(5.300383631713555, 13.440909090909088, 13.553864734299518, 11.015032679738564, 12.63758865248227, 6.804231217724471), # 50
(5.308347122762149, 13.450800852272728, 13.516829347826087, 11.006827941176471, 12.612411702127659, 6.7452499250374816), # 51
(5.316154219948849, 13.460475, 13.47850917874396, 10.998296732026144, 12.58625815602837, 6.684694219556889), # 52
(5.3238102941176475, 13.469929829545457, 13.438952777777779, 10.98944591503268, 12.559151418439718, 6.622663601532567), # 53
(5.331320716112533, 13.479163636363635, 13.398208695652173, 10.980282352941177, 12.531114893617023, 6.559257571214393), # 54
(5.338690856777493, 13.488174715909091, 13.356325483091787, 10.970812908496733, 12.502171985815604, 6.494575628852241), # 55
(5.3459260869565215, 13.496961363636363, 13.313351690821257, 10.961044444444445, 12.472346099290782, 6.428717274695986), # 56
(5.353031777493607, 13.505521875000003, 13.269335869565218, 10.950983823529413, 12.441660638297872, 6.361782008995502), # 57
(5.360013299232737, 13.513854545454544, 13.224326570048309, 10.940637908496733, 12.410139007092198, 6.293869332000667), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_arriving_acc = (
(3, 11, 2, 7, 3, 0, 6, 8, 4, 4, 1, 0), # 0
(7, 20, 12, 13, 5, 0, 19, 17, 8, 10, 3, 0), # 1
(14, 28, 23, 16, 10, 0, 25, 21, 16, 19, 9, 0), # 2
(21, 35, 33, 18, 12, 0, 34, 28, 23, 24, 11, 0), # 3
(22, 43, 41, 22, 15, 0, 47, 35, 29, 28, 13, 0), # 4
(25, 47, 50, 24, 15, 0, 52, 44, 35, 29, 13, 0), # 5
(29, 58, 64, 26, 16, 0, 61, 53, 45, 37, 16, 0), # 6
(36, 67, 75, 29, 16, 0, 73, 63, 51, 45, 16, 0), # 7
(42, 78, 86, 35, 17, 0, 78, 74, 57, 47, 21, 0), # 8
(47, 85, 95, 44, 17, 0, 86, 87, 65, 49, 24, 0), # 9
(51, 97, 102, 51, 19, 0, 92, 97, 72, 54, 27, 0), # 10
(55, 105, 111, 58, 20, 0, 98, 107, 81, 62, 29, 0), # 11
(59, 112, 119, 66, 24, 0, 104, 120, 94, 63, 32, 0), # 12
(63, 118, 124, 70, 25, 0, 113, 133, 104, 67, 35, 0), # 13
(66, 123, 132, 74, 26, 0, 122, 145, 109, 77, 38, 0), # 14
(71, 132, 144, 76, 30, 0, 130, 154, 115, 84, 40, 0), # 15
(78, 139, 150, 83, 32, 0, 137, 169, 123, 87, 43, 0), # 16
(83, 150, 163, 85, 37, 0, 148, 178, 131, 93, 46, 0), # 17
(86, 166, 174, 88, 41, 0, 156, 191, 137, 96, 48, 0), # 18
(91, 178, 189, 92, 41, 0, 166, 201, 148, 100, 51, 0), # 19
(94, 187, 204, 103, 44, 0, 174, 213, 152, 104, 53, 0), # 20
(97, 194, 210, 107, 46, 0, 181, 221, 164, 110, 55, 0), # 21
(103, 206, 220, 109, 48, 0, 191, 227, 172, 118, 58, 0), # 22
(107, 215, 226, 113, 49, 0, 195, 237, 179, 126, 59, 0), # 23
(109, 225, 239, 120, 50, 0, 207, 245, 186, 130, 62, 0), # 24
(115, 234, 253, 124, 53, 0, 212, 260, 190, 135, 63, 0), # 25
(120, 251, 259, 129, 55, 0, 218, 270, 198, 144, 66, 0), # 26
(125, 259, 272, 135, 58, 0, 223, 281, 203, 150, 69, 0), # 27
(128, 272, 281, 142, 60, 0, 233, 295, 207, 152, 72, 0), # 28
(134, 284, 285, 145, 63, 0, 242, 303, 213, 161, 74, 0), # 29
(138, 299, 291, 151, 68, 0, 252, 311, 217, 166, 74, 0), # 30
(140, 308, 296, 152, 69, 0, 260, 326, 224, 169, 77, 0), # 31
(143, 317, 303, 158, 73, 0, 268, 335, 231, 173, 79, 0), # 32
(149, 335, 309, 161, 76, 0, 273, 346, 234, 178, 80, 0), # 33
(153, 341, 316, 164, 78, 0, 287, 357, 241, 184, 83, 0), # 34
(159, 349, 329, 170, 81, 0, 292, 364, 245, 186, 86, 0), # 35
(163, 357, 334, 172, 83, 0, 299, 373, 251, 192, 89, 0), # 36
(171, 364, 340, 174, 87, 0, 315, 386, 257, 196, 92, 0), # 37
(177, 377, 344, 180, 88, 0, 326, 395, 267, 200, 95, 0), # 38
(179, 391, 354, 185, 91, 0, 332, 406, 277, 206, 97, 0), # 39
(185, 400, 363, 192, 92, 0, 339, 415, 285, 210, 97, 0), # 40
(190, 409, 372, 194, 97, 0, 347, 419, 294, 214, 97, 0), # 41
(192, 426, 377, 195, 98, 0, 353, 432, 300, 223, 98, 0), # 42
(199, 442, 385, 198, 102, 0, 357, 442, 305, 232, 100, 0), # 43
(201, 456, 398, 200, 102, 0, 368, 455, 307, 235, 103, 0), # 44
(209, 467, 411, 203, 104, 0, 372, 467, 313, 244, 105, 0), # 45
(219, 479, 419, 209, 106, 0, 378, 474, 317, 247, 107, 0), # 46
(222, 487, 430, 215, 106, 0, 387, 478, 324, 254, 111, 0), # 47
(228, 498, 441, 220, 108, 0, 395, 487, 332, 260, 113, 0), # 48
(229, 507, 451, 226, 109, 0, 401, 500, 340, 269, 116, 0), # 49
(231, 516, 457, 227, 113, 0, 407, 510, 343, 272, 117, 0), # 50
(238, 532, 465, 234, 115, 0, 411, 516, 349, 278, 122, 0), # 51
(241, 542, 476, 237, 117, 0, 417, 523, 359, 282, 124, 0), # 52
(246, 555, 484, 240, 119, 0, 425, 532, 366, 285, 127, 0), # 53
(252, 568, 490, 248, 122, 0, 431, 541, 377, 291, 129, 0), # 54
(253, 584, 495, 255, 123, 0, 439, 549, 382, 295, 133, 0), # 55
(260, 589, 510, 260, 125, 0, 443, 559, 386, 299, 136, 0), # 56
(268, 605, 515, 264, 125, 0, 446, 565, 396, 305, 139, 0), # 57
(269, 611, 523, 265, 127, 0, 456, 576, 401, 313, 142, 0), # 58
(269, 611, 523, 265, 127, 0, 456, 576, 401, 313, 142, 0), # 59
)
passenger_arriving_rate = (
(4.239442493415277, 8.699469696969697, 7.674466580976864, 4.055217391304347, 2.2857692307692306, 0.0, 7.610869565217392, 9.143076923076922, 6.082826086956521, 5.1163110539845755, 2.174867424242424, 0.0), # 0
(4.27923521607648, 8.796213246352414, 7.715918916023995, 4.077801207729468, 2.3029012820512818, 0.0, 7.608275422705315, 9.211605128205127, 6.116701811594203, 5.1439459440159965, 2.1990533115881035, 0.0), # 1
(4.318573563554774, 8.891521795735128, 7.7563873179091685, 4.099887922705314, 2.3196717948717946, 0.0, 7.60560193236715, 9.278687179487179, 6.1498318840579715, 5.170924878606112, 2.222880448933782, 0.0), # 2
(4.357424143985952, 8.9852925, 7.795842223650386, 4.121458695652173, 2.336065384615385, 0.0, 7.60284945652174, 9.34426153846154, 6.18218804347826, 5.197228149100257, 2.246323125, 0.0), # 3
(4.395753565505805, 9.07742251402918, 7.834254070265637, 4.142494685990338, 2.352066666666667, 0.0, 7.600018357487922, 9.408266666666668, 6.213742028985508, 5.222836046843758, 2.269355628507295, 0.0), # 4
(4.433528436250122, 9.167808992704828, 7.8715932947729215, 4.1629770531400965, 2.367660256410256, 0.0, 7.597108997584541, 9.470641025641024, 6.244465579710145, 5.247728863181948, 2.291952248176207, 0.0), # 5
(4.470715364354698, 9.25634909090909, 7.907830334190233, 4.182886956521739, 2.382830769230769, 0.0, 7.594121739130435, 9.531323076923076, 6.274330434782609, 5.271886889460156, 2.3140872727272725, 0.0), # 6
(4.507280957955322, 9.34293996352413, 7.942935625535561, 4.2022055555555555, 2.397562820512821, 0.0, 7.591056944444445, 9.590251282051284, 6.303308333333334, 5.295290417023708, 2.3357349908810323, 0.0), # 7
(4.543191825187787, 9.427478765432097, 7.976879605826908, 4.220914009661835, 2.4118410256410256, 0.0, 7.587914975845411, 9.647364102564103, 6.3313710144927535, 5.317919737217938, 2.3568696913580243, 0.0), # 8
(4.578414574187884, 9.509862651515151, 8.009632712082263, 4.23899347826087, 2.4256499999999996, 0.0, 7.584696195652175, 9.702599999999999, 6.358490217391305, 5.339755141388175, 2.377465662878788, 0.0), # 9
(4.612915813091406, 9.589988776655444, 8.041165381319622, 4.256425120772947, 2.438974358974359, 0.0, 7.581400966183574, 9.755897435897436, 6.384637681159421, 5.360776920879748, 2.397497194163861, 0.0), # 10
(4.646662150034143, 9.66775429573513, 8.071448050556983, 4.273190096618357, 2.4517987179487175, 0.0, 7.578029649758455, 9.80719487179487, 6.409785144927537, 5.380965367037988, 2.4169385739337823, 0.0), # 11
(4.679620193151888, 9.743056363636363, 8.100451156812339, 4.289269565217391, 2.4641076923076923, 0.0, 7.574582608695652, 9.85643076923077, 6.433904347826087, 5.400300771208226, 2.4357640909090907, 0.0), # 12
(4.71175655058043, 9.815792135241303, 8.128145137103683, 4.304644685990338, 2.475885897435898, 0.0, 7.5710602053140095, 9.903543589743592, 6.456967028985507, 5.418763424735789, 2.4539480338103257, 0.0), # 13
(4.743037830455566, 9.8858587654321, 8.154500428449014, 4.3192966183574875, 2.4871179487179482, 0.0, 7.567462801932367, 9.948471794871793, 6.478944927536231, 5.4363336189660085, 2.471464691358025, 0.0), # 14
(4.773430640913081, 9.953153409090907, 8.179487467866322, 4.33320652173913, 2.4977884615384616, 0.0, 7.563790760869566, 9.991153846153846, 6.499809782608695, 5.452991645244214, 2.488288352272727, 0.0), # 15
(4.802901590088772, 10.017573221099887, 8.203076692373608, 4.346355555555555, 2.507882051282051, 0.0, 7.560044444444445, 10.031528205128204, 6.519533333333333, 5.468717794915738, 2.504393305274972, 0.0), # 16
(4.831417286118428, 10.079015356341188, 8.22523853898886, 4.358724879227053, 2.517383333333333, 0.0, 7.556224214975846, 10.069533333333332, 6.538087318840581, 5.483492359325907, 2.519753839085297, 0.0), # 17
(4.8589443371378405, 10.13737696969697, 8.245943444730077, 4.370295652173914, 2.5262769230769235, 0.0, 7.552330434782609, 10.105107692307694, 6.55544347826087, 5.4972956298200515, 2.5343442424242424, 0.0), # 18
(4.8854493512828014, 10.192555216049382, 8.265161846615253, 4.381049033816424, 2.534547435897436, 0.0, 7.5483634661835755, 10.138189743589743, 6.571573550724637, 5.510107897743501, 2.5481388040123454, 0.0), # 19
(4.910898936689104, 10.244447250280581, 8.282864181662381, 4.3909661835748794, 2.542179487179487, 0.0, 7.544323671497584, 10.168717948717948, 6.58644927536232, 5.5219094544415865, 2.5611118125701453, 0.0), # 20
(4.935259701492538, 10.292950227272724, 8.299020886889462, 4.400028260869565, 2.5491576923076917, 0.0, 7.540211413043479, 10.196630769230767, 6.600042391304348, 5.53268059125964, 2.573237556818181, 0.0), # 21
(4.958498253828894, 10.337961301907969, 8.313602399314481, 4.408216425120773, 2.555466666666666, 0.0, 7.5360270531400975, 10.221866666666664, 6.6123246376811595, 5.542401599542987, 2.584490325476992, 0.0), # 22
(4.980581201833967, 10.379377629068463, 8.326579155955441, 4.415511835748792, 2.5610910256410255, 0.0, 7.531770954106282, 10.244364102564102, 6.623267753623189, 5.551052770636961, 2.5948444072671157, 0.0), # 23
(5.001475153643547, 10.417096363636363, 8.337921593830332, 4.421895652173912, 2.5660153846153846, 0.0, 7.52744347826087, 10.264061538461538, 6.632843478260869, 5.558614395886888, 2.6042740909090907, 0.0), # 24
(5.0211467173934246, 10.451014660493826, 8.347600149957156, 4.427349033816426, 2.5702243589743587, 0.0, 7.523044987922706, 10.280897435897435, 6.641023550724639, 5.565066766638103, 2.6127536651234564, 0.0), # 25
(5.039562501219393, 10.481029674523006, 8.355585261353898, 4.431853140096617, 2.5737025641025637, 0.0, 7.518575845410628, 10.294810256410255, 6.647779710144927, 5.570390174235932, 2.6202574186307515, 0.0), # 26
(5.056689113257243, 10.507038560606059, 8.361847365038559, 4.435389130434783, 2.5764346153846156, 0.0, 7.514036413043479, 10.305738461538462, 6.653083695652175, 5.574564910025706, 2.6267596401515148, 0.0), # 27
(5.072493161642767, 10.52893847362514, 8.366356898029135, 4.437938164251207, 2.578405128205128, 0.0, 7.509427053140097, 10.313620512820512, 6.656907246376812, 5.5775712653527565, 2.632234618406285, 0.0), # 28
(5.086941254511755, 10.546626568462402, 8.369084297343615, 4.439481400966184, 2.579598717948718, 0.0, 7.504748128019324, 10.318394871794872, 6.659222101449276, 5.57938953156241, 2.6366566421156006, 0.0), # 29
(5.1000000000000005, 10.56, 8.370000000000001, 4.44, 2.58, 0.0, 7.5, 10.32, 6.660000000000001, 5.58, 2.64, 0.0), # 30
(5.112219245524297, 10.571108863636361, 8.369313550724637, 4.439906339869282, 2.5798539716312057, 0.0, 7.4934020156588375, 10.319415886524823, 6.659859509803923, 5.579542367149758, 2.6427772159090903, 0.0), # 31
(5.124174680306906, 10.582070909090909, 8.367273623188405, 4.439627189542483, 2.5794190070921985, 0.0, 7.483239613526571, 10.317676028368794, 6.659440784313724, 5.578182415458937, 2.6455177272727273, 0.0), # 32
(5.135871675191815, 10.592884772727274, 8.363909347826088, 4.439165294117647, 2.5786997872340423, 0.0, 7.469612293853072, 10.314799148936169, 6.658747941176471, 5.575939565217392, 2.6482211931818185, 0.0), # 33
(5.147315601023018, 10.603549090909091, 8.359249855072465, 4.438523398692811, 2.5777009929078014, 0.0, 7.452619556888223, 10.310803971631206, 6.657785098039217, 5.572833236714976, 2.6508872727272728, 0.0), # 34
(5.158511828644501, 10.614062499999998, 8.353324275362318, 4.437704248366013, 2.576427304964539, 0.0, 7.432360902881893, 10.305709219858157, 6.65655637254902, 5.568882850241546, 2.6535156249999994, 0.0), # 35
(5.169465728900256, 10.624423636363638, 8.346161739130434, 4.436710588235294, 2.5748834042553193, 0.0, 7.408935832083959, 10.299533617021277, 6.655065882352941, 5.564107826086956, 2.6561059090909094, 0.0), # 36
(5.180182672634271, 10.634631136363637, 8.337791376811595, 4.435545163398693, 2.573073971631205, 0.0, 7.382443844744294, 10.29229588652482, 6.65331774509804, 5.558527584541062, 2.6586577840909094, 0.0), # 37
(5.190668030690537, 10.644683636363636, 8.32824231884058, 4.4342107189542475, 2.5710036879432625, 0.0, 7.352984441112776, 10.28401475177305, 6.651316078431372, 5.5521615458937195, 2.661170909090909, 0.0), # 38
(5.200927173913044, 10.654579772727272, 8.317543695652173, 4.43271, 2.568677234042553, 0.0, 7.32065712143928, 10.274708936170212, 6.649065, 5.545029130434782, 2.663644943181818, 0.0), # 39
(5.21096547314578, 10.664318181818182, 8.305724637681159, 4.431045751633987, 2.566099290780141, 0.0, 7.285561385973679, 10.264397163120565, 6.646568627450981, 5.537149758454106, 2.6660795454545454, 0.0), # 40
(5.220788299232737, 10.673897499999997, 8.29281427536232, 4.429220718954248, 2.563274539007092, 0.0, 7.247796734965852, 10.253098156028368, 6.643831078431373, 5.5285428502415455, 2.6684743749999993, 0.0), # 41
(5.230401023017903, 10.683316363636365, 8.278841739130435, 4.427237647058823, 2.560207659574468, 0.0, 7.207462668665667, 10.240830638297872, 6.640856470588235, 5.519227826086957, 2.6708290909090913, 0.0), # 42
(5.239809015345269, 10.692573409090908, 8.26383615942029, 4.4250992810457515, 2.556903333333333, 0.0, 7.164658687323005, 10.227613333333332, 6.637648921568627, 5.509224106280192, 2.673143352272727, 0.0), # 43
(5.249017647058824, 10.701667272727272, 8.247826666666667, 4.422808366013072, 2.5533662411347517, 0.0, 7.119484291187739, 10.213464964539007, 6.634212549019608, 5.498551111111111, 2.675416818181818, 0.0), # 44
(5.258032289002557, 10.71059659090909, 8.23084239130435, 4.420367647058823, 2.5496010638297872, 0.0, 7.072038980509745, 10.198404255319149, 6.630551470588235, 5.487228260869566, 2.6776491477272724, 0.0), # 45
(5.266858312020461, 10.71936, 8.212912463768117, 4.417779869281045, 2.5456124822695037, 0.0, 7.022422255538898, 10.182449929078015, 6.626669803921568, 5.475274975845411, 2.67984, 0.0), # 46
(5.275501086956522, 10.727956136363636, 8.194066014492753, 4.415047777777778, 2.5414051773049646, 0.0, 6.970733616525071, 10.165620709219858, 6.6225716666666665, 5.462710676328501, 2.681989034090909, 0.0), # 47
(5.283965984654732, 10.736383636363637, 8.174332173913044, 4.412174117647059, 2.536983829787234, 0.0, 6.9170725637181425, 10.147935319148935, 6.618261176470588, 5.449554782608695, 2.6840959090909093, 0.0), # 48
(5.292258375959079, 10.744641136363633, 8.15374007246377, 4.409161633986929, 2.5323531205673757, 0.0, 6.861538597367982, 10.129412482269503, 6.613742450980394, 5.435826714975845, 2.6861602840909082, 0.0), # 49
(5.300383631713555, 10.752727272727268, 8.13231884057971, 4.406013071895425, 2.527517730496454, 0.0, 6.804231217724471, 10.110070921985816, 6.6090196078431385, 5.421545893719807, 2.688181818181817, 0.0), # 50
(5.308347122762149, 10.760640681818181, 8.110097608695652, 4.4027311764705885, 2.5224823404255314, 0.0, 6.7452499250374816, 10.089929361702126, 6.604096764705883, 5.406731739130435, 2.6901601704545453, 0.0), # 51
(5.316154219948849, 10.768379999999999, 8.087105507246376, 4.399318692810457, 2.517251631205674, 0.0, 6.684694219556889, 10.069006524822695, 6.5989780392156865, 5.391403671497584, 2.6920949999999997, 0.0), # 52
(5.3238102941176475, 10.775943863636364, 8.063371666666667, 4.395778366013072, 2.5118302836879436, 0.0, 6.622663601532567, 10.047321134751774, 6.593667549019608, 5.375581111111111, 2.693985965909091, 0.0), # 53
(5.331320716112533, 10.783330909090907, 8.038925217391304, 4.392112941176471, 2.5062229787234043, 0.0, 6.559257571214393, 10.024891914893617, 6.5881694117647065, 5.359283478260869, 2.6958327272727267, 0.0), # 54
(5.338690856777493, 10.790539772727271, 8.013795289855072, 4.388325163398693, 2.5004343971631204, 0.0, 6.494575628852241, 10.001737588652482, 6.58248774509804, 5.342530193236715, 2.697634943181818, 0.0), # 55
(5.3459260869565215, 10.79756909090909, 7.988011014492754, 4.384417777777777, 2.494469219858156, 0.0, 6.428717274695986, 9.977876879432625, 6.576626666666667, 5.325340676328502, 2.6993922727272723, 0.0), # 56
(5.353031777493607, 10.804417500000001, 7.96160152173913, 4.380393529411765, 2.4883321276595742, 0.0, 6.361782008995502, 9.953328510638297, 6.570590294117648, 5.307734347826087, 2.7011043750000003, 0.0), # 57
(5.360013299232737, 10.811083636363634, 7.934595942028984, 4.376255163398692, 2.4820278014184396, 0.0, 6.293869332000667, 9.928111205673758, 6.564382745098039, 5.289730628019323, 2.7027709090909084, 0.0), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_allighting_rate = (
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 0
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 1
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 2
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 3
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 4
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 5
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 6
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 7
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 8
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 9
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 10
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 11
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 12
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 13
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 14
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 15
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 16
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 17
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 18
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 19
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 20
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 21
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 22
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 23
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 24
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 25
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 26
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 27
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 28
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 29
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 30
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 31
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 32
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 33
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 34
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 35
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 36
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 37
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 38
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 39
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 40
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 41
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 42
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 43
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 44
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 45
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 46
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 47
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 48
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 49
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 50
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 51
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 52
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 53
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 54
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 55
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 56
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 57
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 58
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 59
)
"""
parameters for reproducibility. More information: https://numpy.org/doc/stable/reference/random/parallel.html
"""
# Initial entropy fed to numpy's SeedSequence so parallel runs are reproducible.
entropy = 258194110137029475889902652135037600173
# Spawn-key indices selecting which SeedSequence children this run consumes.
child_seed_index = (
    1, # 0
    95, # 1
)
| 113.686567
| 214
| 0.730498
| 5,147
| 38,085
| 5.403147
| 0.232174
| 0.31068
| 0.245955
| 0.466019
| 0.326429
| 0.32607
| 0.325566
| 0.325566
| 0.325566
| 0.325566
| 0
| 0.820083
| 0.118524
| 38,085
| 334
| 215
| 114.026946
| 0.008311
| 0.031797
| 0
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.015823
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6e324a51b72b07457cd62a406e2c7a294aeca1af
| 188
|
py
|
Python
|
cge/output/trash/output.py
|
cadms/resfinder
|
f75c5205ca82ca825c2bef5494060d5169788135
|
[
"Apache-2.0"
] | null | null | null |
cge/output/trash/output.py
|
cadms/resfinder
|
f75c5205ca82ca825c2bef5494060d5169788135
|
[
"Apache-2.0"
] | null | null | null |
cge/output/trash/output.py
|
cadms/resfinder
|
f75c5205ca82ca825c2bef5494060d5169788135
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
from .exceptions import DuplicateKeyError
from .exceptions import LockedObjectError
class Write():
    """Collects output-writing entry points for result tables.

    NOTE(review): ``txt_table`` has no ``self`` parameter and no
    ``@staticmethod`` decorator — as written it is only safely callable
    through the class, e.g. ``Write.txt_table(result)``; confirm intent.
    """

    def txt_table(tableresult):
        """Render ``tableresult`` as a plain-text table (not yet implemented)."""
        return None
| 13.428571
| 41
| 0.675532
| 19
| 188
| 6.631579
| 0.842105
| 0.222222
| 0.31746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006849
| 0.223404
| 188
| 13
| 42
| 14.461538
| 0.856164
| 0.111702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
6e45ac8616c8ed801848a62f9e94011d10173310
| 126
|
py
|
Python
|
datastructures/arrays/array_from_permutation/__init__.py
|
JASTYN/pythonmaster
|
46638ab09d28b65ce5431cd0759fe6df272fb85d
|
[
"Apache-2.0",
"MIT"
] | 3
|
2017-05-02T10:28:13.000Z
|
2019-02-06T09:10:11.000Z
|
datastructures/arrays/array_from_permutation/__init__.py
|
JASTYN/pythonmaster
|
46638ab09d28b65ce5431cd0759fe6df272fb85d
|
[
"Apache-2.0",
"MIT"
] | 2
|
2017-06-21T20:39:14.000Z
|
2020-02-25T10:28:57.000Z
|
datastructures/arrays/array_from_permutation/__init__.py
|
JASTYN/pythonmaster
|
46638ab09d28b65ce5431cd0759fe6df272fb85d
|
[
"Apache-2.0",
"MIT"
] | 2
|
2016-07-29T04:35:22.000Z
|
2017-01-18T17:05:36.000Z
|
from typing import List
def build_array(nums: List[int]) -> List[int]:
    """Return ``ans`` with ``ans[i] == nums[nums[i]]``.

    ``nums`` is expected to be a permutation of ``0..len(nums)-1`` so each
    element is itself a valid index; iterating the values directly is
    equivalent to indexing with ``range(len(nums))``.
    """
    return [nums[value] for value in nums]
| 21
| 52
| 0.674603
| 22
| 126
| 3.818182
| 0.681818
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 126
| 5
| 53
| 25.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
2811705836c04b6c393b7f642db983414d044049
| 43,115
|
py
|
Python
|
scripts/create_experiments/create_json_experiments.py
|
rvalienter90/rl-agents
|
ad6be08f9a7e2f0ec0daf6f557bd9f476bb9e4da
|
[
"MIT"
] | null | null | null |
scripts/create_experiments/create_json_experiments.py
|
rvalienter90/rl-agents
|
ad6be08f9a7e2f0ec0daf6f557bd9f476bb9e4da
|
[
"MIT"
] | null | null | null |
scripts/create_experiments/create_json_experiments.py
|
rvalienter90/rl-agents
|
ad6be08f9a7e2f0ec0daf6f557bd9f476bb9e4da
|
[
"MIT"
] | null | null | null |
import json
import numpy as np
from pathlib import Path
import os
def get_project_root() -> Path:
    """Return the project root: the directory two levels above this file."""
    this_file = Path(__file__)
    return this_file.parent.parent
root_folder = get_project_root()

# Template experiment configuration.  Every generator block below mutates
# this single shared dict in place and dumps a snapshot to a numbered JSON
# file.  NOTE(review): the dict is never reset between blocks, so keys set
# by an earlier block leak into later experiments — confirm this is intended.
data = {
    "info": "",
    "additional_folder_name": "",
    "base_config": "",
    "observation": {
        "observation_config":{}
    },
    "reward": {},
    "scenario": {},
    "merging_vehicle": {},
    "exit_vehicle": {},
    "agent_config": {
        "exploration": {},
        "model": {},
        "optimizer":{}
    },
    "cruising_vehicle": {},
    "action": {
        "action_config": {},
    }
}

# Destination folder and file-name prefix for the generated experiment JSONs.
# ini_folder="./scripts/rl_agents_scripts/configs/experiments/IEEE_Access/"
# ini_folder = "./scripts/rl_agents_scripts/configs/experiments/IROS/"
ini_folder = "./scripts/rl_agents_scripts/configs/experiments/Behavior/"
# base_name_json = "exp_merge_IROS_"
base_name_json = "exp_behavior_"
# Number of the first generated file; incremented after each json.dump below.
start_num =110

# Experiment-family switches: only blocks whose flag is True run below.
# In this revision only generalization_300s is enabled.
play_with_rewards = False
play_with_merging_position = False
play_with_coop = False
play_with_mission_is_controlled = False
play_with_randomness = False
ablation_action_history = False
ablation_state_representation_f1 = False
ablation_state_representation_f2 = False
complex_scenario = False
state_representation_conv = False
IROS_exp2000 = False
IROS_exp3000 = False
IROS_exp4000 = False
IROS_exp5000 = False
IROS_exp9100 = False
complex_100 = False
Behavior_100s = False
behavior_400s_sensitivity = False
behavior_500s_sensitivity = False
behavior_900s_sensitivity = False
generalization_100s = False
generalization_120s = False
generalization_200s = False
generalization_250s = False
generalization_300s = True
# --- Generalization experiments -------------------------------------------
# Each enabled block writes one JSON per parameter combination into
# ini_folder, numbering files from its own start_num.

if generalization_100s:
    ini_folder = os.path.join(root_folder,'configs/experiments/Multienv/')
    base_name_json = "exp_generalization_"
    base_config_folder = "configs/experiments/Multienv/"
    # start_num = 400
    start_num = 100
    # One experiment per (base config, latent dimension) pair.
    for base_config in ["exp_base_multi_sa_latent.json", "exp_multi_Grid_DQN_sa_latent.json","exp_base_multi_Image_sa_latent.json"]:
        # for base_config in ["exp_agressive_DQN.json","exp_neutral_DQN.json","exp_conservative_DQN.json"]:
        data["base_config"] = base_config_folder + base_config
        for latent_dimention in [16,32,64]:
            data['latent_dimention']=latent_dimention
            name = ini_folder + base_name_json + str(start_num) + '.json'
            start_num += 1
            with open(name, 'w') as outfile:
                json.dump(data, outfile)
if generalization_120s:
    # Same as 100s but with the non-latent base configs (no latent sweep).
    ini_folder = os.path.join(root_folder,'configs/experiments/Multienv/')
    base_name_json = "exp_generalization_"
    base_config_folder = "configs/experiments/Multienv/"
    # start_num = 400
    start_num = 120
    for base_config in ["exp_base_multi_sa.json", "exp_multi_Grid_DQN_sa.json","exp_base_multi_Image_sa.json"]:
        # for base_config in ["exp_agressive_DQN.json","exp_neutral_DQN.json","exp_conservative_DQN.json"]:
        data["base_config"] = base_config_folder + base_config
        name = ini_folder + base_name_json + str(start_num) + '.json'
        start_num += 1
        with open(name, 'w') as outfile:
            json.dump(data, outfile)
if generalization_200s:
    ini_folder = os.path.join(root_folder,'configs/experiments/Multienv/')
    base_name_json = "exp_generalization_"
    base_config_folder = "configs/experiments/Multienv/"
    # start_num = 400
    start_num = 200
    # Sweep: road type x base config x latent dimension.
    for scenario in ["intersection", "roundabout","road_merge","road_exit"]:
        for base_config in ["exp_base_multi_sa_latent.json", "exp_multi_Grid_DQN_sa_latent.json","exp_base_multi_Image_sa_latent.json"]:
            # for base_config in ["exp_agressive_DQN.json","exp_neutral_DQN.json","exp_conservative_DQN.json"]:
            data['scenario']['road_types']=[scenario]
            data["base_config"] = base_config_folder + base_config
            for latent_dimention in [16,32,64]:
                data['latent_dimention']=latent_dimention
                name = ini_folder + base_name_json + str(start_num) + '.json'
                start_num += 1
                with open(name, 'w') as outfile:
                    json.dump(data, outfile)
if generalization_250s:
    ini_folder = os.path.join(root_folder,'configs/experiments/Multienv/')
    base_name_json = "exp_generalization_"
    base_config_folder = "configs/experiments/Multienv/"
    start_num = 250
    for scenario in ["intersection", "roundabout","road_merge","road_exit"]:
        for base_config in ["exp_base_multi_sa.json", "exp_multi_Grid_DQN_sa.json","exp_base_multi_Image_sa.json"]:
            # for base_config in ["exp_agressive_DQN.json","exp_neutral_DQN.json","exp_conservative_DQN.json"]:
            data['scenario']['road_types']=[scenario]
            data["base_config"] = base_config_folder + base_config
            name = ini_folder + base_name_json + str(start_num) + '.json'
            start_num += 1
            with open(name, 'w') as outfile:
                json.dump(data, outfile)
if generalization_300s:
    ini_folder = os.path.join(root_folder,'configs/experiments/Multienv/')
    base_name_json = "exp_generalization_"
    base_config_folder = "configs/experiments/Multienv/"
    start_num = 350
    # 'allenv' trains on all four road types simultaneously.
    allenv = ["intersection", "roundabout","road_merge","road_exit"]
    for base_config in ["exp_generalization_120.json", "exp_generalization_250.json","exp_generalization_253.json","exp_generalization_256.json","exp_generalization_259.json"]:
        # for base_config in ["exp_generalization_100.json", "exp_generalization_200.json","exp_generalization_209.json","exp_generalization_218.json","exp_generalization_227.json"]:
        for scenario in ["intersection", "roundabout", "road_merge", "road_exit", allenv]:
            # for base_config in ["exp_agressive_DQN.json","exp_neutral_DQN.json","exp_conservative_DQN.json"]:
            # A list means "all environments"; a plain string is one road type.
            if isinstance(scenario,list):
                data['scenario']['road_types'] = scenario
            else:
                data['scenario']['road_types']=[scenario]
            data["base_config"] = base_config_folder + base_config
            name = ini_folder + base_name_json + str(start_num) + '.json'
            start_num += 1
            with open(name, 'w') as outfile:
                json.dump(data, outfile)
if behavior_400s_sensitivity:
    # Exit-scenario sensitivity: (controlled merging vehicle?) x (3 driver
    # styles) x (cooperative reward on/off).
    base_config_folder = "configs/experiments/Behavior/"
    # start_num = 400
    start_num = 420
    for controlled_vehicle in [True, False]:
        # for base_config in ["exp_agressive_DQN.json","exp_neutral_DQN.json","exp_conservative_DQN.json"]:
        for base_config in ["exp_agressive_DQN_exit.json", "exp_neutral_DQN_exit.json", "exp_conservative_DQN_exit.json"]:
            # for base_config in ["exp_agressive_DQN.json", "exp_conservative_DQN.json", "exp_neutral_DQN.json"]:
            # for base_config in ["exp_agressive_MLP.json", "exp_conservative_MLP.json", "exp_neutral_MLP.json"]:
            data["base_config"] = base_config_folder + base_config
            for cooperative_flag, sympathy_flag in [(True, True), (False, False)]:
                # for type in ["highway_env.vehicle.behavior.CustomVehicle" , "highway_env.vehicle.behavior.CustomVehicleAggressive"]:
                for type in ["highway_env.vehicle.behavior.CustomVehicle"]:
                    data["reward"]["cooperative_flag"] = cooperative_flag
                    data["reward"]["sympathy_flag"] = sympathy_flag
                    # data["observation"]["cooperative_perception"] = True
                    data["additional_folder_name"] = "exp_merge_" + str(start_num)
                    data["merging_vehicle"]['vehicles_type'] = type
                    data["merging_vehicle"]['controlled_vehicle'] = controlled_vehicle # True
                    # data["merging_vehicle"]['max_speed'] = 25
                    data["tracker_logging"] = True
                    # Lateral actions only when the agent controls the vehicle.
                    if controlled_vehicle:
                        data['action']['action_config']['lateral'] = True
                    else:
                        data['action']['action_config']['lateral'] = False
                    info = "exp_merge_" + str(start_num) + " similar to IEEE_Access/exp_base " + \
                           " cooperative_flag " + str(data["reward"]["cooperative_flag"]) + \
                           " sympathy_flag " + str(data["reward"]["sympathy_flag"]) + \
                           " type= " + str(type) + \
                           " controlled_vehicle " + str(data["merging_vehicle"]['controlled_vehicle'])
                    data['info'] = info
                    name = ini_folder + base_name_json + str(start_num) + '.json'
                    start_num += 1
                    with open(name, 'w') as outfile:
                        json.dump(data, outfile)
if behavior_500s_sensitivity:
    # IDM/MOBIL parameter sensitivity: interpolate n settings between polite
    # and aggressive longitudinal/lateral behaviour for cruising vehicles.
    start_num = 300
    n = 4
    distance_wantedn = np.linspace(7, 0.5, n)
    time_wantedn = np.linspace(2, 0.5, n)
    comfort_acc_maxn = np.linspace(2, 9, n)
    comfort_acc_minn = np.linspace(-4, -12, n)
    longitudinal = []
    for i in range(0,n):
        longitudinal.append((distance_wantedn[i], time_wantedn[i], comfort_acc_maxn[i], comfort_acc_minn[i]))
    politenessn = np.linspace(0.7, 0, n)
    lane_change_min_acc_gainn = np.linspace(0.4, 0, n)
    lane_change_max_braking_imposedn = np.linspace(2, 12, n)
    lateral = []
    for i in range(0, n):
        lateral.append((politenessn[i], lane_change_min_acc_gainn[i], lane_change_max_braking_imposedn[i]))
    base_config_folder = "configs/experiments/Behavior/"
    # start_num = 400
    for controlled_vehicle in [False]:
        for base_config in ["exp_behavior_highway_base.json"]:
            # for base_config in ["exp_agressive_complex_DQN.json"]:
            # for base_config in ["exp_agressive_DQN_exit.json"]:
            # for base_config in ["exp_agressive_DQN.json"]:
            for distance_wanted, time_wanted, comfort_acc_max, comfort_acc_min in longitudinal:
                for politeness, lane_change_min_acc_gain, lane_change_max_braking_imposed in lateral:
                    data["cruising_vehicle"]['distance_wanted'] = distance_wanted
                    data["cruising_vehicle"]['time_wanted'] = time_wanted
                    data["cruising_vehicle"]['comfort_acc_max'] = comfort_acc_max
                    data["cruising_vehicle"]['comfort_acc_min'] = comfort_acc_min
                    data["cruising_vehicle"]['politeness'] = politeness
                    data["cruising_vehicle"]['lane_change_min_acc_gain'] = lane_change_min_acc_gain
                    data["cruising_vehicle"]['lane_change_max_braking_imposed'] = lane_change_max_braking_imposed
                    data["base_config"] = base_config_folder + base_config
                    for cooperative_flag, sympathy_flag in [(True, True), (False, False)]:
                        # for type in ["highway_env.vehicle.behavior.CustomVehicle" , "highway_env.vehicle.behavior.CustomVehicleAggressive"]:
                        for type in ["highway_env.vehicle.behavior.CustomVehicle"]:
                            data["reward"]["cooperative_flag"] = cooperative_flag
                            data["reward"]["sympathy_flag"] = sympathy_flag
                            # data["observation"]["cooperative_perception"] = True
                            data["additional_folder_name"] = "exp_merge_" + str(start_num)
                            data["merging_vehicle"]['vehicles_type'] = type
                            data["merging_vehicle"]['controlled_vehicle'] = controlled_vehicle # True
                            # data["merging_vehicle"]['max_speed'] = 25
                            data["tracker_logging"] = True
                            # NOTE(review): both branches set lateral=True here (the
                            # 400s block sets False in the else) — looks like a
                            # copy-paste slip; confirm before relying on it.
                            if controlled_vehicle:
                                data['action']['action_config']['lateral'] = True
                            else:
                                data['action']['action_config']['lateral'] = True
                            info = "exp_merge_" + str(start_num) + " similar to IEEE_Access/exp_base " + \
                                   " cooperative_flag " + str(data["reward"]["cooperative_flag"]) + \
                                   " sympathy_flag " + str(data["reward"]["sympathy_flag"]) + \
                                   " type= " + str(type) + \
                                   " controlled_vehicle " + str(data["merging_vehicle"]['controlled_vehicle'])
                            data['info'] = info
                            name = ini_folder + base_name_json + str(start_num) + '.json'
                            start_num += 1
                            with open(name, 'w') as outfile:
                                json.dump(data, outfile)
if behavior_900s_sensitivity:
    # Exit-scenario sensitivity: coarse 2-point IDM/MOBIL interpolation
    # crossed with the exit vehicle's spawn-offset range.
    start_num = 900
    n = 2
    distance_wantedn = np.linspace(7, 0.5, n)
    time_wantedn = np.linspace(2, 0.5, n)
    comfort_acc_maxn = np.linspace(2, 9, n)
    comfort_acc_minn = np.linspace(-4, -12, n)
    longitudinal = []
    for i in range(0,n):
        longitudinal.append((distance_wantedn[i], time_wantedn[i], comfort_acc_maxn[i], comfort_acc_minn[i]))
    politenessn = np.linspace(0.7, 0, n)
    lane_change_min_acc_gainn = np.linspace(0.4, 0, n)
    lane_change_max_braking_imposedn = np.linspace(2, 12, n)
    lateral = []
    for i in range(0, n):
        lateral.append((politenessn[i], lane_change_min_acc_gainn[i], lane_change_max_braking_imposedn[i]))
    base_config_folder = "configs/experiments/Behavior/"
    # start_num = 400
    for controlled_vehicle in [False]:
        # for base_config in ["exp_behavior_highway_base.json"]:
        # for base_config in ["exp_agressive_complex_DQN.json"]:
        for base_config in ["exp_agressive_DQN_exit.json"]:
            # for base_config in ["exp_agressive_DQN.json"]:
            for distance_wanted, time_wanted, comfort_acc_max, comfort_acc_min in longitudinal:
                for politeness, lane_change_min_acc_gain, lane_change_max_braking_imposed in lateral:
                    data["cruising_vehicle"]['distance_wanted'] = distance_wanted
                    data["cruising_vehicle"]['time_wanted'] = time_wanted
                    data["cruising_vehicle"]['comfort_acc_max'] = comfort_acc_max
                    data["cruising_vehicle"]['comfort_acc_min'] = comfort_acc_min
                    data["cruising_vehicle"]['politeness'] = politeness
                    data["cruising_vehicle"]['lane_change_min_acc_gain'] = lane_change_min_acc_gain
                    data["cruising_vehicle"]['lane_change_max_braking_imposed'] = lane_change_max_braking_imposed
                    data["base_config"] = base_config_folder + base_config
                    # Symmetric spawn offset [vals, vals] for the exit vehicle.
                    for vals in [10,15,20,25,30]:
                        data["exit_vehicle"]['random_offset_exit'] = [vals, vals]
                        data["tracker_logging"] = True
                        name = ini_folder + base_name_json + str(start_num) + '.json'
                        start_num += 1
                        with open(name, 'w') as outfile:
                            json.dump(data, outfile)
if Behavior_100s:
    # Conv3D driver-style experiments: style x cooperative x controlled flag.
    base_config_folder = "configs/experiments/Behavior/"
    start_num = 220
    for base_config in ["exp_agressive_Conv3D.json","exp_conservative_Conv3D.json","exp_neutral_Conv3D.json"]:
        # for base_config in ["exp_agressive_DQN.json", "exp_conservative_DQN.json", "exp_neutral_DQN.json"]:
        # for base_config in ["exp_agressive_MLP.json", "exp_conservative_MLP.json", "exp_neutral_MLP.json"]:
        data["base_config"] = base_config_folder + base_config
        for cooperative_flag, sympathy_flag in [(True, True)]:
            # for type in ["highway_env.vehicle.behavior.CustomVehicle" , "highway_env.vehicle.behavior.CustomVehicleAggressive"]:
            for controlled_vehicle in [True, False]:
                for type in ["highway_env.vehicle.behavior.CustomVehicle"]:
                    data["reward"]["cooperative_flag"] = cooperative_flag
                    data["reward"]["sympathy_flag"] = sympathy_flag
                    # data["observation"]["cooperative_perception"] = True
                    data["additional_folder_name"] = "exp_merge_" + str(start_num)
                    data["merging_vehicle"]['vehicles_type'] = type
                    data["merging_vehicle"]['controlled_vehicle'] = controlled_vehicle # True
                    # data["merging_vehicle"]['max_speed'] = 25
                    data["tracker_logging"] = True
                    info = "exp_merge_" + str(start_num) + " similar to IEEE_Access/exp_base " + \
                           " cooperative_flag " + str(data["reward"]["cooperative_flag"]) + \
                           " sympathy_flag " + str(data["reward"]["sympathy_flag"]) + \
                           " type= " + str(type) + \
                           " controlled_vehicle " + str(data["merging_vehicle"]['controlled_vehicle'])
                    data['info'] = info
                    name = ini_folder + base_name_json + str(start_num) + '.json'
                    start_num += 1
                    with open(name, 'w') as outfile:
                        json.dump(data, outfile)
if complex_100:
    # Complex-scenario experiments: 4 base configs x controlled flag.
    base_config_folder = "configs/experiments/complex/"
    start_num = 100
    for base_config in ["exp_merge_complex_100base-1.json","exp_merge_complex_100base-2.json", "exp_merge_complex_100base-3.json", "exp_merge_complex_100base-3-2.json"]:
        for controlled_vehicle in [True, False]:
            data["base_config"] = base_config_folder + base_config
            data["merging_vehicle"]["controlled_vehicle"] = controlled_vehicle
            info = "exp_merge_" + str(start_num) + " similar to " + data["base_config"] + \
                   " controlled_vehicle " + str(controlled_vehicle)
            data["additional_folder_name"] = "exp_merge_complex" + str(start_num)
            data['info'] = info
            data["tracker_logging"] = True
            name = ini_folder + base_name_json + str(start_num) + '.json'
            start_num += 1
            with open(name, 'w') as outfile:
                json.dump(data, outfile)
if IROS_exp9100:
    # Random merging-offset experiments.  NOTE(review): start_num is not reset
    # here, so numbering continues from the module-level value (110) — confirm.
    base_config_folder = "configs/experiments/IROS/"
    for base_config in ["exp_merge_IROS_9000base-1.json", "exp_merge_IROS_9000base-2.json",
                        "exp_merge_IROS_9000base-2-1.json","exp_merge_IROS_9000base-3.json",
                        "exp_merge_IROS_9000base-3-1.json","exp_merge_IROS_9000base-3-2.json"]:
        for random_offset_merging in [20]:
            data["base_config"] = base_config_folder + base_config
            data["merging_vehicle"]["random_offset_merging"] = [-random_offset_merging , random_offset_merging]
            info = "exp_merge_" + str(start_num) + " similar to " + data["base_config"] + \
                   " random_offset_merging " + str(random_offset_merging)
            data["additional_folder_name"] = "exp_merge_IROS" + str(start_num)
            data['info'] = info
            data["tracker_logging"] = True
            name = ini_folder + base_name_json + str(start_num) + '.json'
            start_num += 1
            with open(name, 'w') as outfile:
                json.dump(data, outfile)
if IROS_exp2000:
    # DQN hyper-parameter grid: gamma x memory x target_update x tau.
    base_config_folder = "configs/experiments/IROS/"
    for network in ["ConvNetStanfordMARLNoRes"]:
        for base_config in ["exp_merge_IROS_2000_base-1.json", "exp_merge_IROS_2000_base-2.json",
                            "exp_merge_IROS_2000_base-3.json"]:
            for gama in [0.8, 0.99]:
                for memory_capacity in [20000]:
                    for target_update in [50, 500]:
                        for tau in [10000]:
                            data["base_config"] = base_config_folder + base_config
                            data["agent_config"]["gamma"] = gama
                            data["agent_config"]["memory_capacity"] = memory_capacity
                            data["agent_config"]["target_update"] = target_update
                            data["agent_config"]["exploration"]["tau"] = tau
                            data["agent_config"]["model"]["type"] = network
                            info = "exp_merge_" + str(start_num) + " similar to " + data["base_config"] + \
                                   " model " + str(network) + \
                                   " tau " + str(tau) + \
                                   " target_update " + str(target_update) + \
                                   " memory_capacity " + str(memory_capacity) + \
                                   " gama " + str(gama)
                            data["additional_folder_name"] = "exp_merge_IROS" + str(start_num)
                            data['info'] = info
                            data["tracker_logging"] = True
                            name = ini_folder + base_name_json + str(start_num) + '.json'
                            start_num += 1
                            with open(name, 'w') as outfile:
                                json.dump(data, outfile)
if IROS_exp3000:
    # Optimizer / double-DQN / policy-frequency grid for ConvNet3Layer.
    base_config_folder = "configs/experiments/IROS/"
    for network in ["ConvNet3Layer"]:
        for base_config in ["exp_merge_IROS_3000base-1.json", "exp_merge_IROS_3000base-2.json",
                            "exp_merge_IROS_3000base-3.json","exp_merge_IROS_3000base-4.json"]:
            for double in [False]:
                for optimizer in ["ADAM", "RMS_PROP"]:
                    for policy_frequency in [1, 5]:
                        data["agent_config"]["model"]["type"] = network
                        data["base_config"] = base_config_folder + base_config
                        data["agent_config"]["double"] = double
                        data["agent_config"]["optimizer"]["type"] = optimizer
                        data["policy_frequency"] = policy_frequency
                        info = "exp_merge_" + str(start_num) + " similar to " + data["base_config"] + \
                               " double " + str(double) + \
                               " optimizer " + str(optimizer) + \
                               " network " + str(network) + \
                               " policy_frequency " + str(policy_frequency)
                        data["additional_folder_name"] = "exp_merge_IROS" + str(start_num)
                        data['info'] = info
                        data["tracker_logging"] = True
                        name = ini_folder + base_name_json + str(start_num) + '.json'
                        start_num += 1
                        with open(name, 'w') as outfile:
                            json.dump(data, outfile)
if IROS_exp4000:
    # Exploration / observation-shape grid for the Stanford MARL networks.
    base_config_folder = "configs/experiments/IROS/"
    for network in ["ConvNetStanfordMARLNoRes", "ConvNetStanfordMARLRes"]:
        for base_config in ["exp_merge_IROS_4000base-1.json", "exp_merge_IROS_4000base-2.json",
                            "exp_merge_IROS_4000base-3.json","exp_merge_IROS_4000base-4.json"]:
            for tau in [20000]:
                for exploration in ["EpsilonGreedyLinear"]:
                    # for exploration in ["EpsilonGreedy", "EpsilonGreedyLinear"]:
                    # for observation_shape in [[500, 64], [200, 64]]:
                    for observation_shape in [[200,64]]:
                        data["agent_config"]["model"]["type"] = network
                        data["base_config"] = base_config_folder + base_config
                        # data["agent_config"]["double"] = double
                        # data["agent_config"]["optimizer"]["type"] = optimizer
                        # data["policy_frequency"] = policy_frequency
                        data["agent_config"]["exploration"]["method"] = exploration
                        data["agent_config"]["exploration"]["tau"] = tau
                        data["observation"]["observation_config"]["observation_shape"] = observation_shape
                        # NOTE(review): duplicated assignment kept from the original.
                        data["observation"]["observation_config"]["observation_shape"] = observation_shape
                        info = "exp_merge_" + str(start_num) + " similar to " + data["base_config"] + \
                               " exploration " + str(exploration) + \
                               " tau " + str(tau) + \
                               " observation_shape " + str(observation_shape) + \
                               " network " + str(network)
                        data["additional_folder_name"] = "exp_merge_IROS" + str(start_num)
                        data['info'] = info
                        data["tracker_logging"] = True
                        name = ini_folder + base_name_json + str(start_num) + '.json'
                        start_num += 1
                        with open(name, 'w') as outfile:
                            json.dump(data, outfile)
if IROS_exp5000:
    # 3-D conv networks: observation shape x history-stack-size grid.
    base_config_folder = "configs/experiments/IROS/"
    for network in ["ConvNet3D", "ConvNet3DResidual"]:
        for base_config in ["exp_merge_IROS_5000base-1.json", "exp_merge_IROS_5000base-2.json" ]:
            for tau in [20000]:
                for exploration in ["EpsilonGreedyLinear"]:
                    # for exploration in ["EpsilonGreedy", "EpsilonGreedyLinear"]:
                    for observation_shape in [[500, 64], [200, 64]]:
                        # for observation_shape in [[200,64]]:
                        for history_stack_size in [5,10,15]:
                            data["agent_config"]["model"]["type"] = network
                            data["base_config"] = base_config_folder + base_config
                            # data["agent_config"]["double"] = double
                            # data["agent_config"]["optimizer"]["type"] = optimizer
                            # data["policy_frequency"] = policy_frequency
                            data["agent_config"]["exploration"]["method"] = exploration
                            data["agent_config"]["exploration"]["tau"] = tau
                            data["observation"]["observation_config"]["observation_shape"] = observation_shape
                            # NOTE(review): duplicated assignment kept from the original.
                            data["observation"]["observation_config"]["observation_shape"] = observation_shape
                            data["observation"]["observation_config"]["history_stack_size"] = history_stack_size
                            info = "exp_merge_" + str(start_num) + " similar to " + data["base_config"] + \
                                   " exploration " + str(exploration) + \
                                   " tau " + str(tau) + \
                                   " observation_shape " + str(observation_shape) + \
                                   " history_stack_size " + str(history_stack_size) + \
                                   " network " + str(network)
                            data["additional_folder_name"] = "exp_merge_IROS" + str(start_num)
                            data['info'] = info
                            data["tracker_logging"] = True
                            name = ini_folder + base_name_json + str(start_num) + '.json'
                            start_num += 1
                            with open(name, 'w') as outfile:
                                json.dump(data, outfile)
if play_with_rewards:
    # Reward-shaping sweep: vary collision and merging-success rewards while
    # keeping the speed and cooperation rewards fixed; one config per combo.
    for collision_reward in [0, -2]:
        for successful_merging_reward in [0, 10]:
            for high_speed_reward in [1]:
                for cooperative_reward in [4]:
                    reward_cfg = data["reward"]
                    reward_cfg["cooperative_flag"] = True
                    # Sympathy is only enabled when merging success is rewarded.
                    reward_cfg["sympathy_flag"] = successful_merging_reward != 0
                    info = (
                        f"exp_merge_{start_num} similar to {data['base_config']}"
                        f" cooperative_flag {reward_cfg['cooperative_flag']}"
                        f" sympathy_flag {reward_cfg['sympathy_flag']}"
                        f" collision_reward= {collision_reward} and "
                        f"successful_merging_reward= {successful_merging_reward} and "
                        f"high_speed_reward= {high_speed_reward} and "
                        f"cooperative_reward= {cooperative_reward}"
                    )
                    data["additional_folder_name"] = f"exp_merge_{start_num}"
                    data['info'] = info
                    reward_cfg['collision_reward'] = collision_reward
                    reward_cfg['successful_merging_reward'] = successful_merging_reward
                    reward_cfg['high_speed_reward'] = high_speed_reward
                    reward_cfg['cooperative_reward'] = cooperative_reward
                    data["tracker_logging"] = True
                    out_path = ini_folder + base_name_json + str(start_num) + '.json'
                    start_num += 1
                    with open(out_path, 'w') as outfile:
                        json.dump(data, outfile)
if complex_scenario:
    # Sweep the desired-lane bonus for the complex scenario; cooperation on,
    # sympathy off.  One config file per reward value.
    for on_desired_lane_reward in [2, 5, 10]:
        data["reward"]["cooperative_flag"] = True
        data["reward"]["sympathy_flag"] = False
        data["additional_folder_name"] = f"exp_merge_{start_num}"
        data["reward"]['on_desired_lane_reward'] = on_desired_lane_reward
        data["tracker_logging"] = True
        info = (
            f"exp_merge_{start_num} similar to {data['base_config']}"
            f" cooperative_flag {data['reward']['cooperative_flag']}"
            f" sympathy_flag {data['reward']['sympathy_flag']}"
            f" on_desired_lane_reward= {on_desired_lane_reward}"
        )
        data['info'] = info
        out_path = ini_folder + f"exp_merge_{start_num}" + '.json'
        start_num += 1
        with open(out_path, 'w') as outfile:
            json.dump(data, outfile)
if play_with_merging_position:
    # Grid over the merging vehicle's spawn position and initial speed.
    for initial_position in [90, 95, 100]:
        for speed in [23, 25, 27]:
            data["reward"]["cooperative_flag"] = True
            data["reward"]["sympathy_flag"] = False
            # NOTE: spacing in this tag string reproduces the historical format
            # exactly (double space after exp_base, no space before
            # "initial_position=") so downstream log parsing is unaffected.
            info = (
                f"exp_merge_{start_num} similar to IEEE_Access/exp_base "
                f" cooperative_flag {data['reward']['cooperative_flag']}"
                f" sympathy_flag {data['reward']['sympathy_flag']}"
                f"initial_position= {initial_position} and speed= {speed}"
            )
            data["additional_folder_name"] = f"exp_merge_{start_num}"
            data['info'] = info
            # Position is (x, y); the merge point varies only along x.
            data["merging_vehicle"]['initial_position'] = [initial_position, 0]
            data["merging_vehicle"]['speed'] = speed
            data["tracker_logging"] = True
            out_path = ini_folder + f"exp_merge_{start_num}" + '.json'
            start_num += 1
            with open(out_path, 'w') as outfile:
                json.dump(data, outfile)
if play_with_coop:
    # Cooperation-flag ablation on the complex-scenario base config:
    # (cooperative, sympathy) combinations x controlled/uncontrolled merge
    # vehicle.  Writes one JSON config per combination.
    base_config_folder = "configs/experiments/complex/"
    # Intentionally restarts the counter so these configs land in the 300 range.
    start_num = 300
    for base_config in ["exp_merge_complex_100base-1.json"]:
        data["base_config"] = base_config_folder + base_config
        for cooperative_flag, sympathy_flag in [(True, True), (True, False), (False, False)]:
            for controlled_vehicle in [True, False]:
                # Fix: renamed the loop variable from `type` to avoid shadowing
                # the builtin; the emitted info string is unchanged.
                for vehicle_type in ["highway_env.vehicle.behavior.CustomVehicle"]:
                    data["reward"]["cooperative_flag"] = cooperative_flag
                    data["reward"]["sympathy_flag"] = sympathy_flag
                    data["additional_folder_name"] = "exp_merge_" + str(start_num)
                    data["merging_vehicle"]['vehicles_type'] = vehicle_type
                    data["merging_vehicle"]['controlled_vehicle'] = controlled_vehicle
                    data["tracker_logging"] = True
                    info = "exp_merge_" + str(start_num) + " similar to IEEE_Access/exp_base " + \
                           " cooperative_flag " + str(data["reward"]["cooperative_flag"]) + \
                           " sympathy_flag " + str(data["reward"]["sympathy_flag"]) + \
                           " type= " + str(vehicle_type) + \
                           " controlled_vehicle " + str(data["merging_vehicle"]['controlled_vehicle'])
                    data['info'] = info
                    name = ini_folder + base_name_json + str(start_num) + '.json'
                    start_num += 1
                    with open(name, 'w') as outfile:
                        json.dump(data, outfile)
if play_with_randomness:
    # Robustness sweep: pair growing merge-position offsets with growing speed
    # offsets (zipped, not a full grid) across cooperation-flag combinations.
    for cooperative_flag, sympathy_flag in [(True, True), (True, False), (False, False)]:
        # Fix: renamed the loop variable from `type` to avoid shadowing the
        # builtin; the emitted info string is unchanged.
        for vehicle_type in ["highway_env.vehicle.behavior.CustomVehicle"]:
            for random_offset_merging, randomize_speed_offset in zip([2, 10, 15], [0, 4, 6]):
                data["reward"]["cooperative_flag"] = cooperative_flag
                data["reward"]["sympathy_flag"] = sympathy_flag
                data["observation"]["cooperative_perception"] = True
                data["additional_folder_name"] = "exp_merge_" + str(start_num)
                data["merging_vehicle"]['vehicles_type'] = vehicle_type
                data["merging_vehicle"]['controlled_vehicle'] = True
                # Offsets are symmetric ranges [-x, x] around the nominal value.
                data["merging_vehicle"]['random_offset_merging'] = [-random_offset_merging, random_offset_merging]
                data["scenario"]['randomize_speed_offset'] = [-randomize_speed_offset, randomize_speed_offset]
                data["tracker_logging"] = True
                info = "exp_merge_" + str(start_num) + " similar to IEEE_Access/exp_base " + \
                       " cooperative_flag " + str(data["reward"]["cooperative_flag"]) + \
                       " sympathy_flag " + str(data["reward"]["sympathy_flag"]) + \
                       " type= " + str(vehicle_type) + \
                       " cooperative_perception " + str(data["observation"]["cooperative_perception"]) + \
                       " controlled_vehicle" + str(data["merging_vehicle"]['controlled_vehicle']) + \
                       " random_offset_merging" + str(data["merging_vehicle"]['random_offset_merging']) + \
                       " randomize_speed_offset" + str(data["scenario"]['randomize_speed_offset'])
                data['info'] = info
                name = ini_folder + "exp_merge_" + str(start_num) + '.json'
                start_num += 1
                with open(name, 'w') as outfile:
                    json.dump(data, outfile)
# for type in ["highway_env.vehicle.behavior.CustomVehicle", "highway_env.vehicle.behavior.CustomVehicleAggressive"]:
# data["reward"]["cooperative_flag"] = False
# data["reward"]["sympathy_flag"] = False
# data["observation"]["cooperative_perception"] = False
#
# info = "exp_merge_" + str(start_num) + " similar to IEEE_Access/exp_base " + \
# " cooperative_flag " + str(data["reward"]["cooperative_flag"]) + \
# " sympathy_flag " + str(data["reward"]["sympathy_flag"]) + \
# " type= " + str(type) + " cooperative_perception " + str(data["observation"]["cooperative_perception"])
#
# data["additional_folder_name"] = "exp_merge_" + str(start_num)
# data['info'] = info
# data["merging_vehicle"]['vehicles_type'] = type
# data["tracker_logging"] = True
#
# name = ini_folder + "exp_merge_" + str(start_num) + '.json'
# start_num += 1
# with open(name, 'w') as outfile:
# json.dump(data, outfile)
if play_with_mission_is_controlled:
    # Mission vehicle driven by a ControlledVehicle policy: sweep the three
    # (cooperative, sympathy) combinations with cooperative perception on,
    # then emit one fully non-cooperative baseline config with perception off.
    # Fix: renamed from `type` to avoid shadowing the builtin; the emitted
    # info string is unchanged.
    vehicle_type = "ControlledVehicle"
    for cooperative_flag, sympathy_flag in [(True, True), (True, False), (False, False)]:
        data["reward"]["cooperative_flag"] = cooperative_flag
        data["reward"]["sympathy_flag"] = sympathy_flag
        data["observation"]["cooperative_perception"] = True
        info = "exp_merge_" + str(start_num) + " similar to IEEE_Access/exp_base " + \
               " cooperative_flag " + str(data["reward"]["cooperative_flag"]) + \
               " sympathy_flag " + str(data["reward"]["sympathy_flag"]) + \
               " type= " + str(vehicle_type) + " cooperative_perception " + str(data["observation"]["cooperative_perception"])
        data["additional_folder_name"] = "exp_merge_" + str(start_num)
        data['info'] = info
        data["merging_vehicle"]['vehicles_type'] = vehicle_type
        data["tracker_logging"] = True
        name = ini_folder + "exp_merge_" + str(start_num) + '.json'
        start_num += 1
        with open(name, 'w') as outfile:
            json.dump(data, outfile)
    # Baseline: all cooperation flags off and no cooperative perception.
    data["reward"]["cooperative_flag"] = False
    data["reward"]["sympathy_flag"] = False
    data["observation"]["cooperative_perception"] = False
    info = "exp_merge_" + str(start_num) + " similar to IEEE_Access/exp_base " + \
           " cooperative_flag " + str(data["reward"]["cooperative_flag"]) + \
           " sympathy_flag " + str(data["reward"]["sympathy_flag"]) + \
           " type= " + str(vehicle_type) + " cooperative_perception " + str(data["observation"]["cooperative_perception"])
    data["additional_folder_name"] = "exp_merge_" + str(start_num)
    data['info'] = info
    data["merging_vehicle"]['vehicles_type'] = vehicle_type
    data["tracker_logging"] = True
    name = ini_folder + "exp_merge_" + str(start_num) + '.json'
    start_num += 1
    with open(name, 'w') as outfile:
        json.dump(data, outfile)
if ablation_action_history:
    # Ablation: how much action history to observe, and in which encoding.
    for action_history_type in ["discrete", "xy_discrete", "binary"]:
        for action_history_count in [0, 2, 5, 10, 15]:
            obs_cfg = data["observation"]
            obs_cfg["action_history_type"] = action_history_type
            obs_cfg["action_history_count"] = action_history_count
            # Double space after "exp_merge_300" reproduces the legacy format.
            info = (
                f"exp_merge_{start_num} similar to IEEE_Access/exp_merge_300 "
                f" action_history_type {obs_cfg['action_history_type']}"
                f" action_history_count {obs_cfg['action_history_count']}"
            )
            data["additional_folder_name"] = f"exp_merge_{start_num}"
            data['info'] = info
            data["tracker_logging"] = True
            out_path = ini_folder + f"exp_merge_{start_num}" + '.json'
            start_num += 1
            with open(out_path, 'w') as outfile:
                json.dump(data, outfile)
if ablation_state_representation_f1:
    # State-representation ablation, part 1: mission-vehicle visibility,
    # feature set, and ordering of observed vehicles.
    for mission_vehicle_observation in (True, False):
        for features in [["presence", "x", "y", "vx", "vy", "is_controlled"],
                         ["presence", "x", "y", "vx", "vy"],
                         ["x", "y", "vx", "vy", "is_controlled"]]:
            # Original note: "shuffled"/"sorted_by_x" variants were wrong here.
            for order in ["sorted", "sorted_by_id", "shuffled"]:
                obs_cfg = data["observation"]
                obs_cfg["mission_vehicle_observation"] = mission_vehicle_observation
                obs_cfg["features"] = features
                obs_cfg["order"] = order
                info = (
                    f"exp_merge_{start_num} similar to IEEE_Access/exp_merge_300 "
                    f" mission_vehicle_observation {obs_cfg['mission_vehicle_observation']}"
                    f" features {obs_cfg['features']}"
                    f" order {obs_cfg['order']}"
                )
                data["additional_folder_name"] = f"exp_merge_{start_num}"
                data['info'] = info
                data["tracker_logging"] = True
                out_path = ini_folder + f"exp_merge_{start_num}" + '.json'
                start_num += 1
                with open(out_path, 'w') as outfile:
                    json.dump(data, outfile)
if ablation_state_representation_f2:
    # State-representation ablation, part 2: the full 2^4 grid over four
    # boolean observation options.
    for absolute in (True, False):
        for see_behind in (True, False):
            for normalize in (True, False):
                for cooperative_perception in (True, False):
                    obs_cfg = data["observation"]
                    for key, flag in (("absolute", absolute),
                                      ("see_behind", see_behind),
                                      ("normalize", normalize),
                                      ("cooperative_perception", cooperative_perception)):
                        obs_cfg[key] = flag
                    info = (
                        f"exp_merge_{start_num} similar to IEEE_Access/exp_merge_300 "
                        f" absolute {absolute}"
                        f" see_behind {see_behind}"
                        f" normalize {normalize}"
                        f" cooperative_perception {cooperative_perception}"
                    )
                    data["additional_folder_name"] = f"exp_merge_{start_num}"
                    data['info'] = info
                    data["tracker_logging"] = True
                    out_path = ini_folder + f"exp_merge_{start_num}" + '.json'
                    start_num += 1
                    with open(out_path, 'w') as outfile:
                        json.dump(data, outfile)
if state_representation_conv:
    # Conv-observation configs (grid / heatmap / image bases) across three
    # traffic setups: (front cruisers enabled, lane count).
    base_config_folder = "configs/experiments/IROS/"
    traffic_setups = [(False, 1), (False, 3), (True, 3)]
    conv_bases = ["exp_base_conv_multiagent_grid.json",
                  "exp_base_conv_multiagent_heatmap.json",
                  "exp_base_conv_multiagent_image.json"]
    for cruising_vehicles_front, lanes_count in traffic_setups:
        for base_config in conv_bases:
            # One flag drives both front-cruiser options.
            data["cruising_vehicles_front"] = cruising_vehicles_front
            data["cruising_vehicles_front_random_everywhere"] = cruising_vehicles_front
            data["lanes_count"] = lanes_count
            data["base_config"] = base_config_folder + base_config
            info = (
                f"exp_merge_{start_num} similar to {data['base_config']}"
                f" cruising_vehicles_front {data['cruising_vehicles_front']}"
                f" lanes_count {data['lanes_count']}"
            )
            data["additional_folder_name"] = f"exp_merge_IROS{start_num}"
            data['info'] = info
            data["tracker_logging"] = True
            out_path = ini_folder + base_name_json + str(start_num) + '.json'
            start_num += 1
            with open(out_path, 'w') as outfile:
                json.dump(data, outfile)
print("End")
| 52.515225
| 178
| 0.569547
| 4,556
| 43,115
| 5.056409
| 0.059263
| 0.039588
| 0.032947
| 0.03056
| 0.825845
| 0.796501
| 0.772236
| 0.751791
| 0.734601
| 0.723488
| 0
| 0.018092
| 0.310286
| 43,115
| 820
| 179
| 52.579268
| 0.7566
| 0.097182
| 0
| 0.644231
| 0
| 0
| 0.267973
| 0.09938
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001603
| false
| 0
| 0.00641
| 0.001603
| 0.009615
| 0.001603
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2880ba1a8816cb45aa247d1124e50c11fd2c533b
| 290
|
py
|
Python
|
templates/template.py
|
tildeSlashAi/nldenet
|
953f947bd82eb577fa44f7e86ca5b660c78f885e
|
[
"MIT"
] | null | null | null |
templates/template.py
|
tildeSlashAi/nldenet
|
953f947bd82eb577fa44f7e86ca5b660c78f885e
|
[
"MIT"
] | null | null | null |
templates/template.py
|
tildeSlashAi/nldenet
|
953f947bd82eb577fa44f7e86ca5b660c78f885e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ----------------------------------------------------
# (c) 2020 tildeSlashAi Team, All Rights Reserved.
# Licensed under the MIT License
#
# tildeSlashAi Team:
# - Dominique F. Garmier
# ----------------------------------------------------
| 29
| 54
| 0.42069
| 24
| 290
| 5.083333
| 0.916667
| 0.262295
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019685
| 0.124138
| 290
| 9
| 55
| 32.222222
| 0.46063
| 0.937931
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.