hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5c2d513c3bdb6e51f286fc9a280da8e852393fd0
| 1,186
|
py
|
Python
|
examples/blueray.py
|
thehappydinoa/iTachModulePython
|
5fc0597ffc29bf899ec2cf0d09ad557bf071fc4f
|
[
"MIT"
] | 3
|
2019-01-10T04:10:47.000Z
|
2021-05-12T18:29:59.000Z
|
examples/blueray.py
|
thehappydinoa/iTachModulePython
|
5fc0597ffc29bf899ec2cf0d09ad557bf071fc4f
|
[
"MIT"
] | 3
|
2020-09-24T10:47:24.000Z
|
2021-06-25T15:42:18.000Z
|
examples/blueray.py
|
thehappydinoa/iTachModulePython
|
5fc0597ffc29bf899ec2cf0d09ad557bf071fc4f
|
[
"MIT"
] | 6
|
2018-08-12T13:27:24.000Z
|
2021-12-09T01:11:11.000Z
|
from itachip2ir import VirtualDevice, iTach
commands = {
"toggle_power": "sendir,1:3,1,40192,1,1,97,23,49,23,25,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,25,23,49,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,49,23,49,529,96,23,49,23,25,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,25,23,49,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,49,23,49,528,96,23,49,23,25,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,25,23,49,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,49,23,49,553,97,23,49,23,25,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,25,23,49,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,49,23,49,554,97,23,49,23,25,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,25,23,49,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,49,23,49,554,97,23,49,23,25,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,25,23,49,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,49,23,49,554,97,23,49,23,25,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,25,23,49,23,49,23,25,23,49,23,25,23,25,23,25,23,49,23,49,23,49,4000"
}
itach = iTach(ipaddress="192.168.1.111", port=4998)
blueray = itach.add(VirtualDevice(
name="blueray", commands=commands))
if __name__ == "__main__":
print(blueray.toggle_power())
| 84.714286
| 934
| 0.677909
| 333
| 1,186
| 2.384384
| 0.102102
| 0.352645
| 0.528967
| 0.423174
| 0.734257
| 0.734257
| 0.734257
| 0.734257
| 0.734257
| 0.734257
| 0
| 0.542358
| 0.03457
| 1,186
| 13
| 935
| 91.230769
| 0.151092
| 0
| 0
| 0
| 0
| 0.111111
| 0.802698
| 0.768971
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0.111111
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
30eb8c0e51411133d5312b5d388e2f11848f39e7
| 311
|
py
|
Python
|
q2_tsne/__init__.py
|
azhang30/q2-tsne
|
8e5f1ebf7fbd5b3882399c177bde7d2329a7df4d
|
[
"BSD-3-Clause"
] | null | null | null |
q2_tsne/__init__.py
|
azhang30/q2-tsne
|
8e5f1ebf7fbd5b3882399c177bde7d2329a7df4d
|
[
"BSD-3-Clause"
] | null | null | null |
q2_tsne/__init__.py
|
azhang30/q2-tsne
|
8e5f1ebf7fbd5b3882399c177bde7d2329a7df4d
|
[
"BSD-3-Clause"
] | null | null | null |
from q2_tsne._t_sne import (_joint_probabilities, _joint_probabilities_nn, _kl_divergence, _kl_divergence_bh, _gradient_descent, trustworthiness, __init__)
__all__ = ['_joint_probabilities', '_joint_probabilities_nn', '_kl_divergence', '_kl_divergence_bh', '_gradient_descent', 'trustworthiness', '__init__']
| 77.75
| 156
| 0.829582
| 35
| 311
| 6.257143
| 0.485714
| 0.328767
| 0.210046
| 0.328767
| 0.894977
| 0.894977
| 0.894977
| 0.894977
| 0.894977
| 0.894977
| 0
| 0.003448
| 0.067524
| 311
| 3
| 157
| 103.666667
| 0.751724
| 0
| 0
| 0
| 0
| 0
| 0.366559
| 0.073955
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 12
|
30fd9a718f18befaf36a2fe2388d90d973ace339
| 132
|
py
|
Python
|
src/arcrest/manageorg/_marketplace.py
|
kevinsigwart/ArcREST
|
2d83bd87e23f3f78d6bba2131e6d47cf8e2e07c1
|
[
"Apache-2.0"
] | 208
|
2015-01-08T18:14:02.000Z
|
2022-03-31T02:45:42.000Z
|
src/arcrest/manageorg/_marketplace.py
|
kevinsigwart/ArcREST
|
2d83bd87e23f3f78d6bba2131e6d47cf8e2e07c1
|
[
"Apache-2.0"
] | 279
|
2015-01-16T16:30:35.000Z
|
2020-03-31T11:57:47.000Z
|
src/arcrest/manageorg/_marketplace.py
|
kevinsigwart/ArcREST
|
2d83bd87e23f3f78d6bba2131e6d47cf8e2e07c1
|
[
"Apache-2.0"
] | 164
|
2015-01-21T13:49:05.000Z
|
2021-12-01T18:13:05.000Z
|
"""
Module that controls marketplace functions
TODO
"""
from __future__ import absolute_import
from __future__ import print_function
| 22
| 42
| 0.840909
| 16
| 132
| 6.3125
| 0.75
| 0.19802
| 0.316832
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113636
| 132
| 6
| 43
| 22
| 0.863248
| 0.356061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
a514b321b7603598344a57ab4ad75e1a425307ba
| 33
|
py
|
Python
|
jamaah/tasks/__init__.py
|
ganggas95/jamaah_nw_be
|
75b189d585a604b8e6de4dd3bdd77397d8e19fc6
|
[
"MIT"
] | null | null | null |
jamaah/tasks/__init__.py
|
ganggas95/jamaah_nw_be
|
75b189d585a604b8e6de4dd3bdd77397d8e19fc6
|
[
"MIT"
] | null | null | null |
jamaah/tasks/__init__.py
|
ganggas95/jamaah_nw_be
|
75b189d585a604b8e6de4dd3bdd77397d8e19fc6
|
[
"MIT"
] | 1
|
2020-02-12T09:22:01.000Z
|
2020-02-12T09:22:01.000Z
|
import jamaah.tasks.jamaah_tasks
| 16.5
| 32
| 0.878788
| 5
| 33
| 5.6
| 0.6
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 33
| 1
| 33
| 33
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
eb648403a5060da0903fbec92acdd97c259ff02b
| 6,646
|
py
|
Python
|
smbf/usr/banner.py
|
Alpha-Demon404/RE-14
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 39
|
2020-02-26T09:44:36.000Z
|
2022-03-23T00:18:25.000Z
|
smbf/usr/banner.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 15
|
2020-05-14T10:07:26.000Z
|
2022-01-06T02:55:32.000Z
|
smbf/usr/banner.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 41
|
2020-03-16T22:36:38.000Z
|
2022-03-17T14:47:19.000Z
|
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa6\x08\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns&\x08\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa6\x07\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns&\x07\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa6\x06\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns&\x06\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa6\x05\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns&\x05\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00
d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa6\x04\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00sB\x00\x00\x00d\x00d\x01l\x00m\x01Z\x01m\x02Z\x02m\x03Z\x03\x01\x00d\x02Z\x04d\x03Z\x05d\x04Z\x06d\x05Z\x07e\x01d\x06d\x07\x8d\x01\x01\x00d\x08d\t\x84\x00Z\x08d\nd\x0b\x84\x00Z\td\x0cS\x00)\r\xe9\x00\x00\x00\x00)\x03\xda\x04init\xda\x04Fore\xda\x04Backz\x06\x1b[94;mz\x06\x1b[92;mz\x05\x1b[0;mz\x06\x1b[93;mT)\x01Z\tautoresetc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14\x00\x00\x00C\x00\x00\x00sL\x00\x00\x00t\x00d\x01t\x01\x9b\x00d\x02t\x02\x9b\x00d\x03t\x01\x9b\x00d\x04t\x02\x9b\x00d\x05t\x01\x9b\x00d\x06t\x02\x9b\x00d\x07t\x03\x9b\x00d\x08t\x02\x9b\x00d\tt\x04j\x05t\x06j\x07\x17\x00\x9b\x00d\n\x9d\x13\x83\x01\x01\x00d\x00S\x00)\x0bNz\x07\n u\t\x00\x00\x00\xe2\x95\x94\xe2\x95\x90\xe2\x95\x97u&\x00\x00\x00 \xe2\x94\x8c\xe2\x94\xac\xe2\x94\x90 \xe2\x94\x8c\xe2\x94\x90 \xe2\x94\x8c\xe2\x94\x80\xe2\x94\x90\n u\t\x00\x00\x00\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x97u%\x00\x00\x00 \xe2\x94\x82\xe2\x94\x82\xe2\x94\x82 \xe2\x94\x9c\xe2\x94\xb4\xe2\x94\x90 \xe2\x94\x9c\xe2\x94\xa4\n u\t\x00\x00\x00\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x9du\x1c\x00\x00\x00 \xe2\x94\xb4 \xe2\x94\xb4 \xe2\x94\x94\xe2\x94\x80\xe2\x94\x98 \xe2\x94\x94 Vz\x031.2z\x05dev\n z$ Simple multi bruteforce facebook 
)\x08\xda\x05print\xda\x01C\xda\x01r\xda\x01Gr\x04\x00\x00\x00Z\rLIGHTWHITE_EXr\x03\x00\x00\x00Z\x05BLACK\xa9\x00r\t\x00\x00\x00r\t\x00\x00\x00\xda\x00\xda\x06banner\x07\x00\x00\x00s&\x00\x00\x00\x00\x01\x04\x01\x02\xff\x04\x01\x02\xff\x04\x01\x02\xff\x04\x01\x02\xff\x04\x01\x02\xff\x04\x01\x02\xff\x04\x03\x02\xfd\x04\x01\x02\xff\x04\x04\n\xfcr\x0b\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xa4\x00\x00\x00t\x00\x83\x00\x01\x00t\x00d\x01t\x01\x9b\x00d\x02t\x02\x9b\x00d\x03\x9d\x05\x83\x01\x01\x00t\x00d\x01t\x01\x9b\x00d\x04t\x02\x9b\x00d\x05\x9d\x05\x83\x01\x01\x00t\x00d\x01t\x01\x9b\x00d\x06t\x02\x9b\x00d\x07\x9d\x05\x83\x01\x01\x00t\x00d\x01t\x01\x9b\x00d\x08t\x02\x9b\x00d\t\x9d\x05\x83\x01\x01\x00t\x00d\x01t\x01\x9b\x00d\nt\x02\x9b\x00d\x0b\x9d\x05\x83\x01\x01\x00t\x00d\x01t\x01\x9b\x00d\x0ct\x02\x9b\x00d\r\x9d\x05\x83\x01\x01\x00t\x00d\x01t\x01\x9b\x00d\x0et\x02\x9b\x00d\x0f\x9d\x05\x83\x01\x01\x00d\x00S\x00)\x10Nz\x04 [ \xda\x011z\x12 ] From friendlist\xda\x012z\x15 ] From likes on post\xda\x013z\x11 ] By search name\xda\x014z# ] From group (only takes 100 user)\xda\x015z\x19 ] From friend friendlist\xda\x016z\x10 ] From hashtag \xda\x017z\x12 ] Re-check 
result)\x03r\x05\x00\x00\x00r\x06\x00\x00\x00r\x07\x00\x00\x00r\t\x00\x00\x00r\t\x00\x00\x00r\t\x00\x00\x00r\n\x00\x00\x00\xda\x04menu\r\x00\x00\x00s\x10\x00\x00\x00\x00\x01\x06\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01r\x13\x00\x00\x00N)\nZ\x08coloramar\x02\x00\x00\x00r\x03\x00\x00\x00r\x04\x00\x00\x00r\x06\x00\x00\x00r\x08\x00\x00\x00r\x07\x00\x00\x00\xda\x01Pr\x0b\x00\x00\x00r\x13\x00\x00\x00r\t\x00\x00\x00r\t\x00\x00\x00r\t\x00\x00\x00r\n\x00\x00\x00\xda\x08<module>\x01\x00\x00\x00s\x0e\x00\x00\x00\x14\x01\x04\x01\x04\x01\x04\x01\x04\x01\n\x01\x08\x06)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xda\x00\xda\x08<module>\x06\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xda\x00\xda\x08<module>\x06\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xda\x00\xda\x08<module>\x06\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xda\x00\xda\x08<module>\x06\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xda\x00\xda\x08<module>\x06\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xda\x00\xda\x08<module>\x06\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xda\x00\xda\x08<module>\x06\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xda\x00\xda\x08<module>\x06\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xda\x00\xda\x08<module>\x06\x00\x00\x00s\x02\x00\x00\x00\x08\x01'))
| 3,323
| 6,631
| 0.756545
| 1,501
| 6,646
| 3.349101
| 0.11459
| 0.444002
| 0.43326
| 0.427293
| 0.804655
| 0.755719
| 0.734036
| 0.703799
| 0.682117
| 0.682117
| 0
| 0.387018
| 0.012489
| 6,646
| 2
| 6,631
| 3,323
| 0.378943
| 0
| 0
| 0
| 0
| 0.5
| 0.993982
| 0.944486
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 19
|
ebe0028c903dcf8d69442d7091665c14ebefa6df
| 110
|
py
|
Python
|
tests/cli/test_go.py
|
woutdenolf/python-zocalo
|
d35f1316bcc34c32bd6a8c62e6eab2c3c1484443
|
[
"BSD-3-Clause"
] | 2
|
2019-10-16T02:24:11.000Z
|
2021-03-29T03:30:50.000Z
|
tests/cli/test_go.py
|
woutdenolf/python-zocalo
|
d35f1316bcc34c32bd6a8c62e6eab2c3c1484443
|
[
"BSD-3-Clause"
] | 116
|
2019-05-03T09:38:03.000Z
|
2022-03-22T17:52:03.000Z
|
tests/cli/test_go.py
|
stufisher/python-zocalo
|
d35f1316bcc34c32bd6a8c62e6eab2c3c1484443
|
[
"BSD-3-Clause"
] | 3
|
2020-12-21T07:57:00.000Z
|
2021-12-10T15:48:52.000Z
|
import subprocess
def test_zocalo_go_help():
subprocess.run("zocalo.go --help", check=True, shell=True)
| 18.333333
| 62
| 0.736364
| 16
| 110
| 4.875
| 0.6875
| 0.205128
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 110
| 5
| 63
| 22
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ebe0803e270c69101eacdc729648e47df06c8b0b
| 337
|
py
|
Python
|
src/testcase/GN_APP/input_case/GN_APP_Forget_Password.py
|
maiyajj/AutoTest_script-Appium_Connect
|
f9c2c42c281a9e2f984acb4a72dda0694b053f22
|
[
"Apache-2.0"
] | 28
|
2017-11-10T00:19:16.000Z
|
2022-02-19T16:42:05.000Z
|
src/testcase/GN_APP/input_case/GN_APP_Forget_Password.py
|
maiyajj/AutoTest_script-Appium_Connect
|
f9c2c42c281a9e2f984acb4a72dda0694b053f22
|
[
"Apache-2.0"
] | null | null | null |
src/testcase/GN_APP/input_case/GN_APP_Forget_Password.py
|
maiyajj/AutoTest_script-Appium_Connect
|
f9c2c42c281a9e2f984acb4a72dda0694b053f22
|
[
"Apache-2.0"
] | 23
|
2017-08-22T06:12:19.000Z
|
2021-09-18T05:45:41.000Z
|
# coding=utf-8
try:
from src.testcase.GN_APP.case.GN_APP_FORGET_PASSWORD.GN_APP_FORGET_PASSWORD_001 import *
from src.testcase.GN_APP.case.GN_APP_FORGET_PASSWORD.GN_APP_FORGET_PASSWORD_002 import *
from src.testcase.GN_APP.case.GN_APP_FORGET_PASSWORD.GN_APP_FORGET_PASSWORD_003 import *
except ImportError as e:
print(e)
| 42.125
| 92
| 0.818991
| 58
| 337
| 4.344828
| 0.362069
| 0.178571
| 0.261905
| 0.452381
| 0.785714
| 0.785714
| 0.785714
| 0.785714
| 0.785714
| 0.785714
| 0
| 0.033223
| 0.106825
| 337
| 7
| 93
| 48.142857
| 0.803987
| 0.035608
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0.666667
| 0
| 0.666667
| 0.166667
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 12
|
6941ebd18e5bedbc1a424b95d1cfc2f3e7162d05
| 141
|
py
|
Python
|
fseutil/tests/test_fse_flame_height.py
|
fsepy/fseutil
|
a73e6d614ce27252bcbed61661124a00a7bc6a80
|
[
"Apache-2.0"
] | null | null | null |
fseutil/tests/test_fse_flame_height.py
|
fsepy/fseutil
|
a73e6d614ce27252bcbed61661124a00a7bc6a80
|
[
"Apache-2.0"
] | 7
|
2020-02-07T09:44:44.000Z
|
2020-02-20T16:58:15.000Z
|
fseutil/tests/test_fse_flame_height.py
|
fsepy/fseutil
|
a73e6d614ce27252bcbed61661124a00a7bc6a80
|
[
"Apache-2.0"
] | null | null | null |
from fseutil.lib.fse_flame_height import _test_mean_flame_height_pd_7974 as test_mean_flame_height_pd_7974
test_mean_flame_height_pd_7974()
| 35.25
| 106
| 0.914894
| 26
| 141
| 4.269231
| 0.461538
| 0.396396
| 0.351351
| 0.513514
| 0.675676
| 0.675676
| 0
| 0
| 0
| 0
| 0
| 0.090226
| 0.056738
| 141
| 3
| 107
| 47
| 0.744361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
69622b1e4032fbd16a17f2249aba7551c75d5a50
| 328
|
py
|
Python
|
kanobu/color.py
|
fossabot/kanobu
|
49a90260719c0b7519492e9bd9dc8d87b51eae87
|
[
"MIT"
] | 6
|
2020-05-21T23:29:02.000Z
|
2021-01-24T00:49:13.000Z
|
kanobu/color.py
|
fossabot/kanobu
|
49a90260719c0b7519492e9bd9dc8d87b51eae87
|
[
"MIT"
] | 10
|
2020-05-21T05:40:10.000Z
|
2021-01-05T00:00:55.000Z
|
kanobu/color.py
|
fossabot/kanobu
|
49a90260719c0b7519492e9bd9dc8d87b51eae87
|
[
"MIT"
] | 3
|
2020-05-22T02:28:16.000Z
|
2020-12-09T00:42:57.000Z
|
def blue(text):
return f"\033[34m {text}\033[0m"
def red(text):
return f"\033[31m{text}\033[0m"
def green(text):
return f"\033[32m{text}\033[0m"
def yellow(text):
return f"\033[33m{text}\033[0m"
def black(text):
return f"\033[30m{text}\033[0m"
def gray(text):
return f"\033[1;30m{text}\033[0m"
| 14.26087
| 37
| 0.615854
| 61
| 328
| 3.311475
| 0.295082
| 0.29703
| 0.326733
| 0.415842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.204461
| 0.179878
| 328
| 22
| 38
| 14.909091
| 0.546468
| 0
| 0
| 0
| 0
| 0
| 0.393293
| 0.32622
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
15ca89a931de2a76ea1228b8fde674bfeecae37a
| 136
|
py
|
Python
|
platform/src/checkit/static/__init__.py
|
StevenClontz/checkit
|
ccc9b094bce503669a8905d0d73ce6ca96ecc6f0
|
[
"MIT"
] | 6
|
2020-11-05T00:47:30.000Z
|
2022-02-09T02:10:08.000Z
|
platform/src/checkit/static/__init__.py
|
StevenClontz/checkit
|
ccc9b094bce503669a8905d0d73ce6ca96ecc6f0
|
[
"MIT"
] | 49
|
2020-08-07T14:54:48.000Z
|
2022-02-02T20:44:29.000Z
|
platform/src/checkit/static/__init__.py
|
StevenClontz/checkit
|
ccc9b094bce503669a8905d0d73ce6ca96ecc6f0
|
[
"MIT"
] | 3
|
2020-09-16T20:19:30.000Z
|
2022-01-14T05:47:18.000Z
|
import importlib.resources
def read_resource(resource_name):
return importlib.resources.read_text("checkit.static", resource_name)
| 27.2
| 73
| 0.823529
| 17
| 136
| 6.352941
| 0.647059
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 136
| 4
| 74
| 34
| 0.870968
| 0
| 0
| 0
| 0
| 0
| 0.102941
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.666667
| 0.333333
| 1.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
15f5ef15cbc7e7a3c871a8bd4706f12ba34bd9d6
| 4,886
|
py
|
Python
|
tests/test_ldap_auth.py
|
phihos/Python-OpenVPN-LDAP-Auth
|
87dd986f49555d0fb50ad8d991cf02092a9d55dc
|
[
"MIT"
] | 1
|
2021-12-17T14:54:36.000Z
|
2021-12-17T14:54:36.000Z
|
tests/test_ldap_auth.py
|
phihos/python-openvpn-ldap-auth
|
87dd986f49555d0fb50ad8d991cf02092a9d55dc
|
[
"MIT"
] | null | null | null |
tests/test_ldap_auth.py
|
phihos/python-openvpn-ldap-auth
|
87dd986f49555d0fb50ad8d991cf02092a9d55dc
|
[
"MIT"
] | null | null | null |
import pytest
from ldap.ldapobject import SimpleLDAPObject
from tests.constants import TEST_TIMEOUT, TEST_USER_PASSWORD, \
OPENVPN_SERVER_ARGS_VIA_ENV, OPENVPN_SERVER_ARGS_VIA_FILE, \
CONFIG_CHALLENGE_RESPONSE_IGNORE, CONFIG_CHALLENGE_RESPONSE_PREPEND, CONFIG_CHALLENGE_RESPONSE_APPEND, \
OPENVPN_CLIENT_ARGS_WITH_CHALLENGE, TEST_USERNAME, OPENVPN_LOG_AUTH_SUCCEEDED_SERVER, \
OPENVPN_LOG_AUTH_SUCCEEDED_CLIENT, OPENVPN_CLIENT_ARGS_WITHOUT_CHALLENGE, TEST_USER_WRONG_PASSWORD, \
OPENVPN_LOG_AUTH_FAILED_CLIENT, OPENVPN_LOG_AUTH_FAILED_SERVER
from tests.utils import Process, OpenVPNProcess
@pytest.mark.timeout(TEST_TIMEOUT)
@pytest.mark.parametrize("openvpn_server", [OPENVPN_SERVER_ARGS_VIA_ENV, OPENVPN_SERVER_ARGS_VIA_FILE], indirect=True)
@pytest.mark.parametrize("config", [CONFIG_CHALLENGE_RESPONSE_IGNORE, CONFIG_CHALLENGE_RESPONSE_PREPEND,
CONFIG_CHALLENGE_RESPONSE_APPEND], indirect=True)
def test_authentication_with_challenge_response_should_succeed(connection: SimpleLDAPObject, openvpn_server: Process,
config: dict):
"""Splitting the user password between user pass and challenge response should pass authentication."""
process = OpenVPNProcess(OPENVPN_CLIENT_ARGS_WITH_CHALLENGE)
static_challenge_mode = config['authorization']['static_challenge']
if static_challenge_mode == 'ignore':
password = TEST_USER_PASSWORD
challenge_response = ''
elif static_challenge_mode == 'prepend':
password = TEST_USER_PASSWORD[1:]
challenge_response = TEST_USER_PASSWORD[:1]
else:
password = TEST_USER_PASSWORD[:1]
challenge_response = TEST_USER_PASSWORD[1:]
process.enter_username_password(TEST_USERNAME, password)
process.enter_challenge_response(challenge_response)
openvpn_server.check_in_output(OPENVPN_LOG_AUTH_SUCCEEDED_SERVER)
process.check_in_output(OPENVPN_LOG_AUTH_SUCCEEDED_CLIENT)
@pytest.mark.timeout(TEST_TIMEOUT)
@pytest.mark.parametrize("openvpn_server", [OPENVPN_SERVER_ARGS_VIA_ENV, OPENVPN_SERVER_ARGS_VIA_FILE], indirect=True)
@pytest.mark.parametrize("config", [CONFIG_CHALLENGE_RESPONSE_IGNORE, CONFIG_CHALLENGE_RESPONSE_PREPEND,
CONFIG_CHALLENGE_RESPONSE_APPEND], indirect=True)
def test_authentication_without_challenge_response_should_succeed(connection: SimpleLDAPObject, openvpn_server: Process,
config: dict):
process = OpenVPNProcess(OPENVPN_CLIENT_ARGS_WITHOUT_CHALLENGE)
process.enter_username_password(TEST_USERNAME, TEST_USER_PASSWORD)
openvpn_server.check_in_output(OPENVPN_LOG_AUTH_SUCCEEDED_SERVER)
process.check_in_output(OPENVPN_LOG_AUTH_SUCCEEDED_CLIENT)
@pytest.mark.timeout(TEST_TIMEOUT)
@pytest.mark.parametrize("openvpn_server", [OPENVPN_SERVER_ARGS_VIA_ENV, OPENVPN_SERVER_ARGS_VIA_FILE], indirect=True)
@pytest.mark.parametrize("config", [CONFIG_CHALLENGE_RESPONSE_IGNORE, CONFIG_CHALLENGE_RESPONSE_PREPEND,
CONFIG_CHALLENGE_RESPONSE_APPEND], indirect=True)
def test_authentication_with_challenge_response_with_wrong_password_should_fail(connection: SimpleLDAPObject,
openvpn_server: Process,
config: dict):
process = OpenVPNProcess(OPENVPN_CLIENT_ARGS_WITH_CHALLENGE)
process.enter_username_password(TEST_USERNAME, TEST_USER_WRONG_PASSWORD)
process.enter_challenge_response(TEST_USER_WRONG_PASSWORD)
process.check_in_output(OPENVPN_LOG_AUTH_FAILED_CLIENT)
process.check_not_in_output(OPENVPN_LOG_AUTH_SUCCEEDED_CLIENT)
openvpn_server.check_in_output(OPENVPN_LOG_AUTH_FAILED_SERVER)
@pytest.mark.timeout(TEST_TIMEOUT)
@pytest.mark.parametrize("openvpn_server", [OPENVPN_SERVER_ARGS_VIA_ENV, OPENVPN_SERVER_ARGS_VIA_FILE], indirect=True)
@pytest.mark.parametrize("config", [CONFIG_CHALLENGE_RESPONSE_IGNORE, CONFIG_CHALLENGE_RESPONSE_PREPEND,
CONFIG_CHALLENGE_RESPONSE_APPEND], indirect=True)
def test_authentication_without_challenge_response_with_wrong_password_should_fail(connection: SimpleLDAPObject,
openvpn_server: Process,
config: dict):
process = OpenVPNProcess(OPENVPN_CLIENT_ARGS_WITHOUT_CHALLENGE)
process.enter_username_password(TEST_USERNAME, TEST_USER_WRONG_PASSWORD)
process.check_in_output(OPENVPN_LOG_AUTH_FAILED_CLIENT)
process.check_not_in_output(OPENVPN_LOG_AUTH_SUCCEEDED_CLIENT)
openvpn_server.check_in_output(OPENVPN_LOG_AUTH_FAILED_SERVER)
| 64.289474
| 120
| 0.744781
| 539
| 4,886
| 6.217069
| 0.118738
| 0.131901
| 0.102954
| 0.059684
| 0.875858
| 0.810206
| 0.787526
| 0.771113
| 0.771113
| 0.771113
| 0
| 0.001017
| 0.195047
| 4,886
| 75
| 121
| 65.146667
| 0.851004
| 0.019648
| 0
| 0.584615
| 0
| 0
| 0.025502
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061538
| false
| 0.215385
| 0.061538
| 0
| 0.123077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
c656f6a1ef2826b5233566c0e1a25e7aedfa8b4e
| 45
|
py
|
Python
|
broadcast_qr/__init__.py
|
suri5471/broadcast_qr
|
d8898fb7210fd408180179e2d61396f85e94d716
|
[
"MIT"
] | null | null | null |
broadcast_qr/__init__.py
|
suri5471/broadcast_qr
|
d8898fb7210fd408180179e2d61396f85e94d716
|
[
"MIT"
] | null | null | null |
broadcast_qr/__init__.py
|
suri5471/broadcast_qr
|
d8898fb7210fd408180179e2d61396f85e94d716
|
[
"MIT"
] | null | null | null |
from broadcast_qr.r_from_qr import r_from_qr
| 22.5
| 44
| 0.888889
| 10
| 45
| 3.5
| 0.5
| 0.285714
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 45
| 1
| 45
| 45
| 0.853659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d669d7c0e5a8a0bcfec2e45dc9456c9fb20f5c3d
| 2,713
|
py
|
Python
|
SHmodel.py
|
gouchengcheng/baselinepp
|
ab36422830c245d5dd47f377a695f7159c954e73
|
[
"Apache-2.0"
] | null | null | null |
SHmodel.py
|
gouchengcheng/baselinepp
|
ab36422830c245d5dd47f377a695f7159c954e73
|
[
"Apache-2.0"
] | null | null | null |
SHmodel.py
|
gouchengcheng/baselinepp
|
ab36422830c245d5dd47f377a695f7159c954e73
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import math
class LN(object):
    """Log-normal correction baseline model.

    Fits a log-space offset ``alpha`` (and residual ``variance``) between an
    input series Ni and a reference series Nr, then predicts Nr from Ni with
    the log-normal mean correction ``Ni * exp(alpha + variance/2)``.
    """
    alpha = 0.0       # log-space offset coefficient
    variance = 0.0    # variance of the log residuals
    Nr = None         # reference (true) y values, set by RSE/PR
    Np = None         # predicted y values, set by predict
    precision = 0.0
    recall = 0.0

    def __init__(self):
        # Fixed: Python-2-only `print "..."` statement (SyntaxError on py3).
        print("LN model")

    def train(self, Ni, Nr):
        """Estimate (alpha, variance) from paired samples Ni, Nr and return them."""
        Ni = np.asarray(Ni, dtype=np.float64)
        Nr = np.asarray(Nr, dtype=np.float64)  # coerce like CS.train, for consistency
        # NOTE(review): alpha is the mean of the *squared* log residuals here;
        # a standard log-normal fit would use the plain mean (no **2) so that
        # the variance line below (which subtracts alpha from the residuals)
        # is centred correctly — confirm against the intended model.
        self.alpha = np.sum((np.log(Nr) - np.log(Ni))**2)/len(Ni)
        # Unbiased (n-1) sample variance of the alpha-centred log residuals.
        self.variance = np.sum((np.log(Nr) - np.log(Ni) - np.ones(len(Ni))*self.alpha)**2)/(len(Ni)-1)
        print("alpha: %f, variance: %f" % (self.alpha, self.variance))
        return (self.alpha, self.variance)

    def predict(self, Ni):
        """Return Np = Ni * exp(alpha + variance/2), element-wise; also stored on self."""
        Ni = np.asarray(Ni, dtype=np.float64)  # accept plain lists as well as arrays
        self.Np = np.exp(np.log(Ni) + np.ones(len(Ni))*(self.alpha + self.variance/2))
        return self.Np

    def RSE(self, Nr):
        """Relative squared error sum((Np/Nr - 1)^2) of the last prediction."""
        self.Nr = np.asarray(Nr, dtype=np.float64)
        rse = np.sum((self.Np/self.Nr - 1)**2)
        return rse

    def PR(self, Nr, threshold):
        """Precision/recall of predicting threshold exceedance.

        A sample counts as positive when its value is >= threshold;
        a/b/c accumulate true-positive / false-positive / false-negative counts.
        Returns (precision, recall); both 0.0 when there are no true positives.
        """
        self.Nr = np.asarray(Nr, dtype=np.float64)
        a = b = c = 0.0
        for y, y1 in zip(self.Nr, self.Np):
            if y >= threshold and y1 >= threshold:
                a += 1
            elif y < threshold and y1 >= threshold:
                b += 1
            elif y >= threshold and y1 < threshold:
                c += 1
        if a != 0.0:
            self.precision = a/(a+b)
            self.recall = a/(a+c)
        else:
            self.precision = 0.0
            self.recall = 0.0
        return (self.precision, self.recall)
class CS(object):
    """Constant-scaling baseline model.

    Fits a single multiplicative coefficient ``alpha`` between an input
    series Ni and a reference series Nr, then predicts Nr as ``alpha * Ni``.
    """
    alpha = 0.0       # scaling coefficient
    Nr = None         # reference (true) y values, set by RSE/PR
    Np = None         # predicted y values, set by predict
    precision = 0.0
    recall = 0.0

    def __init__(self):
        # Fixed: Python-2-only `print "..."` statement (SyntaxError on py3).
        print("CS model")

    def train(self, Ni, Nr):
        """Estimate alpha from paired samples Ni, Nr and return it.

        With r = Ni/Nr, alpha = sum(r)/sum(r**2), i.e. the least-squares
        minimizer of sum((alpha*r - 1)**2).
        """
        Ni = np.asarray(Ni, dtype=np.float64)
        Nr = np.asarray(Nr, dtype=np.float64)
        self.alpha = np.sum(Ni/Nr)/np.sum((Ni/Nr)**2)
        print("alpha: %f" % self.alpha)
        return self.alpha

    def predict(self, Ni):
        """Return Np = alpha * Ni element-wise; also stored on self."""
        # Fixed: a plain-list Ni previously hit Python list semantics
        # (list * float raises TypeError); coerce as train() does.
        Ni = np.asarray(Ni, dtype=np.float64)
        self.Np = Ni*self.alpha
        return self.Np

    def RSE(self, Nr):
        """Relative squared error sum((Np/Nr - 1)^2) of the last prediction."""
        self.Nr = np.asarray(Nr, dtype=np.float64)
        rse = np.sum((self.Np/self.Nr - 1)**2)
        return rse

    def PR(self, Nr, threshold):
        """Precision/recall of predicting threshold exceedance.

        A sample counts as positive when its value is >= threshold;
        a/b/c accumulate true-positive / false-positive / false-negative counts.
        Returns (precision, recall); both 0.0 when there are no true positives.
        """
        self.Nr = np.asarray(Nr, dtype=np.float64)
        a = b = c = 0.0
        for y, y1 in zip(self.Nr, self.Np):
            if y >= threshold and y1 >= threshold:
                a += 1
            elif y < threshold and y1 >= threshold:
                b += 1
            elif y >= threshold and y1 < threshold:
                c += 1
        if a != 0.0:
            self.precision = a/(a+b)
            self.recall = a/(a+c)
        else:
            self.precision = 0.0
            self.recall = 0.0
        return (self.precision, self.recall)
| 28.557895
| 102
| 0.507188
| 400
| 2,713
| 3.42
| 0.1325
| 0.02193
| 0.071637
| 0.065789
| 0.844298
| 0.809211
| 0.777047
| 0.740497
| 0.717836
| 0.678363
| 0
| 0.037982
| 0.349797
| 2,713
| 94
| 103
| 28.861702
| 0.737528
| 0.02359
| 0
| 0.78481
| 0
| 0
| 0.018175
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.025316
| null | null | 0.050633
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d6c0edbf10d6f5c69f5cddbeb9c69fa20c38dca5
| 136
|
py
|
Python
|
iwlearn/training/__init__.py
|
mfridental/iwlearn
|
95f092556b8d8d235ddf4471b065322e04f7f8c8
|
[
"Apache-2.0"
] | 1
|
2020-10-29T14:29:59.000Z
|
2020-10-29T14:29:59.000Z
|
iwlearn/training/__init__.py
|
mfridental/iwlearn
|
95f092556b8d8d235ddf4471b065322e04f7f8c8
|
[
"Apache-2.0"
] | null | null | null |
iwlearn/training/__init__.py
|
mfridental/iwlearn
|
95f092556b8d8d235ddf4471b065322e04f7f8c8
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from iwlearn.training.dataset import DataSet, PillowPersister
from iwlearn.training.dataset_operations import *
| 34
| 61
| 0.786765
| 16
| 136
| 6.625
| 0.625
| 0.207547
| 0.358491
| 0.490566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008197
| 0.102941
| 136
| 3
| 62
| 45.333333
| 0.860656
| 0.154412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
03571eb33799fbb80b0e65559bb00879dc7c15bc
| 55,792
|
py
|
Python
|
mysite/DATOS_DP.py
|
evvrivas/lacel
|
c041dc1bf3b5fe7a2ebd45f0d6e5147c7655f163
|
[
"MIT"
] | null | null | null |
mysite/DATOS_DP.py
|
evvrivas/lacel
|
c041dc1bf3b5fe7a2ebd45f0d6e5147c7655f163
|
[
"MIT"
] | 4
|
2021-03-18T21:36:53.000Z
|
2022-01-13T00:58:40.000Z
|
mysite/DATOS_DP.py
|
evvrivas/lacel
|
c041dc1bf3b5fe7a2ebd45f0d6e5147c7655f163
|
[
"MIT"
] | null | null | null |
def datos_prueba_DP(request):
p1=Centrales.objects.get(nombre="CH 5 NOVIEMBRE")
date=datetime.datetime(2018,1,1,16,58)
p21=Generadores(central=p1,codigo="U1-CH5N-ELIM",marca="ELIM",modelo="C0848A",cararcteristicas="GENERADOR DE 20MW ",fecha_ingreso=date)
p21.save()
#ELIM U1
#2018
date=datetime.datetime(2018,1,11,16,58)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=16.11, potencia_reactiva=2.0, temperatura_promedio=59.3, temperatura_calent=35, humedad_relativa=61, CAG="SI",
PDI_C1_1=14.5,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0.1,
PDI_C3_1=0,
PDI_C3_2=0.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,2,12,14,15)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=15.9, potencia_reactiva=4.7, temperatura_promedio=63.6, temperatura_calent=33, humedad_relativa=57, CAG="SI",
PDI_C1_1=17.5,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0.1,
PDI_C3_1=0,
PDI_C3_2=0.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,3,17,13,43)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=17.47, potencia_reactiva=4.8, temperatura_promedio=66, temperatura_calent=35, humedad_relativa=63, CAG="SI",
PDI_C1_1=11.9,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0.1,
PDI_C3_1=0.1,
PDI_C3_2=0.3,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,4,26,17,5)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=15.06, potencia_reactiva=1.7, temperatura_promedio=36, temperatura_calent=35, humedad_relativa=71, CAG="SI",
PDI_C1_1=8.2,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0,
PDI_C3_1=0,
PDI_C3_2=0.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,5,3,14,20)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.99, potencia_activa=16.51, potencia_reactiva=4.7, temperatura_promedio=66.38, temperatura_calent=37, humedad_relativa=67, CAG="SI",
PDI_C1_1=8.6,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0,
PDI_C3_1=0,
PDI_C3_2=0.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,6,1,14,34)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=17.03, potencia_reactiva=0.2, temperatura_promedio=60.03, temperatura_calent=39, humedad_relativa=74, CAG="SI",
PDI_C1_1=9.9,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0,
PDI_C3_1=0,
PDI_C3_2=0.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,7,10,10,45)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=18.14, potencia_reactiva=3.8, temperatura_promedio=68, temperatura_calent=32, humedad_relativa=77, CAG="SI",
PDI_C1_1=10.9,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0,
PDI_C3_1=0,
PDI_C3_2=0.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,8,13,14,27)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.01, potencia_activa=16.88, potencia_reactiva=1.8, temperatura_promedio=64.62, temperatura_calent=37, humedad_relativa=75, CAG="SI",
PDI_C1_1=138.6,
PDI_C1_2=19.4,
PDI_C2_1=46.8,
PDI_C2_2=0,
PDI_C3_1=16.3,
PDI_C3_2=39.7,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,9,17,13,15)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.01, potencia_activa=16.94, potencia_reactiva=3.6, temperatura_promedio=66.01, temperatura_calent=35, humedad_relativa=78, CAG="SI",
PDI_C1_1=125.2,
PDI_C1_2=27.4,
PDI_C2_1=17.3,
PDI_C2_2=23.1,
PDI_C3_1=56.3,
PDI_C3_2=35,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,10,3,13,23)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=18.6, potencia_reactiva=3.6, temperatura_promedio=67.8, temperatura_calent=35, humedad_relativa=75, CAG="SI",
PDI_C1_1=150.1,
PDI_C1_2=47.5,
PDI_C2_1=24.5,
PDI_C2_2=38.3,
PDI_C3_1=65,
PDI_C3_2=32.6,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,11,5,10,46)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.95, potencia_activa=19.06, potencia_reactiva=3.9, temperatura_promedio=68.43, temperatura_calent=34, humedad_relativa=75, CAG="SI",
PDI_C1_1=155.7,
PDI_C1_2=26.9,
PDI_C2_1=23.5,
PDI_C2_2=41,
PDI_C3_1=72.6,
PDI_C3_2=36.3,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,12,7,9,50)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=14.9, potencia_reactiva=4.2, temperatura_promedio=59.4, temperatura_calent=34, humedad_relativa=70, CAG="SI",
PDI_C1_1=155.7,
PDI_C1_2=26.9,
PDI_C2_1=23.5,
PDI_C2_2=41,
PDI_C3_1=72.6,
PDI_C3_2=36.3,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,1,7,9,56)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.01, potencia_activa=14.9, potencia_reactiva=4.2, temperatura_promedio=59.2, temperatura_calent=32, humedad_relativa=68, CAG="SI",
PDI_C1_1=100.7,
PDI_C1_2=15.1,
PDI_C2_1=20.9,
PDI_C2_2=43,
PDI_C3_1=58.9,
PDI_C3_2=39.2,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,2,4,15,01)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.98, potencia_activa=19.01, potencia_reactiva=0.9, temperatura_promedio=66, temperatura_calent=35, humedad_relativa=56, CAG="SI",
PDI_C1_1=149.3,
PDI_C1_2=34.5,
PDI_C2_1=40.4,
PDI_C2_2=40.9,
PDI_C3_1=42.9,
PDI_C3_2=28.7,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,3,6,15,50)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=17.76, potencia_reactiva=4.3, temperatura_promedio=45, temperatura_calent=71.28, humedad_relativa=56, CAG="SI",
PDI_C1_1=158.7,
PDI_C1_2=52.3,
PDI_C2_1=88,
PDI_C2_2=62.5,
PDI_C3_1=95,
PDI_C3_2=62.4,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,4,8,10,41)
p31=Mediciones_DP(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.01, potencia_activa=16.66, potencia_reactiva=3.2, temperatura_promedio=43, temperatura_calent=65.63, humedad_relativa=62, CAG="SI",
PDI_C1_1=196.7,
PDI_C1_2=52.4,
PDI_C2_1=49.6,
PDI_C2_2=39.4,
PDI_C3_1=66.3,
PDI_C3_2=57.5,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
#///////////////////////////////////////////////////
#PRUEBAS DE DESCARGAS PARCIALES PARA LA UNIDAD 2 ///
#///////////////////////////////////////////////////
date=datetime.datetime(2018,1,1,16,58)
p21=Generadores(central=p1,codigo="U2-CH5N-ELIM",marca="ELIM",modelo="C0848A",cararcteristicas="GENERADOR 20MW",fecha_ingreso=date)
p21.save()
#ELIM U2
#2018
date=datetime.datetime(2018,1,11, 14,50)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=13.35, potencia_reactiva=5.52, temperatura_promedio=71.6, temperatura_calent=36, humedad_relativa=63, CAG="SI",
PDI_C1_1=0.3,
PDI_C1_2=0,
PDI_C2_1=0.1,
PDI_C2_2=0.2,
PDI_C3_1=0.1,
PDI_C3_2=0,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,2,7, 10,9)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=17.1, potencia_reactiva=2, temperatura_promedio=66.2, temperatura_calent=33, humedad_relativa=68, CAG="SI",
PDI_C1_1=0.1,
PDI_C1_2=0,
PDI_C2_1=0.1,
PDI_C2_2=0.1,
PDI_C3_1=0,
PDI_C3_2=0,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,3,17, 13,53)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=16.55, potencia_reactiva=4.7, temperatura_promedio=73.12, temperatura_calent=35, humedad_relativa=68, CAG="SI",
PDI_C1_1=0,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0,
PDI_C3_1=0,
PDI_C3_2=0.4,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,4,26, 14,10)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=15.08, potencia_reactiva=4.6, temperatura_promedio=76.24, temperatura_calent=33, humedad_relativa=68, CAG="SI",
PDI_C1_1=0,
PDI_C1_2=0,,
PDI_C2_1=0,
PDI_C2_2=0,
PDI_C3_1=0,
PDI_C3_2=0.4,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,5,06, 9,10)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.9 , potencia_activa=15.81, potencia_reactiva=4.96, temperatura_promedio=71.94, temperatura_calent=37, humedad_relativa=74, CAG="SI",
PDI_C1_1=0.1,
PDI_C1_2=0,
PDI_C2_1=0.2,
PDI_C2_2=0.2,
PDI_C3_1=0,
PDI_C3_2=0,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,6,01, 15,57)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=17.18, potencia_reactiva=0.3, temperatura_promedio=69.4, temperatura_calent=36, humedad_relativa=75, CAG="SI",
PDI_C1_1=0.1,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0.1,
PDI_C3_1=0,
PDI_C3_2=0.6,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,7,01, 15,57)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=17.18, potencia_reactiva=0.3, temperatura_promedio=69.4, temperatura_calent=36, humedad_relativa=75, CAG="SI",
PDI_C1_1=0.1,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0.1,
PDI_C3_1=0,
PDI_C3_2=0.6,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,8,13, 13,44)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
PDI_C1_1=1.6,
PDI_C1_2=3,
PDI_C2_1=4.4,
PDI_C2_2=3.1,
PDI_C3_1=1.9,
PDI_C3_2=11.2,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,9,17, 13,26)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.95 , potencia_activa=16.2, potencia_reactiva=5, temperatura_promedio=79.4, temperatura_calent=40, humedad_relativa=78, CAG="SI",
PDI_C1_1=9.8,
PDI_C1_2=1,
PDI_C2_1=5.4,
PDI_C2_2=5.5,
PDI_C3_1=3.8,
PDI_C3_2=9.5,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,10,15, 11,2)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=15.16, potencia_reactiva=4.9, temperatura_promedio=76.63, temperatura_calent=37, humedad_relativa=78, CAG="SI",
PDI_C1_1=2.7,
PDI_C1_2=0.6,
PDI_C2_1=1.9,
PDI_C2_2=1.5,
PDI_C3_1=1.1,
PDI_C3_2=0.8,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,11,5, 10,55)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=56.96 , potencia_activa=19.03, potencia_reactiva=4.8, temperatura_promedio=77.99, temperatura_calent=0, humedad_relativa=75, CAG="SI",
PDI_C1_1=6,
PDI_C1_2=0.8,
PDI_C2_1=2.1,
PDI_C2_2=2.4,
PDI_C3_1=1.3,
PDI_C3_2=7.8,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,12,7, 9,5)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=56.96 , potencia_activa=19.03, potencia_reactiva=4.8, temperatura_promedio=77.99, temperatura_calent=0, humedad_relativa=75, CAG="SI",
PDI_C1_1=7.5,
PDI_C1_2=5.3,
PDI_C2_1=16.6,
PDI_C2_2=8.8,
PDI_C3_1=17,
PDI_C3_2=16.3,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,1,7,13,27)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.03 , potencia_activa=16.02, potencia_reactiva=5, temperatura_promedio=68, temperatura_calent=32, humedad_relativa=63, CAG="SI",
PDI_C1_1=22.01,
PDI_C1_2=0,
PDI_C2_1=9.5,
PDI_C2_2=4,
PDI_C3_1=9.4,
PDI_C3_2=0.6,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,2,4, 14,9)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.99 , potencia_activa=18.91, potencia_reactiva=4.1, temperatura_promedio=83.11, temperatura_calent=35, humedad_relativa=56, CAG="SI",
PDI_C1_1=9.3,
PDI_C1_2=1.5,
PDI_C2_1=5,
PDI_C2_2=5.4,
PDI_C3_1=9.6,
PDI_C3_2=0.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,3,11,13,51)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.99 , potencia_activa=18.6, potencia_reactiva=4.2, temperatura_promedio=78.79, temperatura_calent=47, humedad_relativa=52, CAG="SI",
PDI_C1_1=19.4,
PDI_C1_2=1.7,
PDI_C2_1=11.4,
PDI_C2_2=6.5,
PDI_C3_1=15.7,
PDI_C3_2=3.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,4,23, 17,37)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.01 , potencia_activa=15.79, potencia_reactiva=3.1, temperatura_promedio=75.75, temperatura_calent=47, humedad_relativa=65, CAG="SI",
PDI_C1_1=6.9,
PDI_C1_2=0.6,
PDI_C2_1=4.7,
PDI_C2_2=3.4,
PDI_C3_1=4.9,
PDI_C3_2=3.4,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,1,1,16,58)
p21=Generadores(central=p1,codigo="U3-CH5N-ELIM",marca="ELIM",modelo="C0848A",cararcteristicas="GENERADOR 20MW",fecha_ingreso=date)
p21.save()
#ELIM U3
#2017
date=datetime.datetime(2018,01,11, 17,5)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=15.76, potencia_reactiva=1.2, temperatura_promedio=73.6, temperatura_calent=35, humedad_relativa=61, CAG="SI",
PDI_C1_1=0.9,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=1.6,
PDI_C3_1=0.5,
PDI_C3_2=2.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,02,27, 9,8)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=12.15, potencia_reactiva=4.3, temperatura_promedio=57.1, temperatura_calent=30, humedad_relativa=72, CAG="SI",
PDI_C1_1=0,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=1.2,
PDI_C3_1=0.5,
PDI_C3_2=1.7,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
#ELIM U3
#2018
date=datetime.datetime(2018,03,11, 20,11)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=18.21, potencia_reactiva=4.7, temperatura_promedio=63.38, temperatura_calent=65, humedad_relativa=32, CAG="SI",
PDI_C1_1=0,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=1.2,
PDI_C3_1=0.5,
PDI_C3_2=1.7,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,04,26, 13,52)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=15.6, potencia_reactiva=4.8, temperatura_promedio=62.76, temperatura_calent=33, humedad_relativa=37, CAG="SI",
PDI_C1_1=1.2,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=1.2,
PDI_C3_1=2,
PDI_C3_2=0,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,05,06, 9,22)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=10.8, potencia_reactiva=5, temperatura_promedio=58.4, temperatura_calent=33, humedad_relativa=79, CAG="SI",
PDI_C1_1=1,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0.8,
PDI_C3_1=0,
PDI_C3_2=17,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,06,06, 8,23)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=15.49, potencia_reactiva=3.8, temperatura_promedio=60.47, temperatura_calent=42, humedad_relativa=70, CAG="SI",
PDI_C1_1=0,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0.8,
PDI_C3_1=0.2,
PDI_C3_2=1.2,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,07,06, 8,23)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=15.49, potencia_reactiva=3.8, temperatura_promedio=60.47, temperatura_calent=42, humedad_relativa=70, CAG="SI",
PDI_C1_1=0,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0.8,
PDI_C3_1=0.2,
PDI_C3_2=1.2,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,8,13, 14,51)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.02 , potencia_activa=17.34, potencia_reactiva=1.5, temperatura_promedio=63.28, temperatura_calent=37.5, humedad_relativa=75, CAG="SI",
PDI_C1_1=19.7,
PDI_C1_2=4.3,
PDI_C2_1=2.6,
PDI_C2_2=0,
PDI_C3_1=2.8,
PDI_C3_2=12.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,9,17, 13,37)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.96, potencia_activa=16.85, potencia_reactiva=4.7, temperatura_promedio=65.78, temperatura_calent=34.5, humedad_relativa=78, CAG="SI",
PDI_C1_1=20.3,
PDI_C1_2=3.2,
PDI_C2_1=0.4,
PDI_C2_2=24.1,
PDI_C3_1=2.5,
PDI_C3_2=8.8,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,10,8, 13,45)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=12.48, potencia_reactiva=5.1, temperatura_promedio=58.3, temperatura_calent=33, humedad_relativa=80, CAG="SI",
PDI_C1_1=17.6,
PDI_C1_2=2.2,
PDI_C2_1=11.7,
PDI_C2_2=3,
PDI_C3_1=2.8,
PDI_C3_2=4.3,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,11,05, 13,30)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.95 , potencia_activa=15.09, potencia_reactiva=4.8, temperatura_promedio=58.43, temperatura_calent=0, humedad_relativa=75, CAG="SI",
PDI_C1_1=18.2,
PDI_C1_2=3.7,
PDI_C2_1=0.7,
PDI_C2_2=17.9,
PDI_C3_1=2.6,
PDI_C3_2=6.9,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,12,7, 10,5)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.01 , potencia_activa=14.9, potencia_reactiva=4.9, temperatura_promedio=56.6, temperatura_calent=34, humedad_relativa=70, CAG="SI",
PDI_C1_1=16,
PDI_C1_2=1.7,
PDI_C2_1=0.6,
PDI_C2_2=3.5,
PDI_C3_1=1.4,
PDI_C3_2=9.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,1,7, 14,14)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.01, potencia_activa=16.3, potencia_reactiva=1.3, temperatura_promedio=56.3, temperatura_calent=31, humedad_relativa=61, CAG="SI",
PDI_C1_1=12.2,
PDI_C1_2=7.5,
PDI_C2_1=0.2,
PDI_C2_2=7,
PDI_C3_1=1.5,
PDI_C3_2=7,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,2,3, 14,41)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.98 , potencia_activa=18.92, potencia_reactiva=3.8, temperatura_promedio=68.48, temperatura_calent=36, humedad_relativa=56, CAG="SI",
PDI_C1_1=11.9,
PDI_C1_2=1.2,
PDI_C2_1=5.8,
PDI_C2_2=6,
PDI_C3_1=1.1,
PDI_C3_2=8.7,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,3,3, 13,19)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.98 , potencia_activa=19.63, potencia_reactiva=3.4, temperatura_promedio=68.15, temperatura_calent=42, humedad_relativa=50, CAG="SI",
PDI_C1_1=17.2,
PDI_C1_2=2.1,
PDI_C2_1=5.2,
PDI_C2_2=6.2,
PDI_C3_1=1.6,
PDI_C3_2=7.6,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,4,8, 11,05)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.01 , potencia_activa=16.34, potencia_reactiva=1.9, temperatura_promedio=63.67, temperatura_calent=42, humedad_relativa=60, CAG="SI",
PDI_C1_1=15.6,
PDI_C1_2=1.7,
PDI_C2_1=0,
PDI_C2_2=11.2,
PDI_C3_1=1.4,
PDI_C3_2=8.4,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,1,1,16,58)
p21=Generadores(central=p1,codigo="U4-CH5N-ELIM",marca="ELIM",modelo="C0848A",cararcteristicas="GENERADOR 20MW",fecha_ingreso=date)
p21.save()
#ELIM U4
#2017
date = datetime.datetime(2018, 1, 11, 17, 47)  # was "01,11": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=15.76, potencia_reactiva=2.1, temperatura_promedio=64.7, temperatura_calent=32, humedad_relativa=74, CAG="SI",
PDI_C1_1=0.2,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0.2,
PDI_C3_1=7.3,
PDI_C3_2=11.7,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date = datetime.datetime(2018, 2, 21, 17, 47)  # was "02": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=17.79, potencia_reactiva=1.8, temperatura_promedio=66.2, temperatura_calent=33, humedad_relativa=68, CAG="SI",
PDI_C1_1=0.3,
PDI_C1_2=0,
PDI_C2_1=0.1,
PDI_C2_2=0.5,
PDI_C3_1=4.4,
PDI_C3_2=9.8,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date = datetime.datetime(2018, 3, 21, 17, 47)  # was "03": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=17.79, potencia_reactiva=1.8, temperatura_promedio=66.2, temperatura_calent=33, humedad_relativa=68, CAG="SI",
PDI_C1_1=0.3,
PDI_C1_2=0,
PDI_C2_1=0.1,
PDI_C2_2=0.5,
PDI_C3_1=4.4,
PDI_C3_2=9.8,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date = datetime.datetime(2018, 4, 26, 14, 0)  # was "04": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=14.2, potencia_reactiva=5.2, temperatura_promedio=72.4, temperatura_calent=36, humedad_relativa=76, CAG="SI",
PDI_C1_1=0.2,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0,
PDI_C3_1=3.1,
PDI_C3_2=8.3,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date = datetime.datetime(2018, 5, 12, 13, 31)  # was "05": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.99, potencia_activa=13.64, potencia_reactiva=1.7, temperatura_promedio=68.62, temperatura_calent=40, humedad_relativa=79, CAG="SI",
PDI_C1_1=0.1,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0,
PDI_C3_1=3.1,
PDI_C3_2=7.7,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date = datetime.datetime(2018, 6, 6, 8, 46)  # was "06,06": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=12.78, potencia_reactiva=3.2, temperatura_promedio=69.73, temperatura_calent=42, humedad_relativa=70, CAG="SI",
PDI_C1_1=0,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0.3,
PDI_C3_1=3.1,
PDI_C3_2=7.9,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date = datetime.datetime(2018, 7, 7, 8, 46)  # was "07,07": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=12.78, potencia_reactiva=3.2, temperatura_promedio=69.73, temperatura_calent=42, humedad_relativa=70, CAG="SI",
PDI_C1_1=0,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0.3,
PDI_C3_1=3.1,
PDI_C3_2=7.9,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,8,21, 10,14)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.98, potencia_activa=13.15, potencia_reactiva=1.7, temperatura_promedio=66.13, temperatura_calent=34, humedad_relativa=79, CAG="SI",
PDI_C1_1=31,
PDI_C1_2=2.6,
PDI_C2_1=23.6,
PDI_C2_2=0.3,
PDI_C3_1=23.4,
PDI_C3_2=67.3,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,9,11, 16,9)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=11.76, potencia_reactiva=3.8, temperatura_promedio=70.16, temperatura_calent=37, humedad_relativa=80, CAG="SI",
PDI_C1_1=40.3,
PDI_C1_2=20.5,
PDI_C2_1=18.4,
PDI_C2_2=11.6,
PDI_C3_1=36.2,
PDI_C3_2=96.9,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2018,10,15, 11,11)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=11.7, potencia_reactiva=4.4, temperatura_promedio=70.3, temperatura_calent=36, humedad_relativa=90, CAG="SI",
PDI_C1_1=42.7,
PDI_C1_2=0.6,
PDI_C2_1=18.1,
PDI_C2_2=13.8,
PDI_C3_1=30.1,
PDI_C3_2=99.7,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date = datetime.datetime(2018, 11, 5, 13, 38)  # was "05": leading-zero int literals are a SyntaxError in Python 3
p31 = Mediciones(central=p1, generador=p21, codigo_usuario="7807004",
                 # frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
                 frecuencia=60, potencia_activa=13.08, potencia_reactiva=4.2, temperatura_promedio=70.69, temperatura_calent=0, humedad_relativa=75, CAG="SI",
                 PDI_C1_1=46.6,
                 PDI_C1_2=3.6,
                 PDI_C2_1=28.6,
                 PDI_C2_2=18.1,
                 PDI_C3_1=36.2,
                 PDI_C3_2=120.7,
                 PDI_C4_1=0,
                 PDI_C4_2=0,
                 PDI_C5_1=0,
                 PDI_C5_2=0,
                 PDI_C6_1=0,
                 PDI_C6_2=0,
                 fecha_ingreso=date, fecha_del_analisis=date)
# BUG FIX: this November-2018 record was built but never persisted — every
# sibling record is followed by p31.save(), and `date`/`p31` were immediately
# reassigned on the next lines, silently dropping this measurement.
p31.save()
date=datetime.datetime(2018,12,12, 10,18)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=13.9, potencia_reactiva=4.7, temperatura_promedio=65, temperatura_calent=34, humedad_relativa=72, CAG="SI",
PDI_C1_1=35.6,
PDI_C1_2=1,
PDI_C2_1=11.5,
PDI_C2_2=11.1,
PDI_C3_1=31.7,
PDI_C3_2=106.9,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,1,7, 13,38)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=13.71, potencia_reactiva=4.7, temperatura_promedio=66.5, temperatura_calent=34, humedad_relativa=66.05, CAG="SI",
PDI_C1_1=17.8,
PDI_C1_2=23.3,
PDI_C2_1=28.8,
PDI_C2_2=14.1,
PDI_C3_1=34.8,
PDI_C3_2=132.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,2,4, 14,51)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.02, potencia_activa=16, potencia_reactiva=2.9, temperatura_promedio=77.07, temperatura_calent=36, humedad_relativa=56, CAG="SI",
PDI_C1_1=79,
PDI_C1_2=2.6,
PDI_C2_1=34.3,
PDI_C2_2=11.3,
PDI_C3_1=48.3,
PDI_C3_2=162.6,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,3,26, 14,51)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.02, potencia_activa=14.2, potencia_reactiva=3, temperatura_promedio=70.79, temperatura_calent=47, humedad_relativa=33, CAG="SI",
PDI_C1_1=67.9,
PDI_C1_2=2.9,
PDI_C2_1=33.3,
PDI_C2_2=16.8,
PDI_C3_1=45.9,
PDI_C3_2=130.1,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date=datetime.datetime(2019,4,1, 14,16)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60, potencia_activa=15.94, potencia_reactiva=1.8, temperatura_promedio=76.91, temperatura_calent=46.9, humedad_relativa=60, CAG="SI",
PDI_C1_1=63.9,
PDI_C1_2=3.7,
PDI_C2_1=35.4,
PDI_C2_2=15,
PDI_C3_1=65.2,
PDI_C3_2=129.7,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
#///////////////////////////////////////////////////
#PRUEBAS DE DESCARGAS PARCIALES PARA LA UNIDAD 6 ///
#///////////////////////////////////////////////////
date = datetime.datetime(2018, 1, 1, 16, 58)
# "GENERADOR 40 MB" corrected to "40 MW" (megawatts), consistent with the
# parallel U7 ANDRITZ entry below.  NOTE(review): the misspelled keyword
# `cararcteristicas` is the model's actual field name and must stay as-is here.
p21 = Generadores(central=p1, codigo="U6-CH5N-ANDRITZ", marca="ANDRITZ HYDRO", modelo="NA",
                  cararcteristicas="GENERADOR 40 MW", fecha_ingreso=date)
p21.save()
#ELIM U6
#2017
date=datetime.datetime(2019,1,21, 13,10)
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=40.03, potencia_reactiva=4.7, temperatura_promedio=0, temperatura_calent=0, humedad_relativa=0, CAG="SI",
PDI_C1_1=0,
PDI_C1_2=0,
PDI_C2_1=0,
PDI_C2_2=0,
PDI_C3_1=0,
PDI_C3_2=0,
PDI_C4_1=0,
PDI_C4_2=0,
PDI_C5_1=0,
PDI_C5_2=0,
PDI_C6_1=0,
PDI_C6_2=0,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
#ELIM U6
#2018
date = datetime.datetime(2019, 2, 19, 9, 1)  # was "02": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.02 , potencia_activa=38.21, potencia_reactiva=4.4, temperatura_promedio=76.24, temperatura_calent=0, humedad_relativa=0, CAG="SI",
PDI_C1_1=1.2,
PDI_C1_2=0,
PDI_C2_1=6.2,
PDI_C2_2=0.5,
PDI_C3_1=6,
PDI_C3_2=3.2,
PDI_C4_1=4.3,
PDI_C4_2=4,
PDI_C5_1=17.2,
PDI_C5_2=8.5,
PDI_C6_1=13.5,
PDI_C6_2=12.6,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date = datetime.datetime(2019, 3, 21, 7, 55)  # was "03": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60.02 , potencia_activa=38.21, potencia_reactiva=4.4, temperatura_promedio=76.45, temperatura_calent=0, humedad_relativa=0, CAG="SI",
PDI_C1_1=5.6,
PDI_C1_2=2.7,
PDI_C2_1=16.4,
PDI_C2_2=14.4,
PDI_C3_1=16.5,
PDI_C3_2=12.8,
PDI_C4_1=1.3,
PDI_C4_2=0,
PDI_C5_1=2,
PDI_C5_2=1.5,
PDI_C6_1=5.9,
PDI_C6_2=4,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date = datetime.datetime(2019, 4, 23, 18, 41)  # was "04": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=39.14, potencia_reactiva=1.73, temperatura_promedio=77.45, temperatura_calent=0, humedad_relativa=0, CAG="SI",
PDI_C1_1=6.7,
PDI_C1_2=3.8,
PDI_C2_1=17.9,
PDI_C2_2=11,
PDI_C3_1=16.8,
PDI_C3_2=15.4,
PDI_C4_1=3.8,
PDI_C4_2=0,
PDI_C5_1=7.9,
PDI_C5_2=1.3,
PDI_C6_1=7.9,
PDI_C6_2=3.7,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
#///////////////////////////////////////////////////
#PRUEBAS DE DESCARGAS PARCIALES PARA LA UNIDAD 7 ///
#///////////////////////////////////////////////////
date=datetime.datetime(2018,1,1,16,58)
p21=Generadores(central=p1,codigo="U7-CH5N-ANDRITZ",marca="ANDRITZ HYDRO",modelo="NA",cararcteristicas="GENERADOR 40 MW",fecha_ingreso=date)
p21.save()
#ELIM U7
#2017
date = datetime.datetime(2019, 1, 29, 17, 41)  # was "01": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=25.50, potencia_reactiva=-1.68, temperatura_promedio=75.17, temperatura_calent=0, humedad_relativa=0, CAG="SI",
PDI_C1_1=1,
PDI_C1_2=0,
PDI_C2_1=5.6,
PDI_C2_2=1.4,
PDI_C3_1=7.8,
PDI_C3_2=6.2,
PDI_C4_1=4.3,
PDI_C4_2=12.2,
PDI_C5_1=15.9,
PDI_C5_2=19.6,
PDI_C6_1=43,
PDI_C6_2=17.5,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
#ELIM U7
#2018
date = datetime.datetime(2019, 2, 27, 20, 18)  # was "02": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=34.75, potencia_reactiva=1.98, temperatura_promedio=79.12, temperatura_calent=0, humedad_relativa=0, CAG="SI",
PDI_C1_1=3.2,
PDI_C1_2=0,
PDI_C2_1=2.1,
PDI_C2_2=1,
PDI_C3_1=5.3,
PDI_C3_2=5.1,
PDI_C4_1=1.3,
PDI_C4_2=9.3,
PDI_C5_1=15.2,
PDI_C5_2=15.3,
PDI_C6_1=36.2,
PDI_C6_2=16.1,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date = datetime.datetime(2019, 3, 19, 14, 5)  # was "03": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=60 , potencia_activa=37.37, potencia_reactiva=1.96, temperatura_promedio=79.12, temperatura_calent=0, humedad_relativa=0, CAG="SI",
PDI_C1_1=4.4,
PDI_C1_2=9.4,
PDI_C2_1=15.2,
PDI_C2_2=20.4,
PDI_C3_1=43.4,
PDI_C3_2=18.3,
PDI_C4_1=8.4,
PDI_C4_2=0.3,
PDI_C5_1=3.8,
PDI_C5_2=1.9,
PDI_C6_1=6.6,
PDI_C6_2=5.9,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
date = datetime.datetime(2019, 4, 24, 19, 46)  # was "04": leading-zero int literals are a SyntaxError in Python 3
p31=Mediciones(central=p1,generador=p21,codigo_usuario="7807004",
#frecuencia=Hz, potencia_activa=Mw, potencia_reactiva=Mvar, temperatura_promedio=°C, temperatura_calent=°C, humedad_relativa=%,
frecuencia=59.96 , potencia_activa=35.93, potencia_reactiva=2.42, temperatura_promedio=79.93, temperatura_calent=0, humedad_relativa=0, CAG="SI",
PDI_C1_1=6.4,
PDI_C1_2=13.8,
PDI_C2_1=19.3,
PDI_C2_2=23,
PDI_C3_1=44.6,
PDI_C3_2=19.2,
PDI_C4_1=9.1,
PDI_C4_2=0.2,
PDI_C5_1=5.6,
PDI_C5_2=2.1,
PDI_C6_1=10.2,
PDI_C6_2=6.3,
fecha_ingreso=date,fecha_del_analisis=date)
p31.save()
#///////////////////////////////////////////////////
#PRUEBAS DE DESCARGAS PARCIALE
return render(request,'principal.html',locals())
| 35.787043
| 158
| 0.626685
| 8,809
| 55,792
| 3.672721
| 0.022704
| 0.052422
| 0.037554
| 0.046734
| 0.903935
| 0.872531
| 0.837542
| 0.822799
| 0.78546
| 0.780824
| 0
| 0.147645
| 0.24758
| 55,792
| 1,559
| 159
| 35.787043
| 0.619619
| 0.175437
| 0
| 0.67498
| 0
| 0
| 0.019868
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ceedc761c8b4d7cf69e94bedaa7d7498c85494d7
| 9,773
|
py
|
Python
|
userbot/modules/santet.py
|
bryanasfuk/Baphomet
|
bf3c3d9589511534ad848b3aa0b59e3d6b113282
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/santet.py
|
bryanasfuk/Baphomet
|
bf3c3d9589511534ad848b3aa0b59e3d6b113282
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/santet.py
|
bryanasfuk/Baphomet
|
bf3c3d9589511534ad848b3aa0b59e3d6b113282
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
import asyncio
from time import sleep

from userbot import CMD_HELP
from userbot.events import register
@register(outgoing=True, pattern=r"^\.santet(?: |$)(.*)")
async def typewriter(typew):
    """Joke command handler for `.santet`: shows a few status messages and an
    animated percentage/progress-bar countup by repeatedly editing the message.

    Fixes vs. the original:
    - the ~300 copy-pasted edit/sleep stanzas are generated by a loop that
      produces the identical sequence of message edits;
    - blocking `time.sleep` inside an `async def` froze the whole event loop;
      replaced with `await asyncio.sleep`.
    """
    typew.pattern_match.group(1)  # kept from the original; result intentionally unused
    await typew.edit("`Telah diperintahkan untuk mengirim dosa 😈`")
    await asyncio.sleep(2)
    await typew.edit("`Mencari nama orang pendosa...`")
    await asyncio.sleep(1)
    await typew.edit("`Dosa segera di kirim`")
    await asyncio.sleep(1)
    await typew.edit("0%")
    # Each full block "█" represents 6 percentage points; in between, one of
    # five partial-width glyphs is appended (same sequence as the original).
    partials = ("▎", "▍", "▌", "▊", "▉")
    for number in range(1, 100):
        full, frac = divmod(number, 6)
        bar = "█" * full + ("" if frac == 0 else partials[frac - 1])
        await typew.edit(str(number) + "% " + bar)
        await asyncio.sleep(0.03)
    # The original animation tops out at 100% with "████████████████▌"
    # (the 99% bar repeated) — preserved verbatim.
    await typew.edit("100% " + "█" * 16 + "▌")
    await asyncio.sleep(1)
    await typew.edit("`Berhasil menjadikan dia pendosa`")
# Register the .santet entry in the bot's help index.
CMD_HELP.update(
    {
        # NOTE: the backslash at end-of-line inside the literal is a line
        # continuation, so the stored text is "`.santet`\nUsage: Canda Santet."
        "santet": "`.santet`\
\nUsage: Canda Santet."
    }
)
| 29.978528
| 67
| 0.48941
| 1,275
| 9,773
| 4.443137
| 0.11451
| 0.185349
| 0.259488
| 0.300088
| 0.789585
| 0.774934
| 0.774934
| 0.774934
| 0.774934
| 0.774934
| 0
| 0.056324
| 0.267881
| 9,773
| 325
| 68
| 30.070769
| 0.61174
| 0
| 0
| 0.634375
| 0
| 0
| 0.147447
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009375
| 0
| 0.009375
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cefea5f6d510ef1c19a1aaaa4429d37e97b625fd
| 74
|
py
|
Python
|
park/envs/tf_placement_sim/__init__.py
|
utkarsh5k/park
|
e7eba74f532204564df42a8e82a65ed025ce3b30
|
[
"MIT"
] | 180
|
2019-04-30T05:50:32.000Z
|
2022-03-28T01:32:07.000Z
|
park/envs/tf_placement_sim/__init__.py
|
utkarsh5k/park
|
e7eba74f532204564df42a8e82a65ed025ce3b30
|
[
"MIT"
] | 21
|
2019-05-03T17:42:54.000Z
|
2022-01-25T19:31:42.000Z
|
park/envs/tf_placement_sim/__init__.py
|
utkarsh5k/park
|
e7eba74f532204564df42a8e82a65ed025ce3b30
|
[
"MIT"
] | 42
|
2019-05-01T15:15:19.000Z
|
2021-11-19T05:27:09.000Z
|
from park.envs.tf_placement_sim.tf_placement_sim import TFPlacementSimEnv
| 37
| 73
| 0.905405
| 11
| 74
| 5.727273
| 0.727273
| 0.349206
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054054
| 74
| 1
| 74
| 74
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
300de9d6e8bd38fa7bada3f4f90c08f57a78ed1b
| 105
|
py
|
Python
|
zvt/domain/meta/__init__.py
|
Evergreen2020/zvt
|
446a2512d716a38a12164b6d4468a6c9de01b986
|
[
"MIT"
] | 1
|
2020-04-06T04:17:53.000Z
|
2020-04-06T04:17:53.000Z
|
zvt/domain/meta/__init__.py
|
Evergreen2020/zvt
|
446a2512d716a38a12164b6d4468a6c9de01b986
|
[
"MIT"
] | 2
|
2019-12-20T13:12:30.000Z
|
2020-01-03T06:24:30.000Z
|
zvt/domain/meta/__init__.py
|
Evergreen2020/zvt
|
446a2512d716a38a12164b6d4468a6c9de01b986
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from zvt.domain.meta.coin_meta import *
from zvt.domain.meta.stock_meta import *
| 26.25
| 40
| 0.714286
| 17
| 105
| 4.294118
| 0.588235
| 0.191781
| 0.356164
| 0.465753
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01087
| 0.12381
| 105
| 3
| 41
| 35
| 0.782609
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
304cce355edc84c16fb17a97ddbaddf0b44f5ff1
| 12,318
|
py
|
Python
|
tests/application/FreischaltCode/test_freischaltcode_jobs.py
|
digitalservice4germany/erica
|
7e07d88f3db78ab6e4f7cccad8dfef2a4b3a71b2
|
[
"MIT"
] | 3
|
2022-01-31T15:17:17.000Z
|
2022-03-01T16:15:47.000Z
|
tests/application/FreischaltCode/test_freischaltcode_jobs.py
|
digitalservice4germany/erica
|
7e07d88f3db78ab6e4f7cccad8dfef2a4b3a71b2
|
[
"MIT"
] | 59
|
2022-01-31T14:04:20.000Z
|
2022-03-31T20:08:47.000Z
|
tests/application/FreischaltCode/test_freischaltcode_jobs.py
|
digitalservice4germany/erica
|
7e07d88f3db78ab6e4f7cccad8dfef2a4b3a71b2
|
[
"MIT"
] | 1
|
2022-03-10T09:24:28.000Z
|
2022-03-10T09:24:28.000Z
|
import logging
from datetime import date
from unittest.mock import patch, MagicMock, call, AsyncMock
from uuid import uuid4
import pytest
from erica.application.FreischaltCode.Jobs.jobs import request_freischalt_code, activate_freischalt_code, \
revocate_freischalt_code
from erica.application.JobService.job_service import JobService
from erica.application.JobService.job_service_factory import get_job_service
from erica.domain.FreischaltCode.FreischaltCode import FreischaltCodeActivatePayload, FreischaltCodeRequestPayload, \
FreischaltCodeRevocatePayload
from erica.domain.Shared.EricaRequest import RequestType
from erica.domain.erica_request.erica_request import EricaRequest
from erica.erica_legacy.elster_xml.xml_parsing.elster_specifics_xml_parsing import get_antrag_id_from_xml, \
get_transferticket_from_xml
from erica.erica_legacy.pyeric.pyeric_response import PyericResponse
from tests.utils import read_text_from_sample
class TestRequestFreischaltcode:
    """Unit tests for the request_freischalt_code job: all collaborators are
    mocked, so these only verify the wiring/delegation, not Elster behavior."""

    @pytest.mark.asyncio
    async def test_perform_job_called_with_correct_parameters(self):
        # The job must delegate to perform_job, forwarding the repository,
        # service, root logger and payload type obtained from the (mocked)
        # job-service factory.
        request_id = "1234"
        with patch("erica.application.JobService.job_service_factory.get_job_service", MagicMock()) as mock_get_service, \
                patch("erica.application.FreischaltCode.Jobs.jobs.perform_job", AsyncMock()) as mock_perform_job:
            await request_freischalt_code(request_id)

            assert mock_perform_job.mock_calls == [call(request_id=request_id,
                                                        repository=mock_get_service().repository,
                                                        service=mock_get_service(),
                                                        logger=logging.getLogger(),
                                                        payload_type=mock_get_service().payload_type)]

    @pytest.mark.asyncio
    async def test_get_job_service_called_with_correct_param(self):
        # The factory must be asked for the freischalt_code_request service.
        request_id = "1234"
        with patch("erica.application.JobService.job_service_factory.get_job_service", MagicMock()) as mock_get_service, \
                patch("erica.application.FreischaltCode.Jobs.jobs.perform_job", AsyncMock()):
            await request_freischalt_code(request_id)

            assert mock_get_service.mock_calls == [call(RequestType.freischalt_code_request)]

    @pytest.mark.asyncio
    async def test_request_controller_process_called_with_correct_params(self):
        # Build a real JobService around mocks so that the job's call into the
        # request controller (construct with parsed payload, then process) can
        # be observed on mock_req_controller.
        req_payload = {"name": "Leon, der Profi"}
        mock_req_controller = MagicMock()
        mock_get_service = MagicMock(
            return_value=JobService(
                job_repository=MagicMock(),
                background_worker=MagicMock(),
                payload_type=MagicMock(parse_obj=MagicMock(return_value=req_payload)),
                request_controller=mock_req_controller,
                job_method=request_freischalt_code
            ))
        with patch("erica.application.JobService.job_service_factory.get_job_service", mock_get_service), \
                patch(
                    "erica.erica_legacy.request_processing.requests_controller.UnlockCodeRequestController", mock_req_controller):
            await request_freischalt_code("1234")

            # Controller constructed with (payload, include_elster_responses=True)
            # and then .process() invoked on the instance.
            assert [call(req_payload, True), call().process()] in mock_req_controller.mock_calls
class TestIntegrationWithDatabaseAndRequestFreischaltcode:
    """Integration test: runs the request job against a fake DB, mocking only
    the ERiC layer (pyeric controller) with a canned server response."""

    @pytest.mark.asyncio
    @pytest.mark.usefixtures('async_fake_db_connection_with_erica_table_in_settings')
    async def test_if_entity_in_data_base_then_set_correct_result_in_database(self):
        payload = FreischaltCodeRequestPayload(tax_id_number='04452397687', date_of_birth=date(1950, 8, 16))
        service = get_job_service(RequestType.freischalt_code_request)
        # Seed the repository with a pending erica request for the job to pick up.
        entity = service.repository.create(EricaRequest(
            request_id=uuid4(),
            payload=payload,
            creator_id="tests",
            type=RequestType.freischalt_code_request
        ))
        xml_string = read_text_from_sample('sample_vast_request_response.xml')
        with patch('erica.erica_legacy.pyeric.pyeric_controller.UnlockCodeRequestPyericProcessController.get_eric_response',
                   MagicMock(return_value=PyericResponse(eric_response="eric_response", server_response=xml_string))):
            await request_freischalt_code(entity.request_id)

            # The job must store the parsed Elster identifiers and the
            # transfer ticket from the sample XML on the request entity.
            updated_entity = service.repository.get_by_job_request_id(entity.request_id)
            assert updated_entity.result == {'elster_request_id': get_antrag_id_from_xml(xml_string),
                                             'idnr': payload.tax_id_number,
                                             'transferticket': get_transferticket_from_xml(xml_string)}
class TestActivateFreischaltcode:
    """Unit tests for the wiring of the activate_freischalt_code job."""

    @pytest.mark.asyncio
    async def test_perform_job_called_with_correct_parameters(self):
        """perform_job must receive the request id plus the service, its
        repository, the module logger and the service's payload type."""
        request_id = "1234"
        with patch("erica.application.JobService.job_service_factory.get_job_service", MagicMock()) as mock_get_service, \
                patch("erica.application.FreischaltCode.Jobs.jobs.perform_job", AsyncMock()) as mock_perform_job:
            await activate_freischalt_code(request_id)
            assert mock_perform_job.mock_calls == [call(request_id=request_id,
                                                        repository=mock_get_service().repository,
                                                        service=mock_get_service(),
                                                        logger=logging.getLogger(),
                                                        payload_type=mock_get_service().payload_type)]

    @pytest.mark.asyncio
    async def test_get_job_service_called_with_correct_param(self):
        """The job must resolve its service for the *activate* request type."""
        request_id = "1234"
        with patch("erica.application.JobService.job_service_factory.get_job_service", MagicMock()) as mock_get_service, \
                patch("erica.application.FreischaltCode.Jobs.jobs.perform_job", AsyncMock()):
            await activate_freischalt_code(request_id)
            assert mock_get_service.mock_calls == [call(RequestType.freischalt_code_activate)]

    @pytest.mark.asyncio
    async def test_request_controller_process_called_with_correct_params(self):
        """The activation controller must be built with the parsed payload and
        then process() invoked on it."""
        req_payload = {"name": "Leon, der Profi"}
        mock_req_controller = MagicMock()
        # JobService built from mocks; parse_obj yields req_payload verbatim.
        mock_get_service = MagicMock(
            return_value=JobService(
                job_repository=MagicMock(),
                background_worker=MagicMock(),
                payload_type=MagicMock(parse_obj=MagicMock(return_value=req_payload)),
                request_controller=mock_req_controller,
                job_method=activate_freischalt_code
            ))
        with patch("erica.application.JobService.job_service_factory.get_job_service", mock_get_service), \
                patch(
                    "erica.erica_legacy.request_processing.requests_controller.UnlockCodeActivationRequestController", mock_req_controller):
            await activate_freischalt_code("1234")
            # _CallList.__contains__ matches a contiguous call sub-sequence.
            assert [call(req_payload, True), call().process()] in mock_req_controller.mock_calls
class TestIntegrationWithDatabaseAndActivateFreischaltcode:
    """Integration test: run the freischalt-code *activation* job against the
    fake database and verify the parsed result is persisted."""

    @pytest.mark.asyncio
    @pytest.mark.usefixtures('async_fake_db_connection_with_erica_table_in_settings')
    async def test_if_entity_in_data_base_then_set_correct_result_in_database(self):
        # Persist an activation request entity for the job to pick up.
        payload = FreischaltCodeActivatePayload(tax_id_number='04452397687', freischalt_code='Alohomora', elster_request_id='br1272xf3i59m2323ft9qtk7iqzxzke4')
        service = get_job_service(RequestType.freischalt_code_activate)
        entity = service.repository.create(EricaRequest(
            request_id=uuid4(),
            payload=payload,
            creator_id="tests",
            type=RequestType.freischalt_code_activate
        ))
        # Stub the ERiC layer; the sample XML stands in for ELSTER's answer.
        xml_string = read_text_from_sample('sample_vast_activation_response.xml')
        with patch('erica.erica_legacy.pyeric.pyeric_controller.UnlockCodeActivationPyericProcessController.get_eric_response',
                   MagicMock(return_value=PyericResponse(eric_response="eric_response", server_response=xml_string))):
            await activate_freischalt_code(entity.request_id)
        # The job must have persisted the fields parsed from the XML.
        updated_entity = service.repository.get_by_job_request_id(entity.request_id)
        assert updated_entity.result == {'elster_request_id': get_antrag_id_from_xml(xml_string),
                                         'idnr': payload.tax_id_number,
                                         'transferticket': get_transferticket_from_xml(xml_string)}
class TestRevocateFreischaltcode:
    """Unit tests for the wiring of the revocate_freischalt_code job."""

    @pytest.mark.asyncio
    async def test_perform_job_called_with_correct_parameters(self):
        """perform_job must receive the request id plus the service, its
        repository, the module logger and the service's payload type."""
        request_id = "1234"
        with patch("erica.application.JobService.job_service_factory.get_job_service", MagicMock()) as mock_get_service, \
                patch("erica.application.FreischaltCode.Jobs.jobs.perform_job", AsyncMock()) as mock_perform_job:
            await revocate_freischalt_code(request_id)
            assert mock_perform_job.mock_calls == [call(request_id=request_id,
                                                        repository=mock_get_service().repository,
                                                        service=mock_get_service(),
                                                        logger=logging.getLogger(),
                                                        payload_type=mock_get_service().payload_type)]

    @pytest.mark.asyncio
    async def test_get_job_service_called_with_correct_param(self):
        """The job must resolve its service for the *revocation* request type."""
        request_id = "1234"
        with patch("erica.application.JobService.job_service_factory.get_job_service", MagicMock()) as mock_get_service, \
                patch("erica.application.FreischaltCode.Jobs.jobs.perform_job", AsyncMock()):
            await revocate_freischalt_code(request_id)
            assert mock_get_service.mock_calls == [call(RequestType.freischalt_code_revocate)]

    @pytest.mark.asyncio
    async def test_request_controller_process_called_with_correct_params(self):
        """The revocation controller must be built with the parsed payload and
        then process() invoked on it."""
        req_payload = {"name": "Leon, der Profi"}
        mock_req_controller = MagicMock()
        # JobService built from mocks; parse_obj yields req_payload verbatim.
        mock_get_service = MagicMock(
            return_value=JobService(
                job_repository=MagicMock(),
                background_worker=MagicMock(),
                payload_type=MagicMock(parse_obj=MagicMock(return_value=req_payload)),
                request_controller=mock_req_controller,
                job_method=revocate_freischalt_code
            ))
        with patch("erica.application.JobService.job_service_factory.get_job_service", mock_get_service), \
                patch(
                    "erica.erica_legacy.request_processing.requests_controller.UnlockCodeRevocationRequestController", mock_req_controller):
            await revocate_freischalt_code("1234")
            # _CallList.__contains__ matches a contiguous call sub-sequence.
            assert [call(req_payload, True), call().process()] in mock_req_controller.mock_calls
class TestIntegrationWithDatabaseAndRevocateFreischaltcode:
    """Integration test: run the freischalt-code *revocation* job against the
    fake database and verify the parsed result is persisted."""

    @pytest.mark.asyncio
    @pytest.mark.usefixtures('async_fake_db_connection_with_erica_table_in_settings')
    async def test_if_entity_in_data_base_then_set_correct_result_in_database(self):
        # Persist a revocation request entity for the job to pick up.
        # NOTE(review): this payload uses `dob=` while the request payload above
        # uses `date_of_birth=` — presumably the field name differs between the
        # two payload classes; confirm.
        payload = FreischaltCodeRevocatePayload(tax_id_number='04452397687', dob=date(1950, 8, 16), elster_request_id='br1272xf3i59m2323ft9qtk7iqzxzke4')
        service = get_job_service(RequestType.freischalt_code_revocate)
        entity = service.repository.create(EricaRequest(
            request_id=uuid4(),
            payload=payload,
            creator_id="tests",
            type=RequestType.freischalt_code_revocate
        ))
        # Stub the ERiC layer; the sample XML stands in for ELSTER's answer.
        xml_string = read_text_from_sample('sample_vast_revocation_response.xml')
        with patch('erica.erica_legacy.pyeric.pyeric_controller.UnlockCodeRevocationPyericProcessController.get_eric_response',
                   MagicMock(return_value=PyericResponse(eric_response="eric_response", server_response=xml_string))):
            await revocate_freischalt_code(entity.request_id)
        # Unlike request/activation, the revocation result carries no 'idnr'.
        updated_entity = service.repository.get_by_job_request_id(entity.request_id)
        assert updated_entity.result == {'elster_request_id': get_antrag_id_from_xml(xml_string),
                                         'transferticket': get_transferticket_from_xml(xml_string)}
| 52.866953
| 159
| 0.692645
| 1,298
| 12,318
| 6.164099
| 0.108629
| 0.03937
| 0.041995
| 0.03987
| 0.807899
| 0.7974
| 0.786527
| 0.775028
| 0.755406
| 0.755406
| 0
| 0.012156
| 0.232018
| 12,318
| 232
| 160
| 53.094828
| 0.833615
| 0
| 0
| 0.711957
| 0
| 0
| 0.170658
| 0.147114
| 0
| 0
| 0
| 0
| 0.065217
| 1
| 0
| false
| 0
| 0.076087
| 0
| 0.108696
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
307fddc765b304f7557ca704a4adc91080600f73
| 20,160
|
py
|
Python
|
Ion Aggregation/ollivier_HBN.py
|
ExpectozJJ/Persistent-Ollivier-Ricci-Curvature
|
508f49e9aaa9f88552b49d01b6d585df9e75d220
|
[
"Apache-2.0"
] | null | null | null |
Ion Aggregation/ollivier_HBN.py
|
ExpectozJJ/Persistent-Ollivier-Ricci-Curvature
|
508f49e9aaa9f88552b49d01b6d585df9e75d220
|
[
"Apache-2.0"
] | null | null | null |
Ion Aggregation/ollivier_HBN.py
|
ExpectozJJ/Persistent-Ollivier-Ricci-Curvature
|
508f49e9aaa9f88552b49d01b6d585df9e75d220
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import networkx as nx
import math
import matplotlib.pyplot as plt
import matplotlib
import seaborn as sns
# load GraphRicciCuravture package
from GraphRicciCurvature.OllivierRicci import OllivierRicci
from GraphRicciCurvature.FormanRicci import FormanRicci
from collections import defaultdict, Counter
# Solute concentrations (molarity) used in the trajectory file names below.
vals = ["1M", "2M", "3M", "4M", "5M", "6M", "7M", "8M"]
def convertpdb(filename):
    """Extract ATOM records from a PDB file and save them as a .npz archive.

    Reads the atom name (columns 12:16), an element guess (columns 12:14)
    and the x/y/z coordinates (columns 30:38, 38:46, 46:54) of every line
    starting with 'ATOM', then writes a single key 'PRO' holding
    ``[{'atom': ..., 'typ': ..., 'pos': (n, 3) array}]`` to
    ``filename`` with its 4-character extension replaced by ``.npz``.

    :param filename: path to the input .pdb file.
    """
    # Accumulate in Python lists: the original used np.append per record,
    # which copies the whole array on every call (quadratic overall).
    atom_names = []
    elements = []
    xs, ys, zs = [], [], []
    # Context manager guarantees the handle is closed (the original leaked it).
    with open(filename, "r") as f:
        for this_line in f:
            if this_line[0:4] == 'ATOM':
                atom_names.append(this_line[12:16])
                xs.append(float(this_line[30:38]))
                ys.append(float(this_line[38:46]))
                zs.append(float(this_line[46:54]))
                # NOTE(review): columns 12:14 are the start of the atom-name
                # field, not the official PDB element columns (76:78). Kept
                # as-is to preserve the original behaviour.
                elements.append(this_line[12:14])
    a = {'PRO': [{'atom': np.array(atom_names),
                  'typ': np.array(elements),
                  'pos': np.transpose([np.asarray(xs, dtype=float),
                                       np.asarray(ys, dtype=float),
                                       np.asarray(zs, dtype=float)])}]}
    np.savez(filename[:-4] + ".npz", **a)
def gen_graph(filename, cutoff):
    """Build an undirected contact graph from a .npz file made by convertpdb.

    Nodes are atom indices (with 'atom' and 'coords' attributes); an edge
    connects two atoms whose Euclidean distance, rounded to 2 decimals,
    is at most ``cutoff``. Returns the graph of the FIRST entry in
    ``data['PRO']`` only (matches the original early return).

    :param filename: path to the .npz archive.
    :param cutoff: distance threshold in the same units as the coordinates.
    """
    data = np.load(filename, allow_pickle=True)
    for item in data['PRO']:
        coords = item['pos']
        atoms = item['atom']
        G = nx.Graph()
        for i in range(len(atoms)):
            G.add_node(i, atom=atoms[i], coords=coords[i])
        # Distance is symmetric and G is undirected, so scanning only j > i
        # produces the identical edge set in half the work of the original
        # full i/j double loop.
        n = len(coords)
        for i in range(n):
            for j in range(i + 1, n):
                dist = np.linalg.norm(coords[i] - coords[j])
                if round(dist, 2) <= cutoff:
                    G.add_edge(i, j)  # unweighted, as in the original
        return G
# Curvature-bin support grid (not consumed below; KDE plotting bins itself).
x = np.round(np.arange(-1, 1.01, 0.01),2)
edge_mat = []
vertex_mat = []
# For every TMAO concentration: convert each of the 101 trajectory frames,
# build the 4-Angstrom cutoff contact graph, compute Ollivier-Ricci
# curvatures, and save the per-concentration raw output.
for j in vals:
    raw_output = []
    for id in range(101):
        # Zero-pad the frame index to 4 digits to match the file naming.
        s = str(id)
        while len(s) < 4:
            s = '0'+ s
        print("Processing Frame ", id, ": ", j)
        convertpdb("tmao/md_"+j+"_tmao_4p_tf_"+s+"_OW.pdb")
        data = np.load("tmao/md_"+j+"_tmao_4p_tf_"+s+"_OW.npz", allow_pickle=True)
        G = gen_graph("tmao/md_"+j+"_tmao_4p_tf_"+s+"_OW.npz", 4)
        # y / vertices / cnts are initialised but not used in this loop body.
        y = np.zeros(len(x))
        vertices = np.zeros(G.number_of_nodes())
        cnts = defaultdict(int)
        # Frames whose graph has no edges contribute nothing to raw_output.
        if G.number_of_edges() > 0:
            orc = OllivierRicci(G, alpha=0.5)
            orc.compute_ricci_curvature()
            e_temp, v_temp = nx.get_edge_attributes(orc.G, "ricciCurvature"), nx.get_node_attributes(orc.G, "ricciCurvature")
            # Stored alternating per frame: vertex curvatures, then edge curvatures.
            raw_output.append(list(v_temp.values()))
            raw_output.append(list(e_temp.values()))
    np.savez("tmao_raw_op_"+j+".npz", raw_output)
# Same pipeline as the TMAO loop above, for the urea trajectories.
x = np.round(np.arange(-1, 1.01, 0.01),2)
edge_mat = []
vertex_mat = []
for j in vals:
    raw_output = []
    for id in range(101):
        # Zero-pad the frame index to 4 digits to match the file naming.
        s = str(id)
        while len(s) < 4:
            s = '0'+ s
        print("Processing Frame ", id, ": ", j)
        convertpdb("urea/md_"+j+"_urea_4p_tf_"+s+"_OW.pdb")
        data = np.load("urea/md_"+j+"_urea_4p_tf_"+s+"_OW.npz", allow_pickle=True)
        G = gen_graph("urea/md_"+j+"_urea_4p_tf_"+s+"_OW.npz", 4)
        # y / vertices / cnts are initialised but not used in this loop body.
        y = np.zeros(len(x))
        vertices = np.zeros(G.number_of_nodes())
        cnts = defaultdict(int)
        # Frames whose graph has no edges contribute nothing to raw_output.
        if G.number_of_edges() > 0:
            orc = OllivierRicci(G, alpha=0.5)
            orc.compute_ricci_curvature()
            e_temp, v_temp = nx.get_edge_attributes(orc.G, "ricciCurvature"), nx.get_node_attributes(orc.G, "ricciCurvature")
            # Stored alternating per frame: vertex curvatures, then edge curvatures.
            raw_output.append(list(v_temp.values()))
            raw_output.append(list(e_temp.values()))
    np.savez("urea_raw_op_"+j+".npz", raw_output)
def plot_tmao_avg_vertex():
    """Overlay KDEs of vertex Ollivier-Ricci curvature for every TMAO
    concentration (1M-8M), pooling all 101 frames per concentration.

    Reads ``tmao_raw_op_<conc>.npz`` (alternating vertex/edge lists per
    frame) and shows the figure; saving is left commented out as before.
    """
    conc = ["1M", "2M", "3M", "4M", "5M", "6M", "7M", "8M"]
    plt.figure(figsize=(5, 5), dpi=150)
    for i in range(len(conc)):
        data = np.load("tmao_raw_op_" + conc[i] + ".npz", allow_pickle=True)
        # Even entries of arr_0 are per-frame vertex curvature lists.
        vertex_data = data["arr_0"][::2]
        # Pool all frames into one sample. (The original also accumulated the
        # edge sample here but never used it; that dead work is removed.)
        v_ = [val for frame in vertex_data for val in frame]
        color = plt.cm.jet(i / 8)
        sns.kdeplot(v_, color=color, label=conc[i], gridsize=200)
    plt.xticks(fontsize=11)
    plt.yticks(fontsize=11)
    plt.legend()
    plt.axis([-1, 1, 0, 6])
    plt.xlabel("Vertex Curvature", fontsize=12)
    plt.ylabel("Average Density", fontsize=12)
    plt.title("$H_{2}O$ (Tmao) - TIP4P")
    #plt.savefig("tmao_avg_vertex.pdf", dpi=200)
    plt.show()
def plot_tmao_avg_edge():
    """Overlay KDEs of edge Ollivier-Ricci curvature for every TMAO
    concentration (1M-8M), pooling all 101 frames per concentration.

    Reads ``tmao_raw_op_<conc>.npz`` and saves ``tmao_avg_edge.pdf``.
    """
    conc = ["1M", "2M", "3M", "4M", "5M", "6M", "7M", "8M"]
    plt.figure(figsize=(5, 5), dpi=150)
    for i in range(len(conc)):
        data = np.load("tmao_raw_op_" + conc[i] + ".npz", allow_pickle=True)
        # Odd entries of arr_0 are per-frame edge curvature lists.
        edge_data = data["arr_0"][1::2]
        # Pool all frames into one sample. (The original also accumulated the
        # vertex sample here but never used it; that dead work is removed.)
        e_ = [val for frame in edge_data for val in frame]
        color = plt.cm.jet(i / 8)
        sns.kdeplot(e_, color=color, label=conc[i])
    plt.xticks(fontsize=11)
    plt.yticks(fontsize=11)
    plt.legend()
    plt.axis([-1, 1, 0, 4.5])
    plt.xlabel("Edge Curvature", fontsize=12)
    plt.ylabel("Average Density", fontsize=12)
    plt.title("$H_{2}O$ (Tmao) - TIP4P")
    plt.savefig("tmao_avg_edge.pdf", dpi=200)
    plt.show()
def plot_urea_avg_vertex():
    """Overlay KDEs of vertex Ollivier-Ricci curvature for every urea
    concentration (1M-8M), pooling all 101 frames per concentration.

    Reads ``urea_raw_op_<conc>.npz`` and saves ``urea_avg_vertex.pdf``.
    """
    conc = ["1M", "2M", "3M", "4M", "5M", "6M", "7M", "8M"]
    plt.figure(figsize=(5, 5), dpi=150)
    for i in range(len(conc)):
        data = np.load("urea_raw_op_" + conc[i] + ".npz", allow_pickle=True)
        # Even entries of arr_0 are per-frame vertex curvature lists.
        vertex_data = data["arr_0"][::2]
        # Pool all frames into one sample. (The original also accumulated the
        # edge sample here but never used it; that dead work is removed.)
        v_ = [val for frame in vertex_data for val in frame]
        color = plt.cm.jet(i / 8)
        sns.kdeplot(v_, color=color, label=conc[i])
    plt.xticks(fontsize=11)
    plt.yticks(fontsize=11)
    plt.legend()
    plt.axis([-1, 1, 0, 6])
    plt.xlabel("Vertex Curvature", fontsize=12)
    plt.ylabel("Average Density", fontsize=12)
    plt.title("$H_{2}O$ (Urea) - TIP4P")
    plt.savefig("urea_avg_vertex.pdf", dpi=200)
    plt.show()
def plot_urea_avg_edge():
    """Overlay KDEs of edge Ollivier-Ricci curvature for every urea
    concentration (1M-8M), pooling all 101 frames per concentration.

    Reads ``urea_raw_op_<conc>.npz`` and saves ``urea_avg_edge.pdf``.
    """
    conc = ["1M", "2M", "3M", "4M", "5M", "6M", "7M", "8M"]
    plt.figure(figsize=(5, 5), dpi=150)
    for i in range(len(conc)):
        data = np.load("urea_raw_op_" + conc[i] + ".npz", allow_pickle=True)
        # Odd entries of arr_0 are per-frame edge curvature lists.
        edge_data = data["arr_0"][1::2]
        # Pool all frames into one sample. (The original also accumulated the
        # vertex sample here but never used it; that dead work is removed.)
        e_ = [val for frame in edge_data for val in frame]
        color = plt.cm.jet(i / 8)
        sns.kdeplot(e_, color=color, label=conc[i])
    plt.xticks(fontsize=11)
    plt.yticks(fontsize=11)
    plt.legend()
    plt.axis([-1, 1, 0, 4.5])
    plt.xlabel("Edge Curvature", fontsize=12)
    plt.ylabel("Average Density", fontsize=12)
    plt.title("$H_{2}O$ (Urea) - TIP4P")
    plt.savefig("urea_avg_edge.pdf", dpi=200)
    plt.show()
def plot_tmao_3d():
    """Render, for each TMAO concentration, a 3-D surface of per-frame
    vertex-curvature KDEs: x = curvature, y = frame index, z = density."""
    vals = ["1M", "2M", "3M", "4M", "5M", "6M", "7M", "8M"]
    for i in range(len(vals)):
        edge_data, vertex_data = [], []
        x, y = [], []
        data = np.load("tmao_raw_op_"+vals[i]+".npz", allow_pickle=True)
        edge_data = data["arr_0"][1::2]
        vertex_data = data["arr_0"][::2]
        # 101 frames x 128 KDE grid points; 128 presumably matches seaborn's
        # default KDE gridsize — confirm before changing seaborn versions.
        x = np.outer(range(0, 101), np.ones(128))
        y, z = [], []
        for j in range(101):
            # Each kdeplot call adds one Line2D to the current implicit axes,
            # so ax.lines[j] is the curve for frame j.
            # NOTE(review): fragile — assumes the line list is empty at j == 0
            # and grows by exactly one per call; verify across iterations of
            # the outer loop, since the "current" figure changes below.
            ax = sns.kdeplot(vertex_data[j])
            y.append(list(ax.lines[j].get_data()[0]))
            z.append(list(ax.lines[j].get_data()[1]))
        fig = plt.figure(figsize=(10,5), dpi=200)
        ax= fig.add_subplot(111, projection= '3d')
        # Flatten the 101x128 grids into 1-D arrays for plot_trisurf.
        ax.plot_trisurf(np.reshape(y, (128*101)), np.reshape(x, (128*101)), np.reshape(z, (128*101)), cmap='jet', vmin = 0., vmax = 6.)
        #fig = go.Figure(data=[go.Surface(z=z, x=x, y=y)])
        #fig.update_traces(contours_z=dict(show=True, usecolormap=True,
        #highlightcolor="limegreen", project_z=True))
        #surf = ax.plot_surface(np.array(y), np.array(x), np.array(z),cmap='jet',linewidth=0,antialiased='True',rstride=3,cstride=3)
        # Projected contour plot drawn below the surface (z offset -2).
        ax.contourf(np.array(y), np.array(x), np.array(z), 100, zdir='z', offset=-2,cmap='jet', vmin = 0., vmax = 6.)
        #ax.set_title('$H_{2}O$ (Tmao) - TIP4P - '+vals[i])
        ax.view_init(elev=10., azim=-120)
        ax.set_xlabel('Vertex Curvature', rotation=0, labelpad=15)
        ax.set_ylabel('Frame Index', rotation=0, labelpad=15)
        ax.set_zlabel('Density', rotation=90,labelpad=15)
        #ax.tick_params(axis='z', pad=10)
        ax.zaxis.set_rotate_label(False)
        ax.grid(False)
        #ax.set_xlim([-0.6, 1])
        ax.set_ylim([0, 100])
        ax.set_zlim([-2, 6.1])
        # Standalone colorbar mapped to the same 0..6 density range.
        m = plt.cm.ScalarMappable(cmap=plt.cm.jet)
        m.set_clim(0, 6.)
        plt.colorbar(m, extend="both", shrink=0.75, pad=-0.05)
        plt.tight_layout()
        plt.savefig("tmao_3d_vertex_"+vals[i]+".pdf")
        #fig.colorbar(surf)
        plt.show()
def plot_urea_3d():
    """Render, for each urea concentration, a 3-D surface of per-frame
    vertex-curvature KDEs: x = curvature, y = frame index, z = density.

    Same structure as plot_tmao_3d but with a 4..6 colour range and larger
    axis-label fonts.
    """
    vals = ["1M", "2M", "3M", "4M", "5M", "6M", "7M", "8M"]
    for i in range(len(vals)):
        edge_data, vertex_data = [], []
        x, y = [], []
        data = np.load("urea_raw_op_"+vals[i]+".npz", allow_pickle=True)
        edge_data = data["arr_0"][1::2]
        vertex_data = data["arr_0"][::2]
        # 101 frames x 128 KDE grid points (presumably seaborn's default
        # gridsize — confirm).
        x = np.outer(range(0, 101), np.ones(128))
        y, z = [], []
        for j in range(101):
            # NOTE(review): relies on ax.lines[j] being the curve drawn for
            # frame j on the current implicit axes — fragile; see plot_tmao_3d.
            ax = sns.kdeplot(vertex_data[j])
            y.append(list(ax.lines[j].get_data()[0]))
            z.append(list(ax.lines[j].get_data()[1]))
        fig = plt.figure(figsize=(10,5), dpi=200)
        ax= fig.add_subplot(111, projection= '3d')
        # Flatten the 101x128 grids into 1-D arrays for plot_trisurf.
        ax.plot_trisurf(np.reshape(y, (128*101)), np.reshape(x, (128*101)), np.reshape(z, (128*101)), cmap='jet', vmin = 4., vmax = 6.)
        #fig = go.Figure(data=[go.Surface(z=z, x=x, y=y)])
        #fig.update_traces(contours_z=dict(show=True, usecolormap=True,
        #highlightcolor="limegreen", project_z=True))
        #surf = ax.plot_surface(np.array(y), np.array(x), np.array(z),cmap='jet',linewidth=0,antialiased='True',rstride=3,cstride=3)
        # Projected contour plot drawn below the surface (z offset -2).
        ax.contourf(np.array(y), np.array(x), np.array(z), 100, zdir='z', offset=-2,cmap='jet', vmin = 4., vmax = 6.)
        #ax.set_title('$H_{2}O$ (Urea) - TIP4P - '+vals[i])
        ax.view_init(elev=10., azim=-120)
        ax.set_xlabel('Vertex Curvature', rotation=0, labelpad=15, fontsize=20)
        ax.set_ylabel('Frame Index', rotation=0, labelpad=15, fontsize=20)
        ax.set_zlabel('Density', rotation=90,labelpad=15, fontsize=20)
        ax.tick_params(axis='z', pad=10)
        ax.zaxis.set_rotate_label(False)
        ax.grid(False)
        #ax.set_xlim([-0.6, 1])
        ax.set_ylim([0, 100])
        ax.set_zlim([-2, 6.1])
        # Standalone colorbar mapped to the same 4..6 density range.
        m = plt.cm.ScalarMappable(cmap=plt.cm.jet)
        m.set_clim(4., 6.)
        plt.colorbar(m, extend="both", shrink=0.75, pad=-0.05)
        plt.tight_layout()
        plt.savefig("Urea_3d_vertex_"+vals[i]+".pdf")
        #fig.colorbar(surf)
        plt.show()
def plot_urea_sample():
    """Plot vertex- and edge-curvature densities per urea concentration from
    the combined raw-output archive ``urea_raw_op.npz``.

    arr_0 alternates [vertex, edge] lists per concentration: even indices are
    vertex curvatures, odd indices edge curvatures.
    """
    data = np.load("urea_raw_op.npz", allow_pickle=True)
    conc = ["1M", "2M", "3M", "4M", "5M", "6M", "7M", "8M"]

    # --- vertex curvature densities, one KDE per concentration ---
    n_pairs = len(data["arr_0"]) // 2
    plt.figure(figsize=(5, 5), dpi=150)
    for idx in range(n_pairs):
        shade = plt.cm.jet(idx / n_pairs)
        sns.distplot(data["arr_0"][2 * idx], color=shade, label=conc[idx],
                     kde_kws={'linewidth': 1}, norm_hist=False, kde=True, hist=False)
    plt.xticks(fontsize=11)
    plt.yticks(fontsize=11)
    plt.legend()
    plt.axis([-1, 1, 0, 6])
    plt.title("$H_{2}O$ (Urea) - TIP4P")
    plt.xlabel("Vertex Curvature", fontsize=12)
    plt.ylabel("Density", fontsize=12)
    plt.savefig("urea_vertex.eps", dpi=200)
    plt.show()

    # --- edge curvature densities (loop bound taken from module-level vals,
    # as in the original) ---
    plt.figure(figsize=(5, 5), dpi=150)
    for idx in range(len(vals)):
        shade = plt.cm.jet(idx / len(vals))
        sns.kdeplot(data["arr_0"][2 * idx + 1], color=shade, label=conc[idx], linewidth=1)
    plt.xticks(fontsize=11)
    plt.yticks(fontsize=11)
    plt.legend()
    plt.axis([-1, 1, 0, 3.1])
    plt.title("$H_{2}O$ (Urea) - TIP4P")
    plt.xlabel("Edge Curvature", fontsize=12)
    plt.ylabel("Density", fontsize=12)
    plt.savefig("urea_edge.eps", dpi=200)
    plt.show()
def plot_tmao_sample():
    """Plot vertex/edge curvature densities for each TMAO concentration from
    the combined ``tmao_raw_op.npz`` archive, then per-concentration 2x4
    histogram grids for vertices and edges.

    NOTE(review): indentation was reconstructed; the subplot sections below
    are assumed to belong to this function because they reuse ``data`` loaded
    here — confirm against the original layout.
    """
    data = np.load("tmao_raw_op.npz", allow_pickle=True)
    conc = ["1M", "2M", "3M", "4M", "5M", "6M", "7M", "8M"]
    plt.figure(figsize=(5,5), dpi=150)
    # Even indices of arr_0 hold vertex curvatures, odd indices edge curvatures.
    for i in range(len(data["arr_0"])//2):
        color = plt.cm.jet(i/(len(data["arr_0"])//2))
        sns.distplot(data["arr_0"][2*i], color=color, label = conc[i], kde_kws={'linewidth': 1} , kde=True, hist=False, norm_hist=True)
    plt.xticks(fontsize=11)
    plt.yticks( fontsize=11)
    plt.legend()
    plt.axis([-1, 1, 0, 6])
    plt.xlabel("Vertex Curvature", fontsize=12)
    plt.ylabel("Density", fontsize=12)
    plt.title("$H_{2}O$ (Tmao) - TIP4P")
    plt.savefig("tmao_vertex.eps", dpi=200)
    plt.show()
    conc = ["1M", "2M", "3M", "4M", "5M", "6M", "7M", "8M"]
    plt.figure(figsize=(5,5), dpi=150)
    # Edge curvature densities per concentration.
    for i in range(len(data["arr_0"])//2):
        color = plt.cm.jet(i/(len(data["arr_0"])//2))
        sns.kdeplot(data["arr_0"][2*i+1], color=color, label = conc[i], linewidth = 1)
    plt.xticks(fontsize=11)
    plt.yticks( fontsize=11)
    plt.legend()
    plt.axis([-1, 1, 0, 3.1])
    plt.xlabel("Edge Curvature", fontsize=12)
    plt.ylabel("Density", fontsize=12)
    plt.title("$H_{2}O$ (Tmao) - TIP4P")
    plt.savefig("tmao_edge.eps", dpi=200)
    plt.show()
    # Per-concentration vertex histograms on a 2x4 grid (top row indices 0-3,
    # bottom row 4-7); the extra frameless 111 axes carries shared labels.
    x = np.round(np.arange(-1, 1.01, 0.01),2)
    f, axes = plt.subplots(2, 4, figsize=(15, 5), sharex=True, sharey=True, dpi=200)
    plt.xlim(-1,1)
    f.add_subplot(111, frameon=False)
    plt.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
    plt.xlabel("Vertex Curvature", fontsize=14)
    plt.ylabel("Density", fontsize=14)
    for j in range(len(data["arr_0"])//2):
        if j >=4:
            sns.distplot(data["arr_0"][2*j], bins=50, hist_kws=dict(edgecolor="k", lw=.5, alpha=0.7), color='tab:blue', kde_kws={"color":"tab:red", "lw": 2}, ax=axes[1][j-4], kde=False, norm_hist=True)
        else:
            sns.distplot(data["arr_0"][2*j], bins=50, hist_kws=dict(edgecolor="k", lw=.5, alpha=.7), color='tab:blue', kde_kws={"color":"tab:red", "lw": 2}, ax=axes[0][j], kde=False, norm_hist=True)
        #plt.plot(x, y_, color='red')
        #plt.xticks()
        #plt.yticks()
    # Re-enable per-panel tick labels hidden by sharex/sharey.
    for j in range(4):
        axes[0][j].xaxis.set_tick_params(which='both', labelbottom=True)
        axes[0][j].yaxis.set_tick_params(which='both', labelbottom=True)
        axes[1][j].yaxis.set_tick_params(which='both', labelbottom=True)
    #plt.ylim(0, 4)
    plt.savefig("tmao_vertex_subplots.pdf", dpi=200)
    #plt.show()
    # Same 2x4 grid, now for edge curvatures (odd arr_0 indices).
    x = np.round(np.arange(-1, 1.01, 0.01),2)
    f, axes = plt.subplots(2, 4, figsize=(15, 5), sharex=True, sharey=True, dpi=200)
    plt.xlim(-1,1)
    f.add_subplot(111, frameon=False)
    plt.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
    plt.xlabel("Edge Curvature", fontsize=14)
    plt.ylabel("Density", fontsize=14)
    for j in range(len(data["arr_0"])//2):
        if j >=4:
            sns.distplot(data["arr_0"][2*j+1], bins=50, hist_kws=dict(edgecolor="k", lw=.5, alpha=0.7), color='tab:blue', kde_kws={"color":"tab:red", "lw": 2}, ax=axes[1][j-4], kde=False, norm_hist=True)
        else:
            sns.distplot(data["arr_0"][2*j+1], bins=50, hist_kws=dict(edgecolor="k", lw=.5, alpha=.7), color='tab:blue', kde_kws={"color":"tab:red", "lw": 2}, ax=axes[0][j], kde=False, norm_hist=True)
        #plt.plot(x, y_, color='red')
        #plt.xticks()
        #plt.yticks()
    for j in range(4):
        axes[0][j].xaxis.set_tick_params(which='both', labelbottom=True)
        axes[0][j].yaxis.set_tick_params(which='both', labelbottom=True)
        axes[1][j].yaxis.set_tick_params(which='both', labelbottom=True)
    #plt.ylim(0, 4)
    plt.savefig("tmao_edge_subplots.pdf", dpi=200)
    #plt.show()
def plot_tmao_des():
    """Plot the 8M TMAO sample (arr_0[14] = vertex, arr_0[15] = edge
    curvatures) twice: first as raw-count histograms, then as KDE densities.

    NOTE(review): indices 14/15 presumably correspond to the 8th (8M)
    vertex/edge pair in the alternating layout — confirm.
    """
    data = np.load("tmao_raw_op.npz", allow_pickle=True)
    conc = ["1M", "2M", "3M", "4M", "5M", "6M", "7M", "8M"]
    # --- histogram version: counts, no KDE ---
    plt.figure()
    f, axes = plt.subplots(2,1, figsize=(5, 5), dpi=200, sharex=True)
    plt.xlim(-1,1)
    plt.subplots_adjust(hspace=.3)
    # Bottom panel: vertex curvatures; top panel: edge curvatures.
    sns.distplot(data["arr_0"][14], bins=50, hist_kws=dict(edgecolor="k", lw=.5, alpha=0.7), color='tab:blue', kde_kws={"color":"tab:red", "lw": 2}, ax=axes[1], kde=False)
    axes[1].set(xlabel="Vertex Curvature", ylabel="No. of Vertices")
    sns.distplot(data["arr_0"][15], bins=50, hist_kws=dict(edgecolor="k", lw=.5, alpha=0.7), color='tab:blue', kde_kws={"color":"tab:red", "lw": 2}, ax=axes[0], kde=False)
    # Show x tick labels on the top panel despite sharex.
    axes[0].xaxis.set_tick_params(which='both', labelbottom=True)
    axes[0].set(xlabel="Edge Curvature", ylabel="No. of Edges")
    #plt.xticks(fontsize=11)
    #plt.yticks( fontsize=11)
    #plt.legend()
    #plt.axis([-1, 1, 0, 6])
    #plt.xlabel("Vertex Curvature", fontsize=12)
    #plt.ylabel("Density", fontsize=12)
    #plt.title("$H_{2}O$ (Tmao) - TIP4P")
    plt.tight_layout()
    plt.savefig("tmao_8M.pdf", dpi=200)
    # --- density version: KDE only, no histogram ---
    conc = ["1M", "2M", "3M", "4M", "5M", "6M", "7M", "8M"]
    plt.figure()
    f, axes = plt.subplots(2,1, figsize=(5, 5), dpi=200, sharex=True)
    plt.xlim(-1,1)
    plt.subplots_adjust(hspace=.3)
    sns.distplot(data["arr_0"][14], bins=50, hist_kws=dict(edgecolor="k", lw=.5, alpha=0.7), color='tab:blue', kde_kws={"color":"tab:blue", "lw": 2}, ax=axes[1], hist=False)
    axes[1].set(xlabel="Vertex Curvature", ylabel="Density")
    sns.distplot(data["arr_0"][15], bins=50, hist_kws=dict(edgecolor="k", lw=.5, alpha=0.7), color='tab:blue', kde_kws={"color":"tab:blue", "lw": 2}, ax=axes[0], hist=False)
    axes[0].xaxis.set_tick_params(which='both', labelbottom=True)
    axes[0].set(xlabel="Edge Curvature", ylabel="Density")
    #plt.xticks(fontsize=11)
    #plt.yticks( fontsize=11)
    #plt.legend()
    #plt.axis([-1, 1, 0, 6])
    #plt.xlabel("Vertex Curvature", fontsize=12)
    #plt.ylabel("Density", fontsize=12)
    #plt.title("$H_{2}O$ (Tmao) - TIP4P")
    plt.tight_layout()
    plt.savefig("tmao_8M_density.pdf", dpi=200)
| 41.059063
| 204
| 0.557788
| 3,036
| 20,160
| 3.582675
| 0.097497
| 0.024455
| 0.02942
| 0.018204
| 0.869909
| 0.861175
| 0.845362
| 0.843155
| 0.814747
| 0.787349
| 0
| 0.054429
| 0.246329
| 20,160
| 491
| 205
| 41.059063
| 0.661445
| 0.095883
| 0
| 0.713528
| 0
| 0
| 0.108333
| 0.002602
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029178
| false
| 0
| 0.023873
| 0
| 0.055703
| 0.005305
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
06a4d345ccc9f526a8f89363814da61ba230de56
| 10,090
|
py
|
Python
|
EZclim/create_test_data_files.py
|
msc-acse/acse-9-independent-research-project-AdannaAkwats
|
3a46de2e95b35ef095d2376e7237fad14e058b3e
|
[
"MIT"
] | null | null | null |
EZclim/create_test_data_files.py
|
msc-acse/acse-9-independent-research-project-AdannaAkwats
|
3a46de2e95b35ef095d2376e7237fad14e058b3e
|
[
"MIT"
] | null | null | null |
EZclim/create_test_data_files.py
|
msc-acse/acse-9-independent-research-project-AdannaAkwats
|
3a46de2e95b35ef095d2376e7237fad14e058b3e
|
[
"MIT"
] | 5
|
2019-06-21T16:14:28.000Z
|
2021-01-22T07:53:54.000Z
|
"""
Created by Adanna Akwataghibe (Github: AdannaAkwats)
"""
from netCDF4 import Dataset
import time
import numpy as np
from numpy.random import uniform, weibull
import directories
import os
"""
Script that creates example NetCDF files to use when testing
"""
def ex(year, monthly=False):
    """
    Create file with daily data for specific year >= 2000, one variable in file
    :param year: year of file
    :param monthly: if set to true, then creates with monthly data

    Writes 'ex1_ens101_<year>.nc' (or 'ex1monthly_ens101_<year>.nc') into
    directories.DATA with a single float32 'temp' variable on a
    10x20 lat/lon grid, random uniform values, and a NOLEAP time axis
    measured in days since 2000-01-01.
    """
    assert(year >= 2000)
    # time - 365 daily , 2000 - 2005
    file_name = 'ex1_ens101_' + str(year) + '.nc'
    if monthly:
        file_name = 'ex1monthly_ens101_' + str(year) + '.nc'
    file_name = os.path.join(directories.DATA, file_name)
    dataset = Dataset(file_name, 'w', format='NETCDF4_CLASSIC')
    # Create datasets
    # Dimensions must be created before any variable that references them.
    dataset.createDimension('latitude', 10)
    dataset.createDimension('longitude', 20)
    dataset.createDimension('time', None)  # unlimited (record) dimension
    times = dataset.createVariable('time', np.float64, ('time',))
    latitudes = dataset.createVariable('latitude', np.float64, ('latitude',))
    longitudes = dataset.createVariable('longitude', np.float64, ('longitude',))
    # Create the 3D variable
    temp = dataset.createVariable('temp', np.float32, ('time', 'latitude', 'longitude'), fill_value=-1.e+20)
    # Global Attributes
    dataset.description = 'Example NetCDF file'
    dataset.history = 'Created by Adanna Akwataghibe ' + time.ctime(time.time())
    dataset.source = 'netCDF4 python module tutorial'
    # Variable Attributes
    latitudes.units = 'degree_north'
    longitudes.units = 'degree_east'
    longitudes.long_name = 'longitude'
    latitudes.long_name = 'latitude'
    # NOLEAP calendar: every year is exactly 365 days, so offsets below are
    # simply 365 * (year - 2000).
    times.calendar = 'NOLEAP'
    times.calendar_type = 'NOLEAP'
    times.units = 'days since 2000-01-01 00:00:00'
    times.long_name = 'time'
    temp.units = 'K'
    temp.long_name = 'temperature'
    # Fill in lons and lats
    lats = np.linspace(-90, 90, 10)
    lons = np.linspace(-180, 180, 20)
    latitudes[:] = lats
    longitudes[:] = lons
    nlats = len(dataset.dimensions['latitude'])
    nlons = len(dataset.dimensions['longitude'])
    # Random uniform [0, 1) values: 365 daily slices or 12 monthly slices.
    if not monthly:
        temp[:, :, :] = uniform(size=(365, nlats, nlons))
    else:
        temp[:, :, :] = uniform(size=(12, nlats, nlons))
    if not monthly:
        times[:] = np.asarray(range(365)) + 365 * (year - 2000)
    else:
        # Cumulative day offsets of each month's first day in a 365-day year.
        t = np.cumsum([0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30])
        times[:] = t + 365 * (year - 2000)
    print("New file " + file_name + " created in " + directories.DATA + " directory.")
    dataset.close()
def ex_level(year, swap=False):
    """
    Create file with daily data for specific year >= 2000, one variable in file, added level to dimensions
    :param year: year of file
    :param swap: if swap, then change time and level dim

    Writes 'ex1level_ens101_<year>.nc' into directories.DATA with a 4-D
    float32 'temp' variable: dimension order (level, time, lat, lon), or
    (time, level, lat, lon) when swap is True.
    """
    assert (year >= 2000)
    # time - 365 daily , 2000 - 2005
    file_name = 'ex1level_ens101_' + str(year) + '.nc'
    file_name = os.path.join(directories.DATA, file_name)
    dataset = Dataset(file_name, 'w', format='NETCDF4_CLASSIC')
    # Create datasets
    # Dimensions must be created before any variable that references them.
    dataset.createDimension('level', 5)
    dataset.createDimension('latitude', 10)
    dataset.createDimension('longitude', 20)
    dataset.createDimension('time', None)  # unlimited (record) dimension
    levels = dataset.createVariable('level', np.float64, ('level',))
    times = dataset.createVariable('time', np.float64, ('time',))
    latitudes = dataset.createVariable('latitude', np.float64, ('latitude',))
    longitudes = dataset.createVariable('longitude', np.float64, ('longitude',))
    # Create the 3D variable
    # swap only changes the order of the first two dimensions.
    if not swap:
        temp = dataset.createVariable('temp', np.float32, ('level', 'time', 'latitude', 'longitude'), fill_value=-1.e+20)
    else:
        temp = dataset.createVariable('temp', np.float32, ('time', 'level', 'latitude', 'longitude'), fill_value=-1.e+20)
    # Global Attributes
    dataset.description = 'Example NetCDF file'
    dataset.history = 'Created by Adanna Akwataghibe ' + time.ctime(time.time())
    dataset.source = 'netCDF4 python module tutorial'
    # Variable Attributes
    latitudes.units = 'degree_north'
    longitudes.units = 'degree_east'
    longitudes.long_name = 'longitude'
    latitudes.long_name = 'latitude'
    # NOLEAP calendar: every year is exactly 365 days.
    times.calendar = 'NOLEAP'
    times.calendar_type = 'NOLEAP'
    times.units = 'days since 2000-01-01 00:00:00'
    times.long_name = 'time'
    temp.units = 'K'
    temp.long_name = 'temperature'
    levels.units = "hPa"
    # Fill in lons and lats
    lats = np.linspace(-90, 90, 10)
    lons = np.linspace(-180, 180, 20)
    latitudes[:] = lats
    longitudes[:] = lons
    # Pressure levels, descending from 100 hPa to 0 hPa.
    levels[:] = [100, 75, 50, 25, 0]
    nlats = len(dataset.dimensions['latitude'])
    nlons = len(dataset.dimensions['longitude'])
    # Random uniform data shaped to match the chosen dimension order.
    if not swap:
        temp[0:5, :, :, :] = uniform(size=(5, 365, nlats, nlons))
    else:
        temp[:, :, :, :] = uniform(size=(365, 5, nlats, nlons))
    times[:] = np.asarray(range(365)) + 365 * (year - 2000)
    print("New file " + file_name + " created in " + directories.DATA + " directory.")
    dataset.close()
def ex2(year):
    """
    Create file with daily data for specific year >= 2000, two variables in file
    :param year: year of file

    Writes 'ex2_ens101_<year>.nc' into directories.DATA with two float32
    variables on (time, lat, lon): 'temp' (uniform random) and 'sal'
    (Weibull-distributed, shape parameter 5).
    """
    assert (year >= 2000)
    # time - 365 daily , from 2000, 2 variables
    file_name = 'ex2_ens101_' + str(year) + '.nc'
    file_name = os.path.join(directories.DATA, file_name)
    dataset = Dataset(file_name, 'w', format='NETCDF4_CLASSIC')
    # Create datasets
    # Dimensions must be created before any variable that references them.
    dataset.createDimension('latitude', 10)
    dataset.createDimension('longitude', 20)
    dataset.createDimension('time', None)  # unlimited (record) dimension
    times = dataset.createVariable('time', np.float64, ('time',))
    latitudes = dataset.createVariable('latitude', np.float64, ('latitude',))
    longitudes = dataset.createVariable('longitude', np.float64, ('longitude',))
    # Create the 3D variable
    temp = dataset.createVariable('temp', np.float32, ('time', 'latitude', 'longitude'), fill_value=-1.e+20)
    sal = dataset.createVariable('sal', np.float32, ('time', 'latitude', 'longitude'), fill_value=-1.e+20)
    # Global Attributes
    dataset.description = 'Example NetCDF file'
    dataset.history = 'Created by Adanna Akwataghibe ' + time.ctime(time.time())
    dataset.source = 'netCDF4 python module tutorial'
    # Variable Attributes
    latitudes.units = 'degree_north'
    longitudes.units = 'degree_east'
    longitudes.long_name = 'longitude'
    latitudes.long_name = 'latitude'
    # NOLEAP calendar: every year is exactly 365 days.
    times.calendar = 'NOLEAP'
    times.calendar_type = 'NOLEAP'
    times.units = 'days since 2000-01-01 00:00:00'
    times.long_name = 'time'
    temp.units = 'K'
    temp.long_name = 'temperature'
    # NOTE(review): 'sal' deliberately(?) has a long_name but no units here.
    sal.long_name = 'salinity'
    # Fill in lons and lats
    lats = np.linspace(-90, 90, 10)
    lons = np.linspace(-180, 180, 20)
    latitudes[:] = lats
    longitudes[:] = lons
    nlats = len(dataset.dimensions['latitude'])
    nlons = len(dataset.dimensions['longitude'])
    temp[:, :, :] = uniform(size=(365, nlats, nlons))
    sal[:, :, :] = weibull(5, size=(365, nlats, nlons))
    times[:] = np.asarray(range(365)) + 365 * (year - 2000)
    print("New file " + file_name + " created in " + directories.DATA + " directory.")
    dataset.close()
def ex2_level(year):
    """
    Create file with daily data for specific year >= 2000, two variables in file, added level in dimensions
    :param year: year of file
    """
    assert (year >= 2000)
    # 365 daily time steps counted from 2000-01-01; two data variables on 5 levels.
    file_name = os.path.join(directories.DATA, 'ex2level_ens101_' + str(year) + '.nc')
    dataset = Dataset(file_name, 'w', format='NETCDF4_CLASSIC')

    # Dimensions: 5 pressure levels, fixed lat/lon grid, unlimited time axis.
    dataset.createDimension('level', 5)
    dataset.createDimension('latitude', 10)
    dataset.createDimension('longitude', 20)
    dataset.createDimension('time', None)

    # Coordinate variables.
    levels = dataset.createVariable('level', np.float64, ('level',))
    times = dataset.createVariable('time', np.float64, ('time',))
    latitudes = dataset.createVariable('latitude', np.float64, ('latitude',))
    longitudes = dataset.createVariable('longitude', np.float64, ('longitude',))

    # 4D data variables (level x time x latitude x longitude).
    temp = dataset.createVariable('temp', np.float32, ('level', 'time', 'latitude', 'longitude'), fill_value=-1.e+20)
    sal = dataset.createVariable('sal', np.float32, ('level', 'time', 'latitude', 'longitude'), fill_value=-1.e+20)

    # Global attributes.
    dataset.description = 'Example NetCDF file'
    dataset.history = 'Created by Adanna Akwataghibe ' + time.ctime(time.time())
    dataset.source = 'netCDF4 python module tutorial'

    # Variable attributes.
    latitudes.units = 'degree_north'
    longitudes.units = 'degree_east'
    longitudes.long_name = 'longitude'
    latitudes.long_name = 'latitude'
    times.calendar = 'NOLEAP'
    times.calendar_type = 'NOLEAP'
    times.units = 'days since 2000-01-01 00:00:00'
    times.long_name = 'time'
    temp.missing_value = -1.e+20
    temp.units = 'K'
    temp.long_name = 'temperature'
    sal.long_name = 'salinity'
    levels.units = "hPa"

    # Fill coordinates and random sample data.
    latitudes[:] = np.linspace(-90, 90, 10)
    longitudes[:] = np.linspace(-180, 180, 20)
    levels[:] = [100, 75, 50, 25, 0]
    n_lat = len(dataset.dimensions['latitude'])
    n_lon = len(dataset.dimensions['longitude'])
    temp[:, :, :, :] = uniform(size=(5, 365, n_lat, n_lon))
    sal[:, :, :, :] = weibull(5, size=(5, 365, n_lat, n_lon))
    # NOLEAP calendar: exactly 365 days per year, offset from 2000.
    times[:] = np.arange(365) + 365 * (year - 2000)

    print("New file " + file_name + " created in " + directories.DATA + " directory.")
    dataset.close()
# ------------------------------- CREATE FILES -----------------------------------
# Only ex(2000) is run; the remaining calls are kept commented out as a menu of
# other example files that can be generated (yearly/monthly, with/without levels).
ex(2000)
# ex(2001)
# ex(2002)
# ex(2003)
#
# ex_level(2000, swap=True)
# ex2_level(2000)
# ex(2000, monthly=True)
# ex(2001, monthly=True)
# ex(2002, monthly=True)
# ex(2003, monthly=True)
# ex2(2000)
# ex2(2001)
# ex2(2002)
# ex2(2003)
# ex2(2000)
# ex2(2001)
# ex2(2002)
# ex2(2003)
| 33.190789
| 121
| 0.64113
| 1,230
| 10,090
| 5.196748
| 0.128455
| 0.026283
| 0.008761
| 0.011264
| 0.896433
| 0.896277
| 0.88423
| 0.855914
| 0.84199
| 0.84199
| 0
| 0.062461
| 0.201883
| 10,090
| 304
| 122
| 33.190789
| 0.73128
| 0.148365
| 0
| 0.836158
| 0
| 0
| 0.184938
| 0
| 0
| 0
| 0
| 0
| 0.022599
| 1
| 0.022599
| false
| 0
| 0.033898
| 0
| 0.056497
| 0.022599
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ebeb69f5fff34b70411f58ec7f75e6fc88d4571b
| 154
|
py
|
Python
|
intprim/filter/__init__.py
|
souljaboy764/intprim
|
ecf905ce69dc14215230be3b3819d2236223e9ba
|
[
"MIT"
] | 2
|
2021-09-25T21:56:49.000Z
|
2022-01-31T07:49:25.000Z
|
intprim/filter/__init__.py
|
souljaboy764/intprim
|
ecf905ce69dc14215230be3b3819d2236223e9ba
|
[
"MIT"
] | null | null | null |
intprim/filter/__init__.py
|
souljaboy764/intprim
|
ecf905ce69dc14215230be3b3819d2236223e9ba
|
[
"MIT"
] | null | null | null |
from intprim.filter.spatiotemporal import *
from intprim.filter.align import *
from intprim.filter.linear_system import *
from intprim.filter.kf import *
| 30.8
| 43
| 0.818182
| 21
| 154
| 5.952381
| 0.428571
| 0.352
| 0.544
| 0.552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 154
| 4
| 44
| 38.5
| 0.905797
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
ebf9faab9bd6b0c9a35e154a1162a2c1d9018056
| 32
|
py
|
Python
|
AULA 11 - Cores no terminal/exemplo 03.py
|
luizhmfonseca/Estudos-Python
|
4ed50835a1fd68d3066fcfa57355e48a2f5e4978
|
[
"MIT"
] | 2
|
2021-03-15T22:07:59.000Z
|
2021-11-09T11:52:37.000Z
|
AULA 11 - Cores no terminal/exemplo 03.py
|
luizhmfonseca/Estudos-Python
|
4ed50835a1fd68d3066fcfa57355e48a2f5e4978
|
[
"MIT"
] | null | null | null |
AULA 11 - Cores no terminal/exemplo 03.py
|
luizhmfonseca/Estudos-Python
|
4ed50835a1fd68d3066fcfa57355e48a2f5e4978
|
[
"MIT"
] | null | null | null |
# '\33[1;31;43m' is an ANSI escape sequence: 1 = bold, 31 = red foreground,
# 43 = yellow background. The terminal colours are not reset afterwards.
print('\33[1;31;43mOlá, Mundo!')
| 32
| 32
| 0.65625
| 6
| 32
| 3.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225806
| 0.03125
| 32
| 1
| 32
| 32
| 0.451613
| 0
| 0
| 0
| 0
| 0
| 0.69697
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
234ae3e48c68056b0deab6712e3d717ab3a836da
| 12,869
|
py
|
Python
|
dexs/one_inch.py
|
mjmj/dexy
|
0fa9d40a3e58ed6e576aa7f4abd44995d9333a5f
|
[
"MIT"
] | 2
|
2021-08-14T03:00:35.000Z
|
2021-08-14T03:10:08.000Z
|
dexs/one_inch.py
|
mjmj/dexy
|
0fa9d40a3e58ed6e576aa7f4abd44995d9333a5f
|
[
"MIT"
] | null | null | null |
dexs/one_inch.py
|
mjmj/dexy
|
0fa9d40a3e58ed6e576aa7f4abd44995d9333a5f
|
[
"MIT"
] | 1
|
2021-08-14T03:17:44.000Z
|
2021-08-14T03:17:44.000Z
|
# v2 addr
# Address of the 1inch (OneSplit) v2 exchange contract used by this module.
# NOTE(review): presumably an Ethereum mainnet deployment — verify before use.
one_inch = {
    'exchange_addr': '0xC586BeF4a0992C495Cf22e1aeEE4E446CECDee0E',
}
one_inch_abi = """
[
{
"inputs": [
{
"internalType": "contract IOneSplit",
"name": "impl",
"type": "address"
}
],
"payable": false,
"stateMutability": "nonpayable",
"type": "constructor"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "newImpl",
"type": "address"
}
],
"name": "ImplementationUpdated",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "previousOwner",
"type": "address"
},
{
"indexed": true,
"internalType": "address",
"name": "newOwner",
"type": "address"
}
],
"name": "OwnershipTransferred",
"type": "event"
},
{
"payable": true,
"stateMutability": "payable",
"type": "fallback"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_AAVE",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_BANCOR",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_BDAI",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_CHAI",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_COMPOUND",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_CURVE_BINANCE",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_CURVE_COMPOUND",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_CURVE_SYNTHETIX",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_CURVE_USDT",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_CURVE_Y",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_FULCRUM",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_IEARN",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_KYBER",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_OASIS",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_SMART_TOKEN",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_UNISWAP",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_DISABLE_WETH",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_ENABLE_KYBER_BANCOR_RESERVE",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_ENABLE_KYBER_OASIS_RESERVE",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_ENABLE_KYBER_UNISWAP_RESERVE",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_ENABLE_MULTI_PATH_DAI",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_ENABLE_MULTI_PATH_ETH",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_ENABLE_MULTI_PATH_USDC",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "FLAG_ENABLE_UNISWAP_COMPOUND",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": false,
"inputs": [
{
"internalType": "contract IERC20",
"name": "asset",
"type": "address"
},
{
"internalType": "uint256",
"name": "amount",
"type": "uint256"
}
],
"name": "claimAsset",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"internalType": "contract IERC20",
"name": "fromToken",
"type": "address"
},
{
"internalType": "contract IERC20",
"name": "toToken",
"type": "address"
},
{
"internalType": "uint256",
"name": "amount",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "parts",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "featureFlags",
"type": "uint256"
}
],
"name": "getExpectedReturn",
"outputs": [
{
"internalType": "uint256",
"name": "returnAmount",
"type": "uint256"
},
{
"internalType": "uint256[]",
"name": "distribution",
"type": "uint256[]"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "isOwner",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "oneSplitImpl",
"outputs": [
{
"internalType": "contract IOneSplit",
"name": "",
"type": "address"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "owner",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": false,
"inputs": [],
"name": "renounceOwnership",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function"
},
{
"constant": false,
"inputs": [
{
"internalType": "contract IOneSplit",
"name": "impl",
"type": "address"
}
],
"name": "setNewImpl",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function"
},
{
"constant": false,
"inputs": [
{
"internalType": "contract IERC20",
"name": "fromToken",
"type": "address"
},
{
"internalType": "contract IERC20",
"name": "toToken",
"type": "address"
},
{
"internalType": "uint256",
"name": "amount",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "minReturn",
"type": "uint256"
},
{
"internalType": "uint256[]",
"name": "distribution",
"type": "uint256[]"
},
{
"internalType": "uint256",
"name": "featureFlags",
"type": "uint256"
}
],
"name": "swap",
"outputs": [],
"payable": true,
"stateMutability": "payable",
"type": "function"
},
{
"constant": false,
"inputs": [
{
"internalType": "address",
"name": "newOwner",
"type": "address"
}
],
"name": "transferOwnership",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function"
}
]
"""
| 21.062193
| 66
| 0.407724
| 775
| 12,869
| 6.67871
| 0.105806
| 0.078632
| 0.151082
| 0.167697
| 0.88524
| 0.860896
| 0.848532
| 0.819938
| 0.743238
| 0.709621
| 0
| 0.030844
| 0.407957
| 12,869
| 610
| 67
| 21.096721
| 0.64851
| 0.000544
| 0
| 0.633663
| 0
| 0
| 0.996034
| 0.049308
| 0
| 0
| 0.003266
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
88ba353606d1a250dc1624d6ed1bbc3a73ab2357
| 5,156
|
py
|
Python
|
AjkSpider/tools/db_new.py
|
Justyer/AnjukeSpider
|
495c036a4c2236a11da9b17f75ddffd9f2fab2f6
|
[
"MIT"
] | null | null | null |
AjkSpider/tools/db_new.py
|
Justyer/AnjukeSpider
|
495c036a4c2236a11da9b17f75ddffd9f2fab2f6
|
[
"MIT"
] | null | null | null |
AjkSpider/tools/db_new.py
|
Justyer/AnjukeSpider
|
495c036a4c2236a11da9b17f75ddffd9f2fab2f6
|
[
"MIT"
] | null | null | null |
import psycopg2

# Connect to the local 'ptd' PostgreSQL database.
# NOTE(review): credentials are hard-coded and committed here — consider
# loading them from environment variables or a config file instead.
conn = psycopg2.connect(database='ptd', user='postgres', password='495495', host='127.0.0.1', port='5432')
cur = conn.cursor()
# Create the table of proxy IP addresses used by the spider.
cur.execute('''
create table proxy(
id serial primary key,
ip varchar(255) not null
)
''')
# cur.execute('''
# create table test(
# id serial primary key,
# t1 varchar(255) not null,
# t2 bigint not null
# )
# ''')
# cur.execute('''
# create table lj_city(
# id serial primary key,
# cn_name varchar(255) not null,
# route varchar(255) not null,
# url varchar(255) not null
# )
# ''')
#
# cur.execute('''
# create table lj_district(
# id serial primary key,
# cn_name varchar(255) not null,
# route varchar(255) not null,
# city_id bigint not null
# )
# ''')
# cur.execute('''
# create table lj_community(
# id serial primary key,
# cn_name varchar(255) not null,
# route varchar(255) not null,
# district_id bigint not null
# )
# ''')
# cur.execute('''
# create table lj_residence(
# id serial primary key,
#
# name varchar(255) not null,
# avg_price varchar(255) not null,
# avg_time varchar(255) not null,
# address varchar(255) not null,
# coordinate varchar(255) not null,
# build_time varchar(255) not null,
# property_price varchar(255) not null,
# property_company varchar(255) not null,
# developer varchar(255) not null,
# floor_sum varchar(255) not null,
# house_sum varchar(255) not null,
#
# esf_url varchar(255) not null,
# deal_url varchar(255) not null,
# url varchar(255) not null,
# crawl_time varchar(255) not null,
# community_id bigint not null
# )
# ''')
#
# cur.execute('''
# create table lj_residence_around(
# id serial primary key,
#
# title varchar(255) not null,
# description varchar(255) not null,
# distance varchar(255) not null,
# type2 varchar(255) not null,
# type1 varchar(255) not null,
#
# url varchar(255) not null,
# crawl_time varchar(255) not null,
# residence_id bigint not null
# )
# ''')
# cur.execute('''
# create table lj_esf(
# id serial primary key,
#
# house_type varchar(255) not null,
# orientation varchar(255) not null,
# area varchar(255) not null,
# inner_area varchar(255) not null,
# heating_style varchar(255) not null,
# decoration varchar(255) not null,
# floor varchar(255) not null,
# house_type_struct varchar(255) not null,
# build_type varchar(255) not null,
# build_struct varchar(255) not null,
# household varchar(255) not null,
# elevator varchar(255) not null,
#
# ring_num varchar(255) not null,
# lj_num varchar(255) not null,
# broker varchar(255) not null,
#
# house_age varchar(255) not null,
# transaction_owner varchar(255) not null,
# use varchar(255) not null,
# house_owner varchar(255) not null,
# listing_time varchar(255) not null,
# listing_price varchar(255) not null,
# unit_price varchar(255) not null,
# last_deal varchar(255) not null,
# mortgage varchar(255) not null,
# house_backup varchar(255) not null,
#
# url varchar(255) not null,
# crawl_time varchar(255) not null,
# residence_name varchar(255) not null,
# residence_id bigint not null
# )
# ''')
#
# cur.execute('''
# create table lj_deal(
# id serial primary key,
#
# house_type varchar(255) not null,
# orientation varchar(255) not null,
# area varchar(255) not null,
# inner_area varchar(255) not null,
# heating_style varchar(255) not null,
# decoration varchar(255) not null,
# floor varchar(255) not null,
# house_type_struct varchar(255) not null,
# build_type varchar(255) not null,
# build_struct varchar(255) not null,
# household varchar(255) not null,
# elevator varchar(255) not null,
#
# house_age varchar(255) not null,
# transaction_owner varchar(255) not null,
# use varchar(255) not null,
# house_owner varchar(255) not null,
# listing_time varchar(255) not null,
# listing_price varchar(255) not null,
# deal_price varchar(255) not null,
# deal_time varchar(255) not null,
# last_deal varchar(255) not null,
# deal_cycle varchar(255) not null,
# look_times varchar(255) not null,
#
# url varchar(255) not null,
# crawl_time varchar(255) not null,
# residence_url varchar(255) not null,
# residence_id bigint not null,
#
# )
# ''')
# Commit the DDL and release the cursor/connection.
# Fix: the original used the Python-2-only statement form `print 'db_new ok'`,
# which is a SyntaxError under Python 3; the parenthesized call below behaves
# identically on Python 2 and works on Python 3.
print('db_new ok')
conn.commit()
cur.close()
conn.close()
| 30.690476
| 107
| 0.56827
| 631
| 5,156
| 4.537242
| 0.152139
| 0.224939
| 0.385959
| 0.504715
| 0.806846
| 0.69263
| 0.682152
| 0.681453
| 0.648271
| 0.613692
| 0
| 0.079621
| 0.325252
| 5,156
| 167
| 108
| 30.874252
| 0.743317
| 0.848138
| 0
| 0
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.076923
| 0.076923
| null | null | 0.076923
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
88c4f0cf0c4b6a8db52dc329fe7f461f1aad8272
| 25,792
|
py
|
Python
|
src/LLS_search_patterns.py
|
OscarCunningham/logic-life-search
|
9464316ccecef835afc948d19244a9864f1948e1
|
[
"MIT"
] | 22
|
2018-01-28T12:51:33.000Z
|
2021-05-12T14:04:26.000Z
|
src/LLS_search_patterns.py
|
OscarCunningham/logic-life-search
|
9464316ccecef835afc948d19244a9864f1948e1
|
[
"MIT"
] | 2
|
2018-02-03T02:11:02.000Z
|
2019-01-30T13:37:05.000Z
|
src/LLS_search_patterns.py
|
OscarCunningham/logic-life-search
|
9464316ccecef835afc948d19244a9864f1948e1
|
[
"MIT"
] | 4
|
2018-02-28T04:13:28.000Z
|
2021-10-01T19:31:43.000Z
|
from SearchPattern import SearchPattern
from LLS_messages import print_message
import LLS_files
import LLS_formatting
import copy
import itertools
import re
from LLS_literal_manipulation import neighbour_indices_from_coordinates, variable_from_literal, negate
def search_pattern_from_string(input_string, indent = 0, verbosity = 0):
    """Parse a pattern string into a search grid.

    Cells other than "0", "1" and "*" are treated as (possibly negated)
    variable literals and are renamed with a "user_input_" prefix so that
    user-supplied variable names cannot clash with internally generated ones.

    Returns the (grid, ignore_transition) pair from the parser, with the
    grid's literal cells rewritten in place.
    """
    grid, ignore_transition = LLS_formatting.parse_input_string(input_string, indent = indent, verbosity = verbosity)
    print_message("Pattern parsed as:\n" + LLS_formatting.make_csv(grid, ignore_transition) + "\n", 3, indent = indent, verbosity = verbosity)
    fixed_cells = ("0", "1", "*")
    for t_index, generation in enumerate(grid):
        for y_index, row in enumerate(generation):
            for x_index, cell in enumerate(row):
                if cell in fixed_cells:
                    continue
                variable, negated = variable_from_literal(cell)
                grid[t_index][y_index][x_index] = negate("user_input_" + variable, negated)
    return grid, ignore_transition
def blank_search_pattern(x_bound, y_bound, duration, indent = 0, verbosity = 0):
    """Build a blank spaceship search pattern.

    Returns a duration-long list of (y_bound + 2) x (x_bound + 2) grids whose
    interior cells are unknown ("*") and whose one-cell border is dead ("0").
    """
    print_message('Creating spaceship search pattern...', 3, indent = indent, verbosity = verbosity)
    width = x_bound + 2
    height = y_bound + 2
    grid = []
    for _ in range(duration):
        generation = [["0"] * width for _ in range(height)]
        # Mark everything inside the dead border as unknown.
        for row in range(1, y_bound + 1):
            for column in range(1, x_bound + 1):
                generation[row][column] = "*"
        grid.append(generation)
    print_message("Pattern created:\n" + LLS_formatting.make_csv(grid) + "\n", 3, indent = indent+1, verbosity = verbosity)
    print_message('Done\n', 3, indent = indent, verbosity = verbosity)
    return grid
def crawler_search_pattern(x_bound, y_bound, x_translate, y_translate, period, offset = False, indent = 0, verbosity = 0):
    """Create a search pattern for an agar crawler.

    The background is horizontal stripes of alternating live/dead rows
    (phase inverted when `offset` is True).  The interior is left unknown
    and every cell is constrained to equal its translate by
    (x_translate, y_translate) a `period` of generations away, so any
    solution moves through the striped agar with that displacement.

    Returns the (grid, ignore_transition) pair of the constrained pattern.
    """
    print_message('Creating agar crawler search pattern...', 3, indent = indent, verbosity = verbosity)
    width = x_translate + x_bound + 4
    height = y_translate + y_bound + 4
    duration = period + 1
    # Stripe phase: which of "0"/"1" lies on even rows.
    if offset:
        booleans = ("0","1")
    else:
        booleans = ("1","0")
    # Striped agar background, identical in every generation.
    grid = [[[booleans[0] if y % 2 == 0 else booleans[1] for x in range(
        width)] for y in range(height)] for generation in range(duration)]
    ignore_transition = [[[False for x in range(
        width)] for y in range(height)] for generation in range(duration)]
    for x in range(width):
        for y in range(height):
            for t in range(duration):
                # Transitions on the outermost one-cell frame are unconstrained.
                if x in [0, width-1] or y in [0, height - 1]:
                    ignore_transition[t][y][x] = True
                # Cells two or more in from the edge are unknowns to solve for.
                if x in range(2,width - 2) and y in range(2, height - 2):
                    grid[t][y][x] = "*"
    search_pattern = SearchPattern(grid,ignore_transition = ignore_transition)
    search_pattern.standardise_varaibles_names()
    # Periodicity constraints: each cell equals its translate `period`
    # generations earlier and later; translates that fall outside the grid
    # are pinned to the background stripe value instead.
    to_force_equal = []
    for t, generation in enumerate(search_pattern.grid):
        for y, row in enumerate(generation):
            for x, cell in enumerate(row):
                if t - period in range(duration):
                    if x - x_translate in range(width) and y - y_translate in range(height):
                        to_force_equal.append((search_pattern.grid[t][y][x], search_pattern.grid[t - period][y - y_translate][x - x_translate]))
                    else:
                        to_force_equal.append((search_pattern.grid[t][y][x], booleans[0] if y % 2 == 0 else booleans[1]))
                if t + period in range(duration):
                    if x + x_translate in range(width) and y + y_translate in range(height):
                        to_force_equal.append((search_pattern.grid[t][y][x], search_pattern.grid[t + period][y + y_translate][x + x_translate]))
                    else:
                        to_force_equal.append((search_pattern.grid[t][y][x], booleans[0] if y % 2 == 0 else booleans[1]))
    search_pattern.force_equal(to_force_equal)
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    print_message("Pattern created:\n" + search_pattern.make_string(pattern_output_format = "csv") + "\n", 3, indent = indent+1, verbosity = verbosity)
    print_message('Done\n', 3, indent = indent, verbosity = verbosity)
    return search_pattern.grid, search_pattern.ignore_transition
def check_orphan(file_name, number_of_generations, indent = 0, verbosity = 0):
    """Create a search pattern that tests whether a file contains an orphan.

    The single-generation pattern from `file_name` is padded with a border of
    unknowns and placed as the LAST generation; earlier generations are then
    grown backwards so that searching for a predecessor over
    `number_of_generations` steps decides orphan-hood.

    Returns the (grid, ignore_transition) pair of the constructed pattern.
    """
    print_message('Creating search pattern to see if file "' + file_name + '" contains an orphan...', 3, indent = indent, verbosity = verbosity)
    input_string = LLS_files.string_from_file(file_name, indent = indent + 1, verbosity = verbosity)
    grid, _ = LLS_formatting.parse_input_string(input_string, indent = indent + 1, verbosity = verbosity)
    assert len(grid) == 1, "More than one generation in input"
    print_message("Pattern parsed as:\n" + LLS_formatting.make_csv(grid) + "\n", 3, indent = indent+1, verbosity = verbosity)
    pattern = grid[0]
    width = len(pattern[0])
    height = len(pattern)
    # Pad the target pattern on all four sides with `padding` rings of
    # unknown cells so predecessors have room to differ outside it.
    padding = number_of_generations
    pattern = [["*" for columns in range(padding)] + row + ["*" for columns in range(padding)] for row in [["*" for cell in range(
        width)] for rows in range(padding)] + pattern + [["*" for cell in range(width)] for rows in range(padding)]]
    # Recompute dimensions of the padded pattern.
    width = len(pattern[0])
    height = len(pattern)
    duration = number_of_generations + 1  # NOTE(review): assigned but not used below
    # All-dead earlier generations, with the padded pattern as the final one.
    grid = [[["0" for cell in range(width)] for row in range(
        height)] for generation in range(number_of_generations)] + [pattern]
    # Only the final generation's transitions are constrained initially.
    ignore_transition = [[[True for cell in range(width)] for row in range(
        height)] for generation in range(number_of_generations)] + [[[False for cell in range(width)] for row in range(
        height)]]
    # Sweep backwards in time: any cell whose forward neighbourhood touches a
    # relevant cell becomes an unknown with a constrained transition.
    for t in range(number_of_generations - 1, -1, -1):
        for y in range(height):
            for x in range(width):
                neighbours = [grid[neighbour_t][neighbour_y][neighbour_x]
                    for neighbour_x, neighbour_y, neighbour_t
                    in neighbour_indices_from_coordinates(width,height,x,y,t,t_offset=1)]
                if t == number_of_generations - 1:
                    nonempties = ["0", "1"]
                else:
                    nonempties = ["*"]
                if any(nonempty in neighbours for nonempty in nonempties):
                    grid[t][y][x] = "*"
                    ignore_transition[t][y][x] = False
    # The last generation must be fully determined: clear any remaining
    # unknowns and drop their transition constraints.
    for y in range(height):
        for x in range(width):
            if grid[-1][y][x] not in ["0", "1"]:
                grid[-1][y][x] = "0"
                ignore_transition[-1][y][x] = True
    print_message("Search pattern:\n" + LLS_formatting.make_csv(grid, ignore_transition) + "\n", 3, indent = indent+1, verbosity = verbosity)
    print_message('Done\n', 3, indent = indent, verbosity = verbosity)
    return grid, ignore_transition
def glider_eater_search_pattern(width,height,digestion_time,symmetry="C1",indent = 0, verbosity = 0):
    """Create a search pattern for an eater that absorbs a glider.

    The final generation is an unknown still life (repeated in earlier
    generations); six pre-evolved glider generations are overlaid in the
    top-left corner, and the eater must return to its original state within
    `digestion_time` generations after the glider arrives.

    Returns the (grid, ignore_transition) pair of the constructed pattern.
    """
    print_message('Creating eater search pattern...', 3, indent = indent, verbosity = verbosity)
    # Six consecutive generations of a glider entering from the top-left.
    glider_in = [
        [["1","0","0","0","0"],
        ["0","1","1","0","0"],
        ["1","1","0","0","0"],
        ["0","0","0","0","0"],
        ["0","0","0","0","0"]],
        [["0","1","0","0","0"],
        ["0","0","1","0","0"],
        ["1","1","1","0","0"],
        ["0","0","0","0","0"],
        ["0","0","0","0","0"]],
        [["0","0","0","0","0"],
        ["1","0","1","0","0"],
        ["0","1","1","0","0"],
        ["0","1","0","0","0"],
        ["0","0","0","0","0"]],
        [["0","0","0","0","0"],
        ["0","0","1","0","0"],
        ["1","0","1","0","0"],
        ["0","1","1","0","0"],
        ["0","0","0","0","0"]],
        [["0","0","0","0","0"],
        ["0","1","0","0","0"],
        ["0","0","1","1","0"],
        ["0","1","1","0","0"],
        ["0","0","0","0","0"]],
        [["0","0","0","0","0"],
        ["0","0","1","0","0"],
        ["0","0","0","1","0"],
        ["0","1","1","1","0"],
        ["0","0","0","0","0"]]
    ]
    # Pad the search area with a one-cell dead border.
    width = width + 2
    height = height + 2
    duration = digestion_time + 6
    grid = [[["0"
        for i in range(width)] for j in range(height)] for k in range(duration)]
    # Final generation: unknown interior (the candidate eater).
    for y in range(1,height-1):
        for x in range(1,width-1):
            grid[-1][y][x] = "*"
    search_pattern = SearchPattern(grid)
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    # Keep the eater clear of the 5x5 corner where the glider is injected.
    for x in range(5):
        for y in range(5):
            search_pattern.grid[-1][y][x] = "0"
    search_pattern.force_symmetry(symmetry)
    # First six generations start as copies of the final (eater) state...
    for t, generation in enumerate(glider_in):
        search_pattern.grid[t] = copy.deepcopy(search_pattern.grid[-1])
    # ...with the incoming glider overlaid in the corner.
    for t, generation in enumerate(glider_in):
        for y, row in enumerate(generation):
            for x, cell in enumerate(row):
                search_pattern.grid[t][y][x] = cell
    # Digestion phase: interiors are unknown until the final generation.
    for t in range(6,duration-1):
        for y in range(1,height-1):
            for x in range(1,width-1):
                search_pattern.grid[t][y][x] = "*"
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    print_message("Pattern created:\n" + search_pattern.make_string(pattern_output_format = "csv") + "\n", 3, indent = indent+1, verbosity = verbosity)
    print_message('Done\n', 3, indent = indent, verbosity = verbosity)
    return search_pattern.grid, search_pattern.ignore_transition
def lwss_eater_search_pattern(width,height,digestion_time,symmetry="C1",indent = 0, verbosity = 0):
    """Create a search pattern for an eater that absorbs a lightweight spaceship.

    The final generation is an unknown still life (repeated in earlier
    generations); twelve pre-evolved LWSS generations are overlaid at the
    vertical centre of the left edge, and the eater must return to its
    original state within `digestion_time` generations.

    Fix: the centre row is computed with floor division (`height // 2`).
    The original used `height / 2` as a list index, which is float division
    under Python 3 and raises TypeError there; `//` is identical on
    Python 2 integers and correct on Python 3.

    Returns the (grid, ignore_transition) pair of the constructed pattern.
    """
    print_message('Creating lwss search pattern...', 3, indent = indent, verbosity = verbosity)
    assert height % 2 == 1, "Height must be odd"
    # Twelve consecutive generations of an LWSS entering from the left edge.
    lwss_in = [
        [["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["1","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["1","0","0","0","0","0","0"],
        ["1","0","0","0","0","0","0"],
        ["1","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["1","0","0","0","0","0","0"],
        ["1","1","0","0","0","0","0"],
        ["1","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
        ["1","0","0","0","0","0","0"],
        ["0","1","0","0","0","0","0"],
        ["0","1","0","0","0","0","0"],
        ["1","1","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["1","1","0","0","0","0","0"],
        ["0","1","1","0","0","0","0"],
        ["1","1","0","0","0","0","0"],
        ["1","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["1","1","1","0","0","0","0"],
        ["0","0","1","0","0","0","0"],
        ["0","0","1","0","0","0","0"],
        ["0","1","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
        ["1","1","0","0","0","0","0"],
        ["1","1","1","0","0","0","0"],
        ["1","0","1","1","0","0","0"],
        ["0","1","1","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
        ["0","0","1","0","0","0","0"],
        ["0","0","0","1","0","0","0"],
        ["0","0","0","1","0","0","0"],
        ["1","1","1","1","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","1","1","0","0","0"],
        ["1","1","0","1","1","0","0"],
        ["1","1","1","1","0","0","0"],
        ["0","1","1","0","0","0","0"],
        ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","1","1","1","1","0","0"],
        ["1","0","0","0","1","0","0"],
        ["0","0","0","0","1","0","0"],
        ["1","0","0","1","0","0","0"],
        ["0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0"],
        ["0","0","1","1","0","0","0"],
        ["0","1","1","1","1","0","0"],
        ["0","1","1","0","1","1","0"],
        ["0","0","0","1","1","0","0"],
        ["0","0","0","0","0","0","0"],
        ["0","0","0","0","0","0","0"]]
    ]
    in_time = len(lwss_in)
    # Pad the search area with a one-cell dead border.
    width = width + 2
    height = height + 2
    mid = height // 2  # integer centre row (floor division: works on Python 2 and 3)
    duration = digestion_time + in_time
    grid = [[["0"
        for i in range(width)] for j in range(height)] for k in range(duration)]
    # Final generation: unknown interior (the candidate eater).
    for y in range(1,height-1):
        for x in range(1,width-1):
            grid[-1][y][x] = "*"
    search_pattern = SearchPattern(grid)
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    # Keep the eater clear of the 7x7 region where the LWSS is injected.
    for x in range(7):
        for y in range(mid - 3, mid + 4):
            search_pattern.grid[-1][y][x] = "0"
    search_pattern.force_symmetry(symmetry)
    # Incoming generations start as copies of the final (eater) state...
    for t, generation in enumerate(lwss_in):
        search_pattern.grid[t] = copy.deepcopy(search_pattern.grid[-1])
    # ...with the incoming LWSS overlaid; transitions on the left edge rows it
    # crosses are unconstrained (the ship originates outside the grid).
    for t, generation in enumerate(lwss_in):
        for y, row in enumerate(generation):
            if y not in [0, len(generation) - 1]:
                search_pattern.ignore_transition[t][mid - 3 + y][0] = True
            for x, cell in enumerate(row):
                search_pattern.grid[t][mid - 3 + y][x] = cell
    # Digestion phase: interiors are unknown until the final generation.
    for t in range(in_time,duration-1):
        for y in range(1,height-1):
            for x in range(1,width-1):
                search_pattern.grid[t][y][x] = "*"
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    print_message("Pattern created:\n" + search_pattern.make_string(pattern_output_format = "csv") + "\n", 3, indent = indent+1, verbosity = verbosity)
    print_message('Done\n', 3, indent = indent, verbosity = verbosity)
    return search_pattern.grid, search_pattern.ignore_transition
def hwss_eater_search_pattern(width,height,digestion_time,symmetry="C1",indent = 0, verbosity = 0):
    """Build a search pattern for an eater of a heavyweight spaceship (HWSS).

    width, height: size of the search area (height must be odd); a one-cell
        border is added around it.
    digestion_time: number of generations after the ship arrives in which the
        debris must settle.
    symmetry: symmetry constraint passed to SearchPattern.force_symmetry.
    Returns (grid, ignore_transition) from the constructed SearchPattern.
    """
    # Bug fix: message previously said 'lwss' (copy-paste from the LWSS variant).
    print_message('Creating hwss search pattern...', 3, indent = indent, verbosity = verbosity)
    assert height % 2 == 1, "Height must be odd"
    # Successive generations of an incoming HWSS entering from the left edge.
    hwss_in = [
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["0","1","0","0","0","0","0","0","0"],
         ["0","1","0","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0","0","0"],
         ["0","1","1","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","1","1","0","0","0","0","0","0"],
         ["0","0","1","0","0","0","0","0","0"],
         ["0","0","1","0","0","0","0","0","0"],
         ["0","1","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","1","0","0","0","0","0","0","0"],
         ["1","1","1","0","0","0","0","0","0"],
         ["1","0","1","1","0","0","0","0","0"],
         ["0","1","1","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["1","0","0","0","0","0","0","0","0"],
         ["0","0","1","0","0","0","0","0","0"],
         ["0","0","0","1","0","0","0","0","0"],
         ["0","0","0","1","0","0","0","0","0"],
         ["1","1","1","1","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","1","1","0","0","0","0","0"],
         ["1","1","0","1","1","0","0","0","0"],
         ["1","1","1","1","0","0","0","0","0"],
         ["1","1","1","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","1","1","1","1","0","0","0","0"],
         ["0","0","0","0","1","0","0","0","0"],
         ["0","0","0","0","1","0","0","0","0"],
         ["0","0","0","1","0","0","0","0","0"],
         ["1","1","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["1","1","1","1","0","0","0","0","0"],
         ["1","1","1","1","1","0","0","0","0"],
         ["1","1","1","0","1","1","0","0","0"],
         ["0","0","0","1","1","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","1","1","0","0","0","0","0","0"],
         ["0","0","0","0","1","0","0","0","0"],
         ["0","0","0","0","0","1","0","0","0"],
         ["0","0","0","0","0","1","0","0","0"],
         ["1","1","1","1","1","1","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","1","1","0","0","0"],
         ["1","1","1","1","0","1","1","0","0"],
         ["1","1","1","1","1","1","0","0","0"],
         ["0","1","1","1","1","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","1","1","1","1","1","1","0","0"],
         ["1","0","0","0","0","0","1","0","0"],
         ["0","0","0","0","0","0","1","0","0"],
         ["1","0","0","0","0","1","0","0","0"],
         ["0","0","1","1","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]],
        [["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","1","1","1","1","0","0","0"],
         ["0","1","1","1","1","1","1","0","0"],
         ["0","1","1","1","1","0","1","1","0"],
         ["0","0","0","0","0","1","1","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"],
         ["0","0","0","0","0","0","0","0","0"]]
        ]
    in_time = len(hwss_in)
    # Add a one-cell border around the requested search area.
    width = width + 2
    height = height + 2
    duration = digestion_time + in_time
    grid = [[["0"
        for i in range(width)] for j in range(height)] for k in range(duration)]
    # The final generation is entirely unknown within the border.
    for y in range(1,height-1):
        for x in range(1,width-1):
            grid[-1][y][x] = "*"
    search_pattern = SearchPattern(grid)
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    # Clear the 9-cell-tall lane the spaceship travels along in the final
    # generation.  Bug fix: use floor division so the indices stay integers
    # under Python 3 ("height/2" yields a float there and raises TypeError).
    for x in range(9):
        for y in range(height//2 - 4, height//2 + 5):
            search_pattern.grid[-1][y][x] = "0"
    search_pattern.force_symmetry(symmetry)
    # Seed each incoming generation with a copy of the (cleared) final grid.
    for t in range(in_time):
        search_pattern.grid[t] = copy.deepcopy(search_pattern.grid[-1])
    for t, generation in enumerate(hwss_in):
        for y, row in enumerate(generation):
            if y not in [0, len(generation) - 1]:
                # Cells entering from the left edge need not obey the rule.
                search_pattern.ignore_transition[t][height//2 - 4 + y][0] = True
            for x, cell in enumerate(row):
                search_pattern.grid[t][height//2 - 4 + y][x] = cell
    # All intermediate generations after arrival are unknown.
    for t in range(in_time,duration-1):
        for y in range(1,height-1):
            for x in range(1,width-1):
                search_pattern.grid[t][y][x] = "*"
    search_pattern.standardise_varaibles_names(indent = indent + 1, verbosity = verbosity)
    print_message("Pattern created:\n" + search_pattern.make_string(pattern_output_format = "csv") + "\n", 3, indent = indent+1, verbosity = verbosity)
    print_message('Done\n', 3, indent = indent, verbosity = verbosity)
    return search_pattern.grid, search_pattern.ignore_transition
| 48.299625
| 151
| 0.378683
| 3,718
| 25,792
| 2.55702
| 0.03362
| 0.347744
| 0.483118
| 0.597875
| 0.846219
| 0.804986
| 0.790155
| 0.757757
| 0.757757
| 0.728726
| 0
| 0.123794
| 0.304707
| 25,792
| 533
| 152
| 48.390244
| 0.406346
| 0
| 0
| 0.722105
| 0
| 0
| 0.10069
| 0
| 0
| 0
| 0
| 0
| 0.006316
| 1
| 0.014737
| false
| 0
| 0.016842
| 0
| 0.046316
| 0.044211
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
88cf2083378ff5918f17946959d9d0560257a05d
| 1,358
|
py
|
Python
|
output_sample.py
|
asherif844/diarization
|
ee9c9a7d88def5110bed2a3696cc484d29def646
|
[
"Unlicense"
] | null | null | null |
output_sample.py
|
asherif844/diarization
|
ee9c9a7d88def5110bed2a3696cc484d29def646
|
[
"Unlicense"
] | null | null | null |
output_sample.py
|
asherif844/diarization
|
ee9c9a7d88def5110bed2a3696cc484d29def646
|
[
"Unlicense"
] | null | null | null |
# Sample diarization output: maps an audio file name to a list of dialogue
# segments, each with speaker id, start/end times, transcript text and the
# path of its extracted audio snippet.
import json, pprint
output={"audio_test_min_1.wav": [{"dialogue_id": "2020-02-23 11:32:32.270748", "sequence_id": "0", "speaker": 1.0, "start_time": 0.1, "end_time": 26.300000000000004, "text": "On Saturday, or next to meet you right pleasure.So you haven't been here in awhile.Open busy.Well, that's that's probably good.Tell me if I'm correct or not, but I just looked through your chart real quick. It looks like you're on no medications correct no medical problems that we know of today.So tell me what you're in why did you come in today?", "snippet_path": "./pyAudioAnalysis/data/Greenway/snippet_0.wav"}, {"dialogue_id": "2020-02-23 11:32:32.270748", "sequence_id": "1", "speaker": 0.0, "start_time": 26.500000000000004, "end_time": 59.900000000000006, "text": "So I decided to rearrange my whole garden with all coming up so I pulled a lot of my beds and was replacing some mums and after that, you know. Of course, or as usual, but what's your favorite flower.This season, I like lilies, but right now, it's mom's because of the winter very nice very nice but I was noticing. You know after a few days. It was just a lot of pain like radiating almost down the side of my hip into my leg and its sharp at certain points, where if I move, One Direction Or.", "snippet_path": "./pyAudioAnalysis/data/Greenway/snippet_1.wav"}]}
# Pretty-print the structure for inspection.
pprint.pprint(output)
| 339.5
| 1,313
| 0.74595
| 252
| 1,358
| 3.960317
| 0.563492
| 0.024048
| 0.026052
| 0.034068
| 0.176353
| 0.176353
| 0.086172
| 0.086172
| 0.086172
| 0.086172
| 0
| 0.087855
| 0.145066
| 1,358
| 4
| 1,314
| 339.5
| 0.771748
| 0
| 0
| 0
| 0
| 0.666667
| 0.834808
| 0.066372
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
cc42606183d873f07888599dfdc2ad11d5cf8e9f
| 1,550
|
py
|
Python
|
mindsdb/api/mysql/mysql_proxy/data_types/mysql_packets/__init__.py
|
yarenty/mindsdb
|
9164bca6f45fd0f5ec329babe973f286ffe59709
|
[
"MIT"
] | 261
|
2018-09-28T02:32:17.000Z
|
2018-12-10T06:30:54.000Z
|
mindsdb/api/mysql/mysql_proxy/data_types/mysql_packets/__init__.py
|
yarenty/mindsdb
|
9164bca6f45fd0f5ec329babe973f286ffe59709
|
[
"MIT"
] | 27
|
2018-09-26T08:49:11.000Z
|
2018-12-10T14:42:52.000Z
|
mindsdb/api/mysql/mysql_proxy/data_types/mysql_packets/__init__.py
|
yarenty/mindsdb
|
9164bca6f45fd0f5ec329babe973f286ffe59709
|
[
"MIT"
] | 46
|
2018-10-06T10:11:18.000Z
|
2018-12-10T04:02:17.000Z
|
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.err_packet import ErrPacket
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.handshake_packet import HandshakePacket
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.fast_auth_fail_packet import FastAuthFail
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.password_answer import PasswordAnswer
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.handshake_response_packet import HandshakeResponsePacket
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.ok_packet import OkPacket
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.switch_auth_packet import SwitchOutPacket
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.switch_auth_response_packet import SwitchOutResponse
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.command_packet import CommandPacket
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.column_count_packet import ColumnCountPacket
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.column_definition_packet import ColumnDefenitionPacket
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.resultset_row_package import ResultsetRowPacket
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.eof_packet import EofPacket
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.stmt_prepare_header import STMTPrepareHeaderPacket
from mindsdb.api.mysql.mysql_proxy.data_types.mysql_packets.binary_resultset_row_package import BinaryResultsetRowPacket
| 96.875
| 120
| 0.903226
| 222
| 1,550
| 5.981982
| 0.211712
| 0.124247
| 0.158133
| 0.214608
| 0.60241
| 0.60241
| 0.60241
| 0.60241
| 0.60241
| 0.60241
| 0
| 0
| 0.03871
| 1,550
| 15
| 121
| 103.333333
| 0.891275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.066667
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
cc7c3409cf3a7ea0558beaf279857da04f2dc2f7
| 10,667
|
py
|
Python
|
datahub/company/test/test_consent_client.py
|
cgsunkel/data-hub-api
|
a92faabf73fb93b5bfd94fd465eafc3e29aa6d8e
|
[
"MIT"
] | null | null | null |
datahub/company/test/test_consent_client.py
|
cgsunkel/data-hub-api
|
a92faabf73fb93b5bfd94fd465eafc3e29aa6d8e
|
[
"MIT"
] | 4
|
2021-06-30T10:34:50.000Z
|
2021-06-30T10:34:51.000Z
|
datahub/company/test/test_consent_client.py
|
cgsunkel/data-hub-api
|
a92faabf73fb93b5bfd94fd465eafc3e29aa6d8e
|
[
"MIT"
] | null | null | null |
import pytest
from django.conf import settings
from requests.exceptions import ConnectionError, Timeout
from rest_framework import status
from datahub.company import consent as consent
from datahub.company.constants import CONSENT_SERVICE_EMAIL_CONSENT_TYPE
from datahub.core.test_utils import HawkMockJSONResponse
def generate_hawk_response(payload):
    """Build a mocked HAWK-validated JSON response wrapping *payload*."""
    hawk_response = HawkMockJSONResponse(
        api_id=settings.COMPANY_MATCHING_HAWK_ID,
        api_key=settings.COMPANY_MATCHING_HAWK_KEY,
        response=payload,
    )
    return hawk_response
class TestConsentClient:
    """
    Test for consent service client module
    """

    # HTTP statuses the client must surface as ConsentAPIHTTPError.
    http_error_statuses = (
        status.HTTP_400_BAD_REQUEST,
        status.HTTP_401_UNAUTHORIZED,
        status.HTTP_403_FORBIDDEN,
        status.HTTP_404_NOT_FOUND,
        status.HTTP_405_METHOD_NOT_ALLOWED,
        status.HTTP_500_INTERNAL_SERVER_ERROR,
        status.HTTP_502_BAD_GATEWAY,
        status.HTTP_503_SERVICE_UNAVAILABLE,
        status.HTTP_504_GATEWAY_TIMEOUT,
    )

    # (requests exception, consent exception it must be translated to)
    transport_exceptions = (
        (ConnectionError, consent.ConsentAPIConnectionError),
        (Timeout, consent.ConsentAPITimeoutError),
    )

    def _mock_lookup(self, requests_mock, emails, accepts_marketing):
        """
        Register a mock for the consent lookup endpoint reporting
        *accepts_marketing* for every address in *emails*; returns the matcher.
        """
        return requests_mock.post(
            f'{settings.CONSENT_SERVICE_BASE_URL}'
            f'{consent.CONSENT_SERVICE_PERSON_PATH_LOOKUP}',
            text=generate_hawk_response({
                'results': [
                    {
                        'email': email,
                        'consents': [
                            CONSENT_SERVICE_EMAIL_CONSENT_TYPE,
                        ] if accepts_marketing else [],
                    } for email in emails
                ],
            }),
            status_code=status.HTTP_200_OK,
        )

    def _mock_update(self, requests_mock):
        """Register a mock for the consent update endpoint; returns the matcher."""
        return requests_mock.post(
            f'{settings.CONSENT_SERVICE_BASE_URL}'
            f'{consent.CONSENT_SERVICE_PERSON_PATH}',
            text=generate_hawk_response({
                'consents': [
                    CONSENT_SERVICE_EMAIL_CONSENT_TYPE,
                ],
                'modified_at': '2020-03-12T15:33:50.907000Z',
                'email': 'foo@bar.com',
                'phone': '',
                'key_type': 'email',
            }),
            status_code=status.HTTP_201_CREATED,
        )

    @pytest.mark.parametrize('accepts_marketing', (True, False))
    def test_get_one(self, requests_mock, accepts_marketing):
        """
        Try to get consent status for a single email address
        """
        matcher = self._mock_lookup(requests_mock, ['foo@bar.com'], accepts_marketing)
        resp = consent.get_one('foo@bar.com')
        assert resp == accepts_marketing
        assert matcher.called_once
        assert matcher.last_request.query == 'limit=1'
        assert matcher.last_request.json() == {'emails': ['foo@bar.com']}

    @pytest.mark.parametrize('emails', ([], ['foo@bar.com'], ['bar@foo.com', 'foo@bar.com']))
    @pytest.mark.parametrize('accepts_marketing', (True, False))
    def test_get_many(self, requests_mock, accepts_marketing, emails):
        """
        Try to get consent status for a list of email addresses
        """
        matcher = self._mock_lookup(requests_mock, emails, accepts_marketing)
        resp = consent.get_many(emails)
        assert resp == {email: accepts_marketing for email in emails}
        assert matcher.called_once
        assert matcher.last_request.query == f'limit={len(emails)}'
        assert matcher.last_request.json() == {'emails': emails}

    @pytest.mark.parametrize('accepts_marketing', (True, False))
    def test_get_one_normalises_emails(self, requests_mock, accepts_marketing):
        """
        Try to get consent status for a single email address
        """
        matcher = self._mock_lookup(requests_mock, ['foo@bar.com'], accepts_marketing)
        # The upper-cased input must be lower-cased before being sent.
        resp = consent.get_one('FOO@BAR.COM')
        assert resp == accepts_marketing
        assert matcher.called_once
        assert matcher.last_request.query == 'limit=1'
        assert matcher.last_request.json() == {'emails': ['foo@bar.com']}

    @pytest.mark.parametrize('emails', ([], ['foo@bar.com'], ['bar@foo.com', 'foo@bar.com']))
    @pytest.mark.parametrize('accepts_marketing', (True, False))
    def test_get_many_normalises_emails(self, requests_mock, accepts_marketing, emails):
        """
        Try to get consent status for a list of email addresses
        """
        matcher = self._mock_lookup(requests_mock, emails, accepts_marketing)
        # Upper-cased inputs must be lower-cased before being sent.
        resp = consent.get_many([email.upper() for email in emails])
        assert resp == {email: accepts_marketing for email in emails}
        assert matcher.called_once
        assert matcher.last_request.query == f'limit={len(emails)}'
        assert matcher.last_request.json() == {'emails': emails}

    @pytest.mark.parametrize('accepts_marketing', (True, False))
    def test_update(self, requests_mock, accepts_marketing):
        """
        Try to update consent status
        """
        matcher = self._mock_update(requests_mock)
        result = consent.update_consent('foo@bar.com', accepts_marketing)
        assert result is None
        assert matcher.called_once

    def test_forward_zipkin_headers(self, requests_mock):
        """
        Forward zipkin headers from origin request to the API call
        """
        headers = {
            'x-b3-traceid': '123',
            'x-b3-spanid': '456',
        }
        matcher = self._mock_update(requests_mock)
        result = consent.update_consent('foo@bar.com', False, None,
                                        headers=headers)
        assert result is None
        assert headers.items() <= matcher.last_request.headers.items()
        assert matcher.called_once

    @pytest.mark.parametrize('response_status', http_error_statuses)
    def test_get_one_raises_exception_when_service_http_errors(
            self,
            requests_mock,
            response_status,
    ):
        """
        When the Consent Service responds with a http error, It raises a HTTPError.
        """
        requests_mock.post(
            f'{settings.CONSENT_SERVICE_BASE_URL}'
            f'{consent.CONSENT_SERVICE_PERSON_PATH_LOOKUP}',
            status_code=response_status,
        )
        with pytest.raises(consent.ConsentAPIHTTPError):
            consent.get_one('foo@bar.com')

    @pytest.mark.parametrize('response_status', http_error_statuses)
    def test_get_many_raises_exception_when_service_http_errors(
            self,
            requests_mock,
            response_status,
    ):
        """
        When the Consent Service responds with a http error, It raises a HTTPError.
        """
        requests_mock.post(
            f'{settings.CONSENT_SERVICE_BASE_URL}'
            f'{consent.CONSENT_SERVICE_PERSON_PATH_LOOKUP}',
            status_code=response_status,
        )
        emails = ['foo1@bar.com', 'foo2@bar.com', 'foo3@bar.com']
        with pytest.raises(consent.ConsentAPIHTTPError):
            consent.get_many(emails)

    @pytest.mark.parametrize('exceptions', transport_exceptions)
    def test_get_many_raises_exception_on_connection_or_timeout_error(
            self,
            requests_mock,
            exceptions,
    ):
        """
        When the Consent Service responds with a 4XX error, It raises
        a ConnectionError or a Timeout Error
        """
        (request_exception, consent_exception) = exceptions
        requests_mock.post(
            f'{settings.CONSENT_SERVICE_BASE_URL}'
            f'{consent.CONSENT_SERVICE_PERSON_PATH_LOOKUP}',
            exc=request_exception,
        )
        emails = ['foo1@bar.com', 'foo2@bar.com', 'foo3@bar.com']
        with pytest.raises(consent_exception):
            consent.get_many(emails)

    @pytest.mark.parametrize('exceptions', transport_exceptions)
    def test_get_one_raises_exception_on_connection_or_timeout_error(
            self,
            requests_mock,
            exceptions,
    ):
        """
        When the Consent Service responds with a 4XX error, It raises
        a ConnectionError or a Timeout Error
        """
        (request_exception, consent_exception) = exceptions
        requests_mock.post(
            f'{settings.CONSENT_SERVICE_BASE_URL}'
            f'{consent.CONSENT_SERVICE_PERSON_PATH_LOOKUP}',
            exc=request_exception,
        )
        with pytest.raises(consent_exception):
            consent.get_one('foo@bar.com')
| 36.282313
| 93
| 0.585825
| 1,099
| 10,667
| 5.389445
| 0.150136
| 0.075637
| 0.024312
| 0.028702
| 0.872868
| 0.858011
| 0.851595
| 0.819348
| 0.819348
| 0.819348
| 0
| 0.017884
| 0.318553
| 10,667
| 293
| 94
| 36.406143
| 0.796946
| 0.068998
| 0
| 0.763713
| 0
| 0
| 0.156871
| 0.085886
| 0
| 0
| 0
| 0
| 0.088608
| 1
| 0.046414
| false
| 0
| 0.029536
| 0
| 0.084388
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aee95d6691870adedfca774a79e7a11ad3704ff1
| 158
|
py
|
Python
|
tests/main_test.py
|
Madoshakalaka/pyckage-example-project
|
5303f65adcff6e2787e2d2ba8a468a99c14f42b1
|
[
"MIT"
] | null | null | null |
tests/main_test.py
|
Madoshakalaka/pyckage-example-project
|
5303f65adcff6e2787e2d2ba8a468a99c14f42b1
|
[
"MIT"
] | null | null | null |
tests/main_test.py
|
Madoshakalaka/pyckage-example-project
|
5303f65adcff6e2787e2d2ba8a468a99c14f42b1
|
[
"MIT"
] | null | null | null |
from pyckage_example_project.main import trivial_function_that_returns_one
def test_trivial_function():
    """Smoke test: the packaged trivial function returns 1."""
    assert trivial_function_that_returns_one() == 1
| 26.333333
| 74
| 0.848101
| 22
| 158
| 5.545455
| 0.681818
| 0.368852
| 0.311475
| 0.42623
| 0.47541
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007042
| 0.101266
| 158
| 5
| 75
| 31.6
| 0.852113
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
4e06858fb22daf6f8a84de8fa15210870bdb13c1
| 18,075
|
py
|
Python
|
skipole/skilift/editwidget.py
|
bernie-skipole/skipole
|
b45d3291c593e7c03c053ab4f192f1ecc5c3e9b9
|
[
"MIT"
] | null | null | null |
skipole/skilift/editwidget.py
|
bernie-skipole/skipole
|
b45d3291c593e7c03c053ab4f192f1ecc5c3e9b9
|
[
"MIT"
] | null | null | null |
skipole/skilift/editwidget.py
|
bernie-skipole/skipole
|
b45d3291c593e7c03c053ab4f192f1ecc5c3e9b9
|
[
"MIT"
] | null | null | null |
"""Functions for editing a widget"""
import pkgutil, importlib, inspect, re
from ..ski import widgets, dump_project
from ..ski.excepts import ServerError, ValidateError
from . import widget_info, fromjson, insert_item_in_page, insert_item_in_section, get_proj_page, get_proj_section
from .info_tuple import WidgetDescription, FieldDescription, ContainerInfo
# a search for anything none-alphanumeric and not an underscore
_AN = re.compile('[^\w]')
def widget_modules():
    "Return a tuple of widget module names"
    module_names = []
    for _loader, module_name, _ispkg in pkgutil.iter_modules(widgets.__path__):
        module_names.append(module_name)
    return tuple(module_names)
def widgets_in_module(module_name):
    "Returns a list of WidgetDescription's present in the module"
    module = importlib.import_module("skipole.ski.widgets." + module_name)

    def _defined_here(member):
        # Only classes defined in this widgets module itself, not imports.
        return inspect.isclass(member) and (member.__module__ == module.__name__)

    descriptions = []
    for classname, cls in inspect.getmembers(module, _defined_here):
        single_fields = [FieldDescription(*fld) for fld in cls.field_arguments_single()]
        list_fields = [FieldDescription(*fld, False, False) for fld in cls.field_arguments_list()]
        table_fields = [FieldDescription(*fld, False, False) for fld in cls.field_arguments_table()]
        dict_fields = [FieldDescription(*fld, False, False) for fld in cls.field_arguments_dictionary()]
        descriptions.append(WidgetDescription(
            module_name,
            classname,
            '',
            cls.field_arguments(),
            cls.len_containers(),
            cls.description(),
            single_fields,
            list_fields,
            table_fields,
            dict_fields,
            '',
            None,
        ))
    return descriptions
def _create_new_widget(project, module_name, widget_classname, name, brief):
    """Validate *name*, locate the widget class and return a new widget instance.

    Raises ServerError if the name is invalid or the module/class cannot be
    identified, or if css defaults cannot be read from defaults.json.
    """
    if not name:
        raise ServerError("Invalid name")
    if name.lower() in ('body', 'head', 'svg', 'show_error'):
        raise ServerError(message="Unable to create the widget, the name given is reserved")
    if _AN.search(name):
        raise ServerError(message="Invalid name, alphanumeric and underscore only")
    if name[0] == '_':
        raise ServerError(message="Invalid name, must not start with an underscore")
    if name.isdigit():
        raise ServerError(message="Unable to create the widget, the name must include some letters")
    if module_name not in widget_modules():
        # Bug fix: previously raised FailPage, which is not imported in this
        # module and would itself raise NameError; use ServerError like the
        # rest of this module.
        raise ServerError("Module not identified")
    module = importlib.import_module("skipole.ski.widgets." + module_name)
    widget_dict = {cls_name: cls for (cls_name, cls) in inspect.getmembers(
        module,
        lambda member: inspect.isclass(member) and (member.__module__ == module.__name__),
    )}
    if widget_classname not in widget_dict:
        raise ServerError(message="Widget not identified")
    widget_cls = widget_dict[widget_classname]
    widget_instance = widget_cls(name=name, brief=brief)
    # set widget css class defaults, taken from defaults.json
    try:
        for fieldarg, field in widget_instance.fields.items():
            if field.cssclass or field.cssstyle:
                # get default
                default_value = fromjson.get_widget_default_field_value(
                    project, module_name, widget_classname, fieldarg)
                widget_instance.set_field_value(fieldarg, default_value)
    except Exception:
        # Bug fix: previously ``except e:`` which is a NameError at runtime.
        raise ServerError("Unable to obtain defaults from defaults.json")
    return widget_instance
def create_new_widget_in_page(project, pagenumber, pchange, location, module_name, widget_classname, name, brief):
    "Creates a new widget in the given page, returns the new pchange and new location"
    new_widget = _create_new_widget(project, module_name, widget_classname, name, brief)
    # delegate insertion and saving of the page to skilift.insert_item_in_page
    return insert_item_in_page(project, pagenumber, pchange, location, new_widget)
def create_new_widget_in_section(project, section_name, schange, location, module_name, widget_classname, name, brief):
    "Creates a new widget in the given section, returns the new schange and new location"
    new_widget = _create_new_widget(project, module_name, widget_classname, name, brief)
    # delegate insertion and saving of the section to skilift.insert_item_in_section
    return insert_item_in_section(project, section_name, schange, location, new_widget)
def page_widget(project, pagenumber, pchange, name):
    """Return a widget dictionary from the page"""
    proj, page = get_proj_page(project, pagenumber, pchange)
    widget = page.widgets.get(name)
    if not isinstance(widget, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    _parttext, partdict = widget.outline(project)
    return partdict
def section_widget(project, section_name, schange, name):
    """Return a widget dictionary from the section"""
    proj, section = get_proj_section(project, section_name, schange)
    widget = section.widgets.get(name)
    if not isinstance(widget, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    _parttext, partdict = widget.outline(project)
    return partdict
def page_widget_description(project, pagenumber, pchange, name):
    """Return a WidgetDescription from the page"""
    proj, page = get_proj_page(project, pagenumber, pchange)
    widget = page.widgets.get(name)
    if not isinstance(widget, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    # Gather field descriptions for each of the widget's argument styles.
    single_fields = [FieldDescription(*fld) for fld in widget.field_arguments_single()]
    list_fields = [FieldDescription(*fld, False, False) for fld in widget.field_arguments_list()]
    table_fields = [FieldDescription(*fld, False, False) for fld in widget.field_arguments_table()]
    dict_fields = [FieldDescription(*fld, False, False) for fld in widget.field_arguments_dictionary()]
    _section_name, parent_widget_name, parent_container = widget.embedded
    return WidgetDescription(
        widget.module_name(),
        widget.__class__.__name__,
        widget.brief,
        widget.field_arguments(),
        widget.len_containers(),
        widget.description(),
        single_fields,
        list_fields,
        table_fields,
        dict_fields,
        parent_widget_name,
        parent_container,
    )
def section_widget_description(project, section_name, schange, name):
    """Return a widget WidgetDescription from the section"""
    proj, section = get_proj_section(project, section_name, schange)
    widget = section.widgets.get(name)
    if not isinstance(widget, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    # Gather field descriptions for each of the widget's argument styles.
    single_fields = [FieldDescription(*fld) for fld in widget.field_arguments_single()]
    list_fields = [FieldDescription(*fld, False, False) for fld in widget.field_arguments_list()]
    table_fields = [FieldDescription(*fld, False, False) for fld in widget.field_arguments_table()]
    dict_fields = [FieldDescription(*fld, False, False) for fld in widget.field_arguments_dictionary()]
    _section_name, parent_widget_name, parent_container = widget.embedded
    return WidgetDescription(
        widget.module_name(),
        widget.__class__.__name__,
        widget.brief,
        widget.field_arguments(),
        widget.len_containers(),
        widget.description(),
        single_fields,
        list_fields,
        table_fields,
        dict_fields,
        parent_widget_name,
        parent_container,
    )
def rename_page_widget(project, pagenumber, pchange, name, new_name):
    """Given a widget at project, pagenumber, with name
       sets a new name, returns page change uuid """
    if not new_name:
        raise ServerError("Invalid name")
    proj, page = get_proj_page(project, pagenumber, pchange)
    widget = page.widgets.get(name)
    if not isinstance(widget, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    if name == new_name:
        raise ServerError("Name unchanged")
    # Validate the new name before applying it.
    if new_name.lower() in ('body', 'head', 'svg', 'show_error'):
        raise ServerError(message="Unable to create the widget, the name given is reserved")
    if _AN.search(new_name):
        raise ServerError(message="Invalid name, alphanumeric and underscore only")
    if new_name[0] == '_':
        raise ServerError(message="Invalid name, must not start with an underscore")
    if new_name.isdigit():
        raise ServerError(message="Unable to create the widget, the name must include some letters")
    if new_name in page.widgets:
        raise ServerError("Duplicate widget name in the page")
    if new_name in page.section_places:
        raise ServerError("This name clashes with a section alias within this page")
    widget.name = new_name
    # save the altered page, and return the page.change uuid
    return proj.save_page(page)
def new_brief_in_page_widget(project, pagenumber, pchange, name, brief):
    """Update the brief of the named widget on a page.

    Returns the new page change uuid; raises ServerError if the widget
    cannot be found.
    """
    proj, page = get_proj_page(project, pagenumber, pchange)
    target = page.widgets.get(name)
    if not isinstance(target, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    target.brief = brief
    # persist the page and hand back the new change uuid
    return proj.save_page(page)
def rename_section_widget(project, section_name, schange, name, new_name):
    """Given a widget at project, section_name, with name
    sets a new name, returns section change uuid

    Raises ServerError if the widget is missing or the new name is
    invalid, reserved, or already used in the section.
    """
    # NOTE: docstring fixed — the original referred to "pagenumber" and a
    # "page change uuid", copied from rename_page_widget.
    if not new_name:
        raise ServerError("Invalid name")
    proj, section = get_proj_section(project, section_name, schange)
    widget = section.widgets.get(name)
    if not isinstance(widget, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    if name == new_name:
        raise ServerError("Name unchanged")
    # certain names are reserved for page parts
    if new_name.lower() in ('body', 'head', 'svg', 'show_error'):
        raise ServerError(message="Unable to create the widget, the name given is reserved")
    if _AN.search(new_name):
        raise ServerError(message="Invalid name, alphanumeric and underscore only")
    if new_name.startswith('_'):
        raise ServerError(message="Invalid name, must not start with an underscore")
    if new_name.isdigit():
        raise ServerError(message="Unable to create the widget, the name must include some letters")
    if new_name in section.widgets:
        raise ServerError("Duplicate widget name in the section")
    widget.name = new_name
    # save the altered section, and return the section.change uuid
    return proj.add_section(section_name, section)
def new_brief_in_section_widget(project, section_name, schange, name, brief):
    """Update the brief of the named widget in a section.

    Returns the new section change uuid; raises ServerError if the
    widget cannot be found.
    """
    proj, section = get_proj_section(project, section_name, schange)
    target = section.widgets.get(name)
    if not isinstance(target, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    target.brief = brief
    # persist the section and hand back the new change uuid
    return proj.add_section(section_name, section)
def set_widget_field_name_in_page(project, pagenumber, pchange, widget_name, field_arg, name):
    """Set the display name of a widget field on a page.

    Returns the new page change uuid; raises ServerError if the widget
    is missing or the name is empty, underscore-prefixed or reserved.
    """
    proj, page = get_proj_page(project, pagenumber, pchange)
    widget = page.widgets.get(widget_name)
    if not isinstance(widget, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    if not name:
        raise ServerError("Invalid name")
    # underscore-prefixed names are reserved for internal use
    if name[0] == '_':
        raise ServerError(message="Invalid name - cannot start with an underscore.")
    # show_error may only be used for the show_error field itself
    if name == "show_error" and field_arg != "show_error":
        raise ServerError(message="Invalid name - show_error is a reserved name.")
    try:
        widget.set_name(field_arg, name)
    except ValidateError as e:
        raise ServerError(message=e.message)
    # persist the page and hand back the new change uuid
    return proj.save_page(page)
def set_widget_field_name_in_section(project, section_name, schange, widget_name, field_arg, name):
    """Given a widget field, sets its name.

    Validates the name and returns the section change uuid.  Raises
    ServerError if the widget is missing or the name is empty,
    underscore-prefixed, or the reserved 'show_error'.
    """
    proj, section = get_proj_section(project, section_name, schange)
    widget = section.widgets.get(widget_name)
    if not isinstance(widget, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    if not name:
        raise ServerError("Invalid name")
    if name.startswith('_'):
        raise ServerError(message="Invalid name - cannot start with an underscore.")
    if (name == "show_error") and (field_arg != "show_error"):
        raise ServerError(message="Invalid name - show_error is a reserved name.")
    try:
        widget.set_name(field_arg, name)
    except ValidateError as e:
        raise ServerError(message=e.message)
    # save the altered section, and return the section.change uuid
    # (fixed: the original comment said "page", copied from the page variant)
    return proj.add_section(section_name, section)
def set_widget_field_value_in_page(project, pagenumber, pchange, widget_name, field_arg, value):
    """Set the value of a single-valued widget field on a page.

    Only 'single' fields accept a value (not lists, tables or
    dictionaries).  A purely numeric value for an ident/url field is
    treated as a page number of this project.  Returns the new page
    change uuid.
    """
    proj, page = get_proj_page(project, pagenumber, pchange)
    widget = page.widgets.get(widget_name)
    if not isinstance(widget, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    # build a lookup of single-value field descriptions keyed by name
    single_fields = {}
    for arg in widget.field_arguments_single():
        single_fields[arg[0]] = arg
    if field_arg not in single_fields:
        raise ServerError("Cannot set a value on this field")
    text_value = str(value)
    # arg[2] is the field class; a bare digit for ident/url fields is
    # shorthand for a page ident within this project
    if single_fields[field_arg][2] in ("ident", "url") and text_value.isdigit():
        text_value = project + '_' + text_value
    try:
        widget.set_field_value(field_arg, text_value)
    except ValidateError as e:
        raise ServerError(message=e.message)
    # persist the page and hand back the new change uuid
    return proj.save_page(page)
def set_widget_field_value_in_section(project, section_name, schange, widget_name, field_arg, value):
    """Set the value of a single-valued widget field in a section.

    Only 'single' fields accept a value (not lists, tables or
    dictionaries).  A purely numeric value for an ident/url field is
    treated as a page number of this project.  Returns the new section
    change uuid.
    """
    proj, section = get_proj_section(project, section_name, schange)
    widget = section.widgets.get(widget_name)
    if not isinstance(widget, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    # build a lookup of single-value field descriptions keyed by name
    single_fields = {}
    for arg in widget.field_arguments_single():
        single_fields[arg[0]] = arg
    if field_arg not in single_fields:
        raise ServerError("Cannot set a value on this field")
    text_value = str(value)
    # arg[2] is the field class; a bare digit for ident/url fields is
    # shorthand for a page ident within this project
    if single_fields[field_arg][2] in ("ident", "url") and text_value.isdigit():
        text_value = project + '_' + text_value
    try:
        widget.set_field_value(field_arg, text_value)
    except ValidateError as e:
        raise ServerError(message=e.message)
    # persist the section and hand back the new change uuid
    return proj.add_section(section_name, section)
def container_in_page(project, pagenumber, pchange, widget_name, container):
    """Return a ContainerInfo named tuple for the widget's container.

    Raises ServerError if the widget cannot be found on the page.
    """
    proj, page = get_proj_page(project, pagenumber, pchange)
    target = page.widgets.get(widget_name)
    if not isinstance(target, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    is_empty = target.is_container_empty(container)
    return ContainerInfo(container, is_empty)
def container_in_section(project, section_name, schange, widget_name, container):
    """Return a ContainerInfo named tuple for the widget's container.

    Raises ServerError if the widget cannot be found in the section.
    """
    proj, section = get_proj_section(project, section_name, schange)
    target = section.widgets.get(widget_name)
    if not isinstance(target, (widgets.Widget, widgets.ClosedWidget)):
        raise ServerError("Widget not found")
    is_empty = target.is_container_empty(container)
    return ContainerInfo(container, is_empty)
| 50.630252
| 156
| 0.677898
| 2,231
| 18,075
| 5.297176
| 0.080233
| 0.064986
| 0.045016
| 0.061601
| 0.854713
| 0.850398
| 0.82789
| 0.78152
| 0.763581
| 0.744965
| 0
| 0.000651
| 0.235463
| 18,075
| 356
| 157
| 50.772472
| 0.854548
| 0.116681
| 0
| 0.723636
| 0
| 0
| 0.126122
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069091
| false
| 0
| 0.025455
| 0
| 0.163636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4e2405d1a2dcb26759e002707cd51002359b14c4
| 2,237
|
py
|
Python
|
database/migrations/0002_auto_20200127_1624.py
|
gbriones1/django-skelleton
|
ee067594e3994f1bac5bf754f618d365bb5248d8
|
[
"BSD-3-Clause"
] | null | null | null |
database/migrations/0002_auto_20200127_1624.py
|
gbriones1/django-skelleton
|
ee067594e3994f1bac5bf754f618d365bb5248d8
|
[
"BSD-3-Clause"
] | 10
|
2020-06-05T16:38:25.000Z
|
2022-03-11T23:12:12.000Z
|
database/migrations/0002_auto_20200127_1624.py
|
gbriones1/django-skelleton
|
ee067594e3994f1bac5bf754f618d365bb5248d8
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 3.0.1 on 2020-01-27 22:24
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 3.0.1).

    Relaxes customer/provider contact fields to optional
    (blank/null, '' default), normalises invoice/quotation money
    columns to DecimalField(max_digits=9, decimal_places=2), and
    allows order_product.order to be NULL.
    """

    dependencies = [
        ('database', '0001_initial'),
    ]

    operations = [
        # Customer contact details become optional.
        migrations.AlterField(
            model_name='customer_contact',
            name='department',
            field=models.CharField(blank=True, default='', max_length=100, null=True),
        ),
        migrations.AlterField(
            model_name='customer_contact',
            name='email',
            field=models.EmailField(blank=True, default='', max_length=254, null=True),
        ),
        migrations.AlterField(
            model_name='customer_contact',
            name='phone',
            field=models.CharField(blank=True, default='', max_length=15, null=True),
        ),
        # Money columns: fixed-point with 2 decimal places, default 0.
        migrations.AlterField(
            model_name='invoice',
            name='price',
            field=models.DecimalField(decimal_places=2, default=0, max_digits=9),
        ),
        # order_product rows may now exist without an order; deleting the
        # order cascades to them.
        migrations.AlterField(
            model_name='order_product',
            name='order',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='database.Order'),
        ),
        # Provider contact details become optional (mirrors customer_contact).
        migrations.AlterField(
            model_name='provider_contact',
            name='department',
            field=models.CharField(blank=True, default='', max_length=100, null=True),
        ),
        migrations.AlterField(
            model_name='provider_contact',
            name='email',
            field=models.EmailField(blank=True, default='', max_length=254, null=True),
        ),
        migrations.AlterField(
            model_name='provider_contact',
            name='phone',
            field=models.CharField(blank=True, default='', max_length=15, null=True),
        ),
        migrations.AlterField(
            model_name='quotation',
            name='discount',
            field=models.DecimalField(decimal_places=2, default=0, max_digits=9),
        ),
        migrations.AlterField(
            model_name='quotation',
            name='service',
            field=models.DecimalField(decimal_places=2, default=0, max_digits=9),
        ),
    ]
| 34.415385
| 113
| 0.583371
| 223
| 2,237
| 5.713004
| 0.282511
| 0.156986
| 0.196232
| 0.22763
| 0.762166
| 0.762166
| 0.741758
| 0.683673
| 0.683673
| 0.658556
| 0
| 0.02776
| 0.291462
| 2,237
| 64
| 114
| 34.953125
| 0.776025
| 0.020116
| 0
| 0.741379
| 1
| 0
| 0.106393
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.034483
| 0
| 0.086207
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9d91b69cf9625ded797fbbceb62ebc295c8766c4
| 1,994
|
py
|
Python
|
backend/reserve_app/permissions.py
|
mmohajer9/reserve-system
|
30415deb64fd6671aa4898f4374c22f642b009f5
|
[
"MIT"
] | 3
|
2021-02-28T17:08:23.000Z
|
2022-02-22T08:43:32.000Z
|
backend/reserve_app/permissions.py
|
mmohajer9/reserve-system
|
30415deb64fd6671aa4898f4374c22f642b009f5
|
[
"MIT"
] | 8
|
2021-04-08T22:03:58.000Z
|
2022-03-12T00:54:46.000Z
|
backend/reserve_app/permissions.py
|
mmohajer9/reserve-system
|
30415deb64fd6671aa4898f4374c22f642b009f5
|
[
"MIT"
] | null | null | null |
from rest_framework.permissions import BasePermission
from .models import *
class SampleCustomPermission(BasePermission):
    """Permissive example permission: everything is granted.

    DRF consults has_object_permission() first, but only on views bound
    to a specific object (e.g. RetrieveUpdateAPIView, not ListAPIView);
    has_permission() is evaluated second for every request.
    """

    def has_object_permission(self, request, view, obj):
        """Return True: object-level access is always granted."""
        return True

    def has_permission(self, request, view):
        """Return True: view-level access is always granted."""
        return True
class isAdminOrReadOnly(BasePermission):
    """Grant staff/superusers full access; everyone else may only GET.

    Behaviour preserved from the original: only GET counts as read-only
    (HEAD/OPTIONS are denied for non-staff).  The original nested
    PUT/PATCH/POST branch contained dead code — its ``else: return True``
    could never run because the staff check above had already returned
    True for privileged users — so both checks reduce to
    "staff/superuser, or GET".
    """

    def _is_allowed(self, request):
        # Staff and superusers always pass; other users only for GET.
        if request.user.is_staff or request.user.is_superuser:
            return True
        return request.method == 'GET'

    # Only consulted on object-bound views such as RetrieveUpdateAPIView,
    # not on list views.
    def has_object_permission(self, request, view, obj):
        """Return `True` if permission is granted, `False` otherwise."""
        return self._is_allowed(request)

    def has_permission(self, request, view):
        """Return `True` if permission is granted, `False` otherwise."""
        return self._is_allowed(request)
| 32.16129
| 120
| 0.621866
| 233
| 1,994
| 5.244635
| 0.248927
| 0.090016
| 0.085106
| 0.081833
| 0.776596
| 0.776596
| 0.725859
| 0.725859
| 0.725859
| 0.725859
| 0
| 0
| 0.296891
| 1,994
| 61
| 121
| 32.688525
| 0.871612
| 0.24323
| 0
| 0.875
| 0
| 0
| 0.022422
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.0625
| 0
| 0.625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
9dc600070a67e68475afe91f399c071b074692bd
| 156
|
py
|
Python
|
calculation/gmhazard_calc/gmhazard_calc/rupture/__init__.py
|
ucgmsim/gmhazard
|
d3d90b4c94b3d9605597a3efeccc8523a1e50c0e
|
[
"MIT"
] | null | null | null |
calculation/gmhazard_calc/gmhazard_calc/rupture/__init__.py
|
ucgmsim/gmhazard
|
d3d90b4c94b3d9605597a3efeccc8523a1e50c0e
|
[
"MIT"
] | 8
|
2021-10-13T02:33:23.000Z
|
2022-03-29T21:01:08.000Z
|
calculation/gmhazard_calc/gmhazard_calc/rupture/__init__.py
|
ucgmsim/gmhazard
|
d3d90b4c94b3d9605597a3efeccc8523a1e50c0e
|
[
"MIT"
] | null | null | null |
from .rupture import rupture_df_from_erf, rupture_name_to_id, rupture_id_to_ix, rupture_name_to_id_ix, rupture_id_ix_to_rupture_id, get_fault_header_points
| 78
| 155
| 0.903846
| 30
| 156
| 4
| 0.433333
| 0.225
| 0.216667
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057692
| 156
| 1
| 156
| 156
| 0.816327
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
9dc7b769342124d21f2194408a4016e7206030a1
| 5,422
|
py
|
Python
|
migrations/versions/4ac528b772e_.py
|
finnurtorfa/aflafrettir.is
|
f5d973f567434286ed28017bf88b4e043c19d853
|
[
"MIT"
] | null | null | null |
migrations/versions/4ac528b772e_.py
|
finnurtorfa/aflafrettir.is
|
f5d973f567434286ed28017bf88b4e043c19d853
|
[
"MIT"
] | 11
|
2015-01-03T14:50:22.000Z
|
2015-06-01T17:04:42.000Z
|
migrations/versions/4ac528b772e_.py
|
finnurtorfa/aflafrettir.is
|
f5d973f567434286ed28017bf88b4e043c19d853
|
[
"MIT"
] | null | null | null |
"""empty message
Revision ID: 4ac528b772e
Revises: 3dc9c5ee9d8
Create Date: 2015-03-25 14:54:49.106037
"""
# revision identifiers, used by Alembic.
revision = '4ac528b772e'
down_revision = '3dc9c5ee9d8'
from alembic import op
import sqlalchemy as sa
import datetime
Session = sa.orm.sessionmaker()
def upgrade():
    """Rebuild the categories and images tables with the new columns.

    Columns cannot be altered in place here, so the migration creates
    '*_new' tables, copies the existing rows across row-by-row, drops
    the old tables and renames the new ones into place.
    """
    ### commands auto generated by Alembic - please adjust! ###
    cat = op.create_table('categories_new',
                          sa.Column('id', sa.Integer, nullable=False),
                          sa.Column('name', sa.String(64), nullable=False),
                          # new column: English name, placeholder default
                          sa.Column('name_en',
                                    sa.String(64),
                                    nullable=False,
                                    default='EDIT MANUALLY'),
                          sa.Column('active',
                                    sa.Boolean,
                                    nullable=False,
                                    default=False),
                          sa.UniqueConstraint('name'),
                          sa.PrimaryKeyConstraint('id'))
    img = op.create_table('images_new',
                          sa.Column('id', sa.Integer, nullable=False),
                          sa.Column('filename', sa.String(120)),
                          # new column replacing file_url (see downgrade)
                          sa.Column('ad_html', sa.Text),
                          sa.Column('location', sa.String(120), nullable=False),
                          sa.Column('url', sa.String(120)),
                          sa.Column('type', sa.Integer, nullable=False),
                          sa.Column('active',
                                    sa.Boolean,
                                    nullable=False,
                                    default=False),
                          sa.Column('timestamp', sa.DateTime, nullable=False),
                          sa.PrimaryKeyConstraint('id'))
    bind = op.get_bind()
    results = bind.execute('select * from categories').fetchall()
    # NOTE(review): r[2] of the old row is intentionally skipped; 'active'
    # is read from r[3] for now — see the trailing comment below.
    categories = [{'id': r[0],
                   'name': r[1],
                   'name_en': 'EDIT MANUALLY',
                   'active': r[3]} for r in results]  # Change to 2 afterwards
    op.bulk_insert(cat, categories)
    results = bind.execute('select * from images').fetchall()
    images = [{'id': r[0],
               'filename': r[1],
               'ad_html': '',
               'location': r[3],
               'url': r[4],  # Change to 7 afterwards?
               'type': r[5],
               'active': r[6],
               # stored as text in the old table; parse back to datetime
               'timestamp': datetime\
                   .datetime.strptime(r[7],
                                      '%Y-%m-%d %H:%M:%S.%f')} for r in results]
    op.bulk_insert(img, images)
    op.drop_table('categories')
    op.drop_table('images')
    op.rename_table('categories_new', 'categories')
    op.rename_table('images_new', 'images')
    ### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): restore the previous table shapes.

    Recreates the old-style tables (images with file_url instead of
    ad_html; categories with name_en nullable), copies the rows back,
    then drops and renames as in upgrade().
    """
    ### commands auto generated by Alembic - please adjust! ###
    img = op.create_table('images_new',
                          sa.Column('id', sa.Integer, nullable=False),
                          sa.Column('filename', sa.String(120)),
                          # old column restored in place of ad_html
                          sa.Column('file_url', sa.String(120)),
                          sa.Column('location', sa.String(120), nullable=False),
                          sa.Column('url', sa.String(120)),
                          sa.Column('type', sa.Integer, nullable=False),
                          sa.Column('active',
                                    sa.Boolean,
                                    nullable=False,
                                    default=False),
                          sa.Column('timestamp', sa.DateTime, nullable=False),
                          sa.PrimaryKeyConstraint('id'))
    cat = op.create_table('categories_new',
                          sa.Column('id', sa.Integer, nullable=False),
                          sa.Column('name', sa.String(64), nullable=False),
                          # no nullable=False here: the pre-upgrade schema
                          # allowed NULL name_en
                          sa.Column('name_en',
                                    sa.String(64),
                                    default='EDIT MANUALLY'),
                          sa.Column('active',
                                    sa.Boolean,
                                    nullable=False,
                                    default=False),
                          sa.UniqueConstraint('name'),
                          sa.PrimaryKeyConstraint('id'))
    bind = op.get_bind()
    results = bind.execute('select * from images').fetchall()
    images = [{'id': r[0],
               'filename': r[1],
               # the old file_url content is not recoverable; reset to ''
               'file_url': '',
               'location': r[3],
               'url': r[4],  # Change to 7 afterwards?
               'type': r[5],
               'active': r[6],
               # stored as text; parse back to datetime
               'timestamp': datetime\
                   .datetime.strptime(r[7],
                                      '%Y-%m-%d %H:%M:%S.%f')} for r in results]
    op.bulk_insert(img, images)
    results = bind.execute('select * from categories').fetchall()
    # NOTE(review): same row-index caveat as in upgrade()
    categories = [{'id': r[0],
                   'name': r[1],
                   'name_en': 'EDIT MANUALLY',
                   'active': r[3]} for r in results]  # Change to 2 afterwards
    op.bulk_insert(cat, categories)
    op.drop_table('images')
    op.drop_table('categories')
    op.rename_table('images_new', 'images')
    op.rename_table('categories_new', 'categories')
    ### end Alembic commands ###
| 39.576642
| 80
| 0.448359
| 513
| 5,422
| 4.670565
| 0.19883
| 0.080134
| 0.075125
| 0.087646
| 0.866861
| 0.861436
| 0.830551
| 0.735392
| 0.735392
| 0.735392
| 0
| 0.030902
| 0.421062
| 5,422
| 136
| 81
| 39.867647
| 0.732399
| 0.070638
| 0
| 0.885714
| 0
| 0
| 0.128749
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019048
| false
| 0
| 0.028571
| 0
| 0.047619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9dd0e707f7d843b1869f1e4cf2ae87aa93027552
| 194
|
py
|
Python
|
contrib/ldap/tests/filter.py
|
gonicus/clacks
|
da579f0acc4e48cf2e9451417ac6792282cf7ab6
|
[
"ZPL-2.1"
] | 2
|
2015-01-26T07:15:19.000Z
|
2015-11-09T13:42:11.000Z
|
contrib/ldap/tests/filter.py
|
gonicus/clacks
|
da579f0acc4e48cf2e9451417ac6792282cf7ab6
|
[
"ZPL-2.1"
] | null | null | null |
contrib/ldap/tests/filter.py
|
gonicus/clacks
|
da579f0acc4e48cf2e9451417ac6792282cf7ab6
|
[
"ZPL-2.1"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Smoke test: LDAP filter escaping for special characters ('&', ')')."""
import ldap.filter

# print() calls instead of the Python-2 print statement: with a single
# parenthesised argument the output is identical under py2, and the file
# becomes valid Python 3 as well.
print(ldap.filter.filter_format("(&(objectClass=%s)(cn=%s))", ['posixAccount', 'ganz&oder)so']))
print(ldap.filter.escape_filter_chars('ganz&oder)so'))
| 32.333333
| 95
| 0.695876
| 28
| 194
| 4.714286
| 0.607143
| 0.227273
| 0.227273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005556
| 0.072165
| 194
| 5
| 96
| 38.8
| 0.727778
| 0.108247
| 0
| 0
| 0
| 0
| 0.362573
| 0.152047
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
d1c0b2fb080a9a4fd2b2c6912df8c7c6abfd1413
| 61
|
py
|
Python
|
mms_python_adapter/__init__.py
|
Open-MBEE/MMS-Python-Adapter
|
3b98f53abace50b921fcfa0c7929b545ba163f89
|
[
"Apache-2.0"
] | null | null | null |
mms_python_adapter/__init__.py
|
Open-MBEE/MMS-Python-Adapter
|
3b98f53abace50b921fcfa0c7929b545ba163f89
|
[
"Apache-2.0"
] | 2
|
2021-01-06T18:16:29.000Z
|
2021-03-11T19:38:16.000Z
|
mms_python_adapter/__init__.py
|
Open-MBEE/MMS-Python-Adapter
|
3b98f53abace50b921fcfa0c7929b545ba163f89
|
[
"Apache-2.0"
] | null | null | null |
from mms_python_adapter.mms_python_adapter import MMSAdapter
| 30.5
| 60
| 0.918033
| 9
| 61
| 5.777778
| 0.666667
| 0.346154
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065574
| 61
| 1
| 61
| 61
| 0.912281
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d1ffa017137ff203a832dd6e68b15aff25aa6f35
| 51,983
|
py
|
Python
|
main.py
|
gianmarcocalbi/asyncdsm-sym
|
21a82c5fafada360faef340db805a772cec61530
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
gianmarcocalbi/asyncdsm-sym
|
21a82c5fafada360faef340db805a772cec61530
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
gianmarcocalbi/asyncdsm-sym
|
21a82c5fafada360faef340db805a772cec61530
|
[
"Apache-2.0"
] | null | null | null |
import argparse
from sklearn.preprocessing import normalize
import simulator
from src import statistics, graphs
from src.utils import *
def get_graphs(graph_type, nodes):
    """Return graph-name strings for the standard degree sweep.

    For each degree d in the sweep defined for *nodes* (100, 400 or
    1000 nodes supported), yields '<d>-clique' when d == nodes - 1 and
    '<d>-<graph_type>' otherwise.
    """
    degree_sweep = {
        100: [2, 3, 4, 8, 16, 32, 64, 99],
        400: [2, 3, 4, 6, 20, 50, 100, 200, 300, 399],
        1000: [2, 3, 4, 8, 16, 20, 30, 40, 50, 100, 200, 500, 999],
    }[nodes]
    # the maximum degree (nodes - 1) is by definition the complete graph
    return [
        '{}-clique'.format(d) if d == nodes - 1 else '{}-{}'.format(d, graph_type)
        for d in degree_sweep
    ]
def run(core=-1):
    """
    Dispatch one experiment based on the CLI switch.

    Parameters
    ----------
    core: int
        Integer from argparse module to be used as switcher.

    Returns
    -------
    None
    """
    # Disabled spectral-gap inspection kept for reference (originally a
    # bare string literal, i.e. a no-op):
    # for g_name, A in graphs.generate_n_nodes_graphs(100, get_graphs('cycle', 100)).items():
    #     M = A / sum(A[0])
    #     eigenvals = np.linalg.eigvals(M)
    #     np.sort(eigenvals)
    #     print(g_name + ' ' + str(abs(eigenvals[1])))
    if core == 0:
        test4_on_multieigvecsvm_dataset(seed=22052010, n=100, n_samples=20, distr='exp')
    elif core == 1:
        test4_on_multieigvecsvm_dataset(seed=22052010, n=100, n_samples=30, distr='exp')
def test7_on_sloreg_dataset(seed=None, n=100, ):
    """Run the 'test7' simulation on the sloreg dataset.

    n nodes on the standard expander sweep, MSE objective, constant
    learning rate, CustomRealTimings node clocks; results are saved to a
    test folder.  Returns whatever simulator.run returns.
    """
    # linear_regression result = 67.30972320004327
    simulator.run(
        seed=seed,
        n=n,
        n_samples=52000,
        graphs=get_graphs('expander', n),
        dataset='sloreg',
        starting_weights_domain=[2, 3],
        max_iter=5000,
        max_time=None,
        alpha=5e-6,
        learning_rate='constant',
        spectrum_dependent_learning_rate=False,
        time_distr_class=statistics.CustomRealTimings,
        time_distr_param=[],
        obj_function='mse',
        metrics=[],
        metrics_type=2,  # presumably "worst node" aggregation — TODO confirm in simulator
        metrics_nodes='worst',
        shuffle=True,
        save_test_to_file=True,
        test_folder_name_struct=[
            'test7',
            'dataset',
            # 'w_domain',
            'nodes',
            'distr',
            'metrics',
            'alpha',
            'samp',
            # 'feat',
            'time',
            'iter'
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=['mse_iter', 'mse_time'],
        save_plot_to_file=False,
        plot_global_w=False,
        plot_node_w=False
    )
def test7_on_susysvm_dataset(seed=None, n=100):
    """Run the 'test7' simulation on the susysvm dataset.

    Hinge-loss SVM with model averaging on the standard expander sweep,
    CustomRealTimings node clocks; results are saved to a test folder.
    """
    simulator.run(
        seed=seed,
        n=n,
        n_samples=500000,
        graphs=get_graphs('expander', n),
        dataset='susysvm',
        starting_weights_domain=[-0.5, 2],
        max_iter=6000,
        max_time=None,
        alpha=5e-2,
        learning_rate='constant',
        spectrum_dependent_learning_rate=False,
        time_distr_class=statistics.CustomRealTimings,
        time_distr_param=[],
        obj_function='hinge_loss',
        average_model_toggle=True,
        metrics=[],
        metrics_type=2,
        metrics_nodes='worst',
        shuffle=True,
        save_test_to_file=True,
        test_folder_name_struct=[
            'test7',
            'dataset',
            # 'w_domain',
            'nodes',
            'distr',
            'metrics',
            'alpha',
            'samp',
            # 'feat',
            'time',
            'iter'
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=['hinge_loss_iter', 'hinge_loss_time'],
        save_plot_to_file=False,
        plot_global_w=False,
        plot_node_w=False
    )
def test6_on_sloreg_dataset(seed=None, n=100):
    """Run the 'test6' simulation on the sloreg dataset.

    Identical to test7_on_sloreg_dataset except for the node-clock model
    (SparkRealTimings here) and the 'test6' folder tag.
    """
    # linear_regression result = 67.30972320004327
    simulator.run(
        seed=seed,
        n=n,
        n_samples=52000,
        graphs=get_graphs('expander', n),
        dataset='sloreg',
        starting_weights_domain=[2, 3],
        max_iter=5000,
        max_time=None,
        alpha=5e-6,
        learning_rate='constant',
        spectrum_dependent_learning_rate=False,
        time_distr_class=statistics.SparkRealTimings,
        time_distr_param=[],
        obj_function='mse',
        metrics=[],
        metrics_type=2,
        metrics_nodes='worst',
        shuffle=True,
        save_test_to_file=True,
        test_folder_name_struct=[
            'test6',
            'dataset',
            # 'w_domain',
            'nodes',
            'distr',
            'metrics',
            'alpha',
            'samp',
            # 'feat',
            'time',
            'iter'
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=['mse_iter', 'mse_time'],
        save_plot_to_file=False,
        plot_global_w=False,
        plot_node_w=False
    )
def test6_on_susysvm_dataset(seed=None, n=100):
    """Run the 'test6' simulation on the susysvm dataset.

    Identical to test7_on_susysvm_dataset except for the node-clock
    model (SparkRealTimings here) and the 'test6' folder tag.
    """
    simulator.run(
        seed=seed,
        n=n,
        n_samples=500000,
        graphs=get_graphs('expander', n),
        dataset='susysvm',
        starting_weights_domain=[-0.5, 2],
        max_iter=6000,
        max_time=None,
        alpha=5e-2,
        learning_rate='constant',
        spectrum_dependent_learning_rate=False,
        time_distr_class=statistics.SparkRealTimings,
        time_distr_param=[],
        obj_function='hinge_loss',
        average_model_toggle=True,
        metrics=[],
        metrics_type=2,
        metrics_nodes='worst',
        shuffle=True,
        save_test_to_file=True,
        test_folder_name_struct=[
            'test6',
            'dataset',
            # 'w_domain',
            'nodes',
            'distr',
            'metrics',
            'alpha',
            'samp',
            # 'feat',
            'time',
            'iter'
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=['hinge_loss_iter', 'hinge_loss_time'],
        save_plot_to_file=False,
        plot_global_w=False,
        plot_node_w=False
    )
def test5_on_multieigvecsvm_dataset(seed=None, n=100, k=16, n_samples=2, distr='par', metrics_nodes='all', alert=False):
    """Run the 'test5' simulation on the k-multieigvecsvm dataset.

    distr selects the node-clock distribution ('exp', 'unif' or 'par');
    metrics_nodes controls which nodes' metrics are aggregated.
    """
    if alert:
        # NOTE(review): this label looks copy-pasted from another test
        # function — it does not match this function's name.
        print('test_exp_on_unisvm_dataset()')
        print('n={}, distr={}, metrics_nodes={}'.format(n, distr, metrics_nodes))
        input("click [ENTER] to continue or [CTRL]+[C] to abort")
    # map the distr keyword to a clock-distribution class and its params
    time_distr_class = {
        'exp': statistics.ExponentialDistribution, 'unif': statistics.UniformDistribution,
        'par': statistics.Type2ParetoDistribution
    }[distr]
    time_distr_param = {'exp': [[1]], 'unif': [[0, 2]], 'par': [[3, 2]], }[distr]
    if metrics_nodes in ['worst', 'all']:
        metrics_type = {'worst': 2, 'all': 0, 'best': 2}[metrics_nodes]
    else:
        # any explicit node list falls back to type 2
        metrics_type = 2
    simulator.run(
        seed=seed,
        n=n,
        n_samples=n_samples,
        graphs=get_graphs('expander', n),
        dataset='{}-multieigvecsvm'.format(k),
        starting_weights_domain=[1, 1],
        smv_label_flip_prob=0.00,
        max_iter=5000,
        max_time=None,
        alpha=1e-1,
        learning_rate='constant',
        spectrum_dependent_learning_rate=False,
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        obj_function='hinge_loss',
        epsilon=-math.inf,  # never stop on objective improvement
        average_model_toggle=True,
        metrics=[],
        metrics_type=metrics_type,
        metrics_nodes=metrics_nodes,
        shuffle=False,
        save_test_to_file=True,
        test_folder_name_struct=[
            'test5',
            'dataset',
            'samp',
            'alpha',
            'nodes',
            #'shuffle',
            'w_domain',
            'distr',
            #'time',
            'iter',
            #'metrics'
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=['hinge_loss_iter', 'hinge_loss_time'],
        save_plot_to_file=False,
        plot_global_w=False,
        plot_node_w=False
    )
def test4_on_multieigvecsvm_dataset(seed=None, n=100, n_samples=2, distr='par', alert=False):
    """Run the 'test4' simulation on the multieigvecsvm dataset.

    Like test5 but with a fixed worst-node metric and the continuous
    hinge-loss objective.  distr selects the node-clock distribution.
    """
    if alert:
        # NOTE(review): this label looks copy-pasted from another test
        # function — it does not match this function's name.
        print('test_exp_on_unisvm_dataset()')
        print('n={}, distr={}'.format(n, distr))
        input("click [ENTER] to continue or [CTRL]+[C] to abort")
    # map the distr keyword to a clock-distribution class and its params
    time_distr_class = {
        'exp': statistics.ExponentialDistribution, 'unif': statistics.UniformDistribution,
        'par': statistics.Type2ParetoDistribution
    }[distr]
    time_distr_param = {'exp': [[1]], 'unif': [[0, 2]], 'par': [[3, 2]], }[distr]
    simulator.run(
        seed=seed,
        n=n,
        n_samples=n_samples,
        graphs=get_graphs('expander', n),
        dataset='multieigvecsvm',
        starting_weights_domain=[1, 1],
        smv_label_flip_prob=0.00,
        max_iter=5000,
        max_time=None,
        alpha=1e-1,
        learning_rate='constant',
        spectrum_dependent_learning_rate=False,
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        obj_function='cont_hinge_loss',
        epsilon=-math.inf,  # never stop on objective improvement
        average_model_toggle=True,
        metrics=[],
        metrics_type=2,
        metrics_nodes='worst',
        shuffle=False,
        save_test_to_file=True,
        test_folder_name_struct=[
            'test4',
            'dataset',
            'samp',
            'alpha',
            'nodes',
            #'shuffle',
            'w_domain',
            'distr',
            #'time',
            'iter',
            #'metrics'
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=['cont_hinge_loss_iter', 'cont_hinge_loss_time'],
        save_plot_to_file=False,
        plot_global_w=False,
        plot_node_w=False
    )
def test_on_eigvecsvm_dataset(
        seed=None,
        graph_type='expander',
        graphs_list=None,
        n=100, distr='par',
        metrics_nodes='all',
        alt_exp=False,
        alert=True
):
    """Run the eigvecsvm (or alteigvecsvm when alt_exp) simulation.

    graphs_list overrides the standard degree sweep for graph_type when
    provided; distr selects the node-clock distribution.
    """
    if alert:
        # NOTE(review): this label looks copy-pasted from another test
        # function — it does not match this function's name.
        print('test_exp_on_unisvm_dataset()')
        print('n={}, distr={}, metrics_nodes={}'.format(n, distr, metrics_nodes))
        input("click [ENTER] to continue or [CTRL]+[C] to abort")
    # map the distr keyword to a clock-distribution class and its params
    time_distr_class = {
        'exp': statistics.ExponentialDistribution, 'unif': statistics.UniformDistribution,
        'par': statistics.Type2ParetoDistribution
    }[distr]
    time_distr_param = {'exp': [[1]], 'unif': [[0, 2]], 'par': [[3, 2]], }[distr]
    if metrics_nodes in ['worst', 'all']:
        metrics_type = {'worst': 2, 'all': 0, 'best': 2}[metrics_nodes]
    else:
        # any explicit node list falls back to type 2
        metrics_type = 2
    if graphs_list is None or len(graphs_list) == 0:
        graphs_list = get_graphs(graph_type, n)
    if alt_exp:
        dataset = 'alteigvecsvm'
    else:
        dataset = 'eigvecsvm'
    simulator.run(
        seed=seed,
        n=n,
        graphs=graphs_list,
        dataset=dataset,
        starting_weights_domain=[1, 1],
        smv_label_flip_prob=0.00,
        max_iter=5000,
        max_time=None,
        alpha=1e-1,
        learning_rate='constant',
        spectrum_dependent_learning_rate=False,
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        obj_function='cont_hinge_loss',
        epsilon=-math.inf,  # never stop on objective improvement
        average_model_toggle=True,
        metrics=[],
        metrics_type=metrics_type,
        metrics_nodes=metrics_nodes,
        shuffle=False,
        save_test_to_file=True,
        test_folder_name_struct=[
            '3'+graph_type,
            'dataset',
            'alpha',
            'nodes',
            'shuffle',
            'w_domain',
            'distr',
            'time',
            'iter',
            'metrics'
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=['cont_hinge_loss_iter', 'cont_hinge_loss_time'],
        save_plot_to_file=False,
        plot_global_w=False,
        plot_node_w=False
    )
def test_exp_on_newreg_dataset(seed=None, n=100, distr='par', metrics_nodes='all', alert=True):
    """Run the subgradient simulation on the synthetic 'newreg' dataset.

    distr selects the node-clock distribution ('real' maps to
    SparkRealTimings); the expander sweep is hard-coded per node count.
    """
    if alert:
        # NOTE(review): this label looks copy-pasted from another test
        # function — it does not match this function's name.
        print('test_exp_on_reg_dataset()')
        print('n={}, distr={}, metrics_nodes={}'.format(n, distr, metrics_nodes))
        input("click [ENTER] to continue or [CTRL]+[C] to abort")
    # map the distr keyword to a clock-distribution class and its params
    time_distr_class = {
        'exp': statistics.ExponentialDistribution, 'unif': statistics.UniformDistribution,
        'par': statistics.Type2ParetoDistribution, 'real': statistics.SparkRealTimings
    }[distr]
    time_distr_param = {'exp': [[1]], 'unif': [[0, 2]], 'par': [[3, 2]], 'real': []}[distr]
    graphs = {
        100: ['2-expander', '3-expander', '4-expander', '8-expander', '10-expander', '20-expander', '50-expander',
              '80-expander', '99-clique', ],
        400: ['2-expander', '3-expander', '4-expander', '8-expander', '20-expander', '50-expander', '100-expander',
              '200-expander', '300-expander', '399-clique', ],
        1000: ['2-expander', '3-expander', '4-expander', '8-expander', '16-expander', '20-expander', '30-expander',
               '40-expander', '50-expander', '100-expander', '200-expander', '500-expander', '999-clique', ]
    }[n]
    metrics_type = {'worst': 2, 'all': 0}[metrics_nodes]
    simulator.run(
        seed=seed,
        n=n,
        graphs=graphs,
        n_samples=1000,
        n_features=100,
        dataset='newreg',
        method='subgradient',
        dual_averaging_radius=5,
        error_std_dev=1,
        starting_weights_domain=[20, 30],
        max_iter=200,
        max_time=None,
        alpha=1,
        learning_rate='constant',
        spectrum_dependent_learning_rate=False,
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        obj_function='mse',
        metrics=[],
        metrics_type=metrics_type,
        metrics_nodes=metrics_nodes,
        shuffle=True,
        save_test_to_file=True,
        test_folder_name_struct=[
            '',
            'dataset',
            'w_domain',
            'nodes',
            'distr',
            'metrics',
            'alpha',
            'samp',
            'feat',
            'time',
            'iter'
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=['mse_iter', 'mse_time'],
        save_plot_to_file=False,
        plot_global_w=False,
        plot_node_w=False
    )
def test_exp_on_reg2_dataset(seed=None, n=100, distr='par', metrics_nodes='all', alert=True):
    """Run a subgradient simulation on the synthetic 'reg2' regression dataset.

    :param seed: RNG seed forwarded to simulator.run (None lets the simulator choose).
    :param n: cluster size; must be one of the keys 100, 400, 1000 below.
    :param distr: node timing distribution key: 'exp', 'unif', 'par' or 'real'.
    :param metrics_nodes: nodes used for metric collection: 'all' or 'worst'.
    :param alert: when True, echo the configuration and wait for [ENTER] first.
    :raises KeyError: if ``distr``, ``n`` or ``metrics_nodes`` is not a supported key.
    """
    if alert:
        # Fixed: previously printed 'test_exp_on_reg_dataset()' (wrong test name).
        print('test_exp_on_reg2_dataset()')
        print('n={}, distr={}, metrics_nodes={}'.format(n, distr, metrics_nodes))
        input("click [ENTER] to continue or [CTRL]+[C] to abort")
    # Map the distribution key to its timing-model class and parameters.
    time_distr_class = {
        'exp': statistics.ExponentialDistribution, 'unif': statistics.UniformDistribution,
        'par': statistics.Type2ParetoDistribution, 'real': statistics.SparkRealTimings
    }[distr]
    time_distr_param = {'exp': [[1]], 'unif': [[0, 2]], 'par': [[3, 2]], 'real': []}[distr]
    # Graph topologies to sweep, keyed by cluster size.
    graphs = {
        100: ['2-expander', '3-expander', '4-expander', '8-expander', '10-expander', '20-expander', '50-expander',
              '80-expander', '99-clique', ],
        400: ['2-expander', '3-expander', '4-expander', '8-expander', '20-expander', '50-expander', '100-expander',
              '200-expander', '300-expander', '399-clique', ],
        1000: ['2-expander', '3-expander', '4-expander', '8-expander', '16-expander', '20-expander', '30-expander',
               '40-expander', '50-expander', '100-expander', '200-expander', '500-expander', '999-clique', ]
    }[n]
    metrics_type = {'worst': 2, 'all': 0}[metrics_nodes]
    simulator.run(
        seed=seed,
        n=n,
        graphs=graphs,
        n_samples=1000,
        n_features=100,
        dataset='reg2',
        method='subgradient',
        dual_averaging_radius=300000,
        error_std_dev=1,
        starting_weights_domain=[2, 3],
        max_iter=200,
        max_time=None,
        alpha=1e-1,
        learning_rate='constant',
        spectrum_dependent_learning_rate=False,
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        obj_function='mse',
        metrics=[],
        metrics_type=metrics_type,
        metrics_nodes=metrics_nodes,
        shuffle=True,
        save_test_to_file=False,
        test_folder_name_struct=[
            'conv_synt_reg',
            # 'dataset',
            # 'w_domain',
            'nodes',
            'distr',
            'metrics',
            'alpha',
            'samp',
            'feat',
            'time',
            'iter'
        ],
        test_parent_folder="",
        instant_plot=True,
        plots=['mse_iter', 'mse_time'],
        save_plot_to_file=False,
        plot_global_w=False,
        plot_node_w=False
    )
def test_exp_on_dual_average_svm(seed=None, n=100, distr='par', metrics_nodes='all', alert=True):
    """Run a hinge-loss SVM simulation on the synthetic 'svm' dataset.

    NOTE(review): despite the function name, ``method='classic'`` is passed
    below rather than a dual-averaging method — confirm this is intentional.

    :param seed: RNG seed forwarded to simulator.run (None lets the simulator choose).
    :param n: cluster size; must be one of the keys 100, 400, 1000 below.
    :param distr: node timing distribution key: 'exp', 'unif', 'par' or 'real'.
    :param metrics_nodes: nodes used for metric collection: 'all' or 'worst'.
    :param alert: when True, echo the configuration and wait for [ENTER] first.
    :raises KeyError: if ``distr``, ``n`` or ``metrics_nodes`` is not a supported key.
    """
    if alert:
        print('test_exp_on_dual_average_svm()')
        print('n={}, distr={}, metrics_nodes={}'.format(n, distr, metrics_nodes))
        input("click [ENTER] to continue or [CTRL]+[C] to abort")
    # Map the distribution key to its timing-model class and parameters.
    time_distr_class = {
        'exp': statistics.ExponentialDistribution, 'unif': statistics.UniformDistribution,
        'par': statistics.Type2ParetoDistribution, 'real': statistics.SparkRealTimings
    }[distr]
    time_distr_param = {'exp': [[1]], 'unif': [[0, 2]], 'par': [[3, 2]], 'real': []}[distr]
    # Graph topologies to sweep, keyed by cluster size.
    graphs = {
        100: ['2-expander', '3-expander', '4-expander', '8-expander', '10-expander', '20-expander', '50-expander',
              '80-expander', '99-clique', ],
        400: ['2-expander', '3-expander', '4-expander', '8-expander', '20-expander', '50-expander', '100-expander',
              '200-expander', '300-expander', '399-clique', ],
        1000: ['2-expander', '3-expander', '4-expander', '8-expander', '16-expander', '20-expander', '30-expander',
               '40-expander', '50-expander', '100-expander', '200-expander', '500-expander', '999-clique', ]
    }[n]
    metrics_type = {'worst': 2, 'all': 0}[metrics_nodes]
    simulator.run(
        seed=seed,
        n=n,
        graphs=graphs,
        n_samples=1000,
        n_features=100,
        dataset='svm',
        # 'smv' looks like a typo for 'svm', but it must match simulator.run's
        # parameter spelling — do not rename it here alone.
        smv_label_flip_prob=0.05,
        starting_weights_domain=[-2, 2],
        max_iter=500,
        max_time=None,
        method='classic',
        alpha=1,
        learning_rate='constant',
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        time_distr_param_rule=None,
        time_const_weight=0,
        obj_function='hinge_loss',
        spectrum_dependent_learning_rate=False,
        metrics=[],
        metrics_type=metrics_type,
        metrics_nodes=metrics_nodes,
        real_metrics_toggle=False,
        shuffle=True,
        save_test_to_file=True,
        test_folder_name_struct=[
            'conv_synt_svm',
            # 'dataset',
            'alpha',
            'nodes',
            # 'shuffle',
            'distr',
            'metrics',
            'time',
            'iter'
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=['hinge_loss_iter', 'hinge_loss_time'],
        save_plot_to_file=False,
        plot_global_w=False,
        plot_node_w=False
    )
def test_exp_on_enereg_dataset(seed=None, n=100, distr='par', metrics_nodes='all', alert=True):
    """Run a regression simulation on the real 'enereg' (energy) dataset.

    :param seed: RNG seed forwarded to simulator.run (None lets the simulator choose).
    :param n: cluster size; must be one of the keys 100, 400, 1000 below.
    :param distr: node timing distribution key: 'exp', 'unif', 'par' or 'real'.
    :param metrics_nodes: nodes used for metric collection: 'all' or 'worst'.
    :param alert: when True, echo the configuration and wait for [ENTER] first.
    :raises KeyError: if ``distr``, ``n`` or ``metrics_nodes`` is not a supported key.
    """
    if alert:
        print('test_exp_on_enereg_dataset()')
        print('n={}, distr={}, metrics_nodes={}'.format(n, distr, metrics_nodes))
        input("click [ENTER] to continue or [CTRL]+[C] to abort")
    # Map the distribution key to its timing-model class and parameters.
    time_distr_class = {
        'exp': statistics.ExponentialDistribution, 'unif': statistics.UniformDistribution,
        'par': statistics.Type2ParetoDistribution, 'real': statistics.SparkRealTimings
    }[distr]
    time_distr_param = {'exp': [[1]], 'unif': [[0, 2]], 'par': [[3, 2]], 'real': []}[distr]
    # Graph topologies to sweep, keyed by cluster size. NOTE(review): the
    # n=1000 list here omits '16-expander' unlike the sibling tests — confirm
    # whether that is intentional.
    graphs = {
        100: ['2-expander', '3-expander', '4-expander', '8-expander', '10-expander', '20-expander', '50-expander',
              '80-expander', '99-clique', ],
        400: ['2-expander', '3-expander', '4-expander', '8-expander', '20-expander', '50-expander', '100-expander',
              '200-expander', '300-expander', '399-clique', ],
        1000: ['2-expander', '3-expander', '4-expander', '8-expander', '20-expander', '30-expander', '40-expander',
               '50-expander', '100-expander', '200-expander', '500-expander', '999-clique', ]
    }[n]
    metrics_type = {'worst': 2, 'all': 0}[metrics_nodes]
    # linear_regression result = 8658.025636439765
    simulator.run(
        seed=seed,
        n=n,
        graphs=graphs,
        n_samples=14000,
        dataset='enereg',
        starting_weights_domain=[0, 0],
        max_iter=1000,
        max_time=None,
        alpha=1e-6,
        learning_rate='constant',
        spectrum_dependent_learning_rate=True,
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        obj_function='mse',
        metrics=[],
        metrics_type=metrics_type,
        metrics_nodes=metrics_nodes,
        shuffle=True,
        save_test_to_file=True,
        test_folder_name_struct=[
            're100',
            'dataset',
            # 'w_domain',
            'nodes',
            'distr',
            'metrics',
            'alpha',
            'samp',
            # 'feat',
            'time',
            'iter'
        ],
        test_parent_folder="",
        instant_plot=True,
        plots=['mse_iter', 'mse_time'],
        save_plot_to_file=True,
        plot_global_w=False,
        plot_node_w=False
    )
def test_exp_on_sloreg_dataset(seed=None, n=100, distr='par', metrics_nodes='all', alert=True):
    """Run a regression simulation on the real 'sloreg' dataset.

    :param seed: RNG seed forwarded to simulator.run (None lets the simulator choose).
    :param n: cluster size; must be one of the keys 100, 400, 1000 below.
    :param distr: node timing distribution key: 'exp', 'unif', 'par' or 'real'.
    :param metrics_nodes: nodes used for metric collection: 'all' or 'worst'.
    :param alert: when True, echo the configuration and wait for [ENTER] first.
    :raises KeyError: if ``distr``, ``n`` or ``metrics_nodes`` is not a supported key.
    """
    if alert:
        # Fixed: previously printed 'test_exp_on_enereg_dataset()' — a
        # copy-paste leftover from the sibling test above.
        print('test_exp_on_sloreg_dataset()')
        print('n={}, distr={}, metrics_nodes={}'.format(n, distr, metrics_nodes))
        input("click [ENTER] to continue or [CTRL]+[C] to abort")
    # Map the distribution key to its timing-model class and parameters.
    time_distr_class = {
        'exp': statistics.ExponentialDistribution, 'unif': statistics.UniformDistribution,
        'par': statistics.Type2ParetoDistribution, 'real': statistics.SparkRealTimings
    }[distr]
    time_distr_param = {'exp': [[1]], 'unif': [[0, 2]], 'par': [[3, 2]], 'real': []}[distr]
    # Graph topologies to sweep, keyed by cluster size.
    graphs = {
        100: ['2-expander', '3-expander', '4-expander', '8-expander', '10-expander', '20-expander', '50-expander',
              '80-expander', '99-clique', ],
        400: ['2-expander', '3-expander', '4-expander', '8-expander', '20-expander', '50-expander', '100-expander',
              '200-expander', '300-expander', '399-clique', ],
        1000: ['2-expander', '3-expander', '4-expander', '8-expander', '16-expander', '20-expander', '30-expander',
               '40-expander', '50-expander', '100-expander', '200-expander', '500-expander', '999-clique', ]
    }[n]
    metrics_type = {'worst': 2, 'all': 0}[metrics_nodes]
    # linear_regression result = 67.30972320004327
    simulator.run(
        seed=seed,
        n=n,
        graphs=graphs,
        n_samples=52000,
        dataset='sloreg',
        starting_weights_domain=[2, 3],
        max_iter=1000,
        max_time=None,
        alpha=5e-6,
        learning_rate='constant',
        spectrum_dependent_learning_rate=False,
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        obj_function='mse',
        metrics=[],
        metrics_type=metrics_type,
        metrics_nodes=metrics_nodes,
        shuffle=True,
        save_test_to_file=False,
        test_folder_name_struct=[
            'fixedseed_real_reg',
            # 'dataset',
            # 'w_domain',
            'nodes',
            'distr',
            'metrics',
            'alpha',
            'samp',
            # 'feat',
            'time',
            'iter'
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=['mse_iter', 'mse_time'],
        save_plot_to_file=False,
        plot_global_w=False,
        plot_node_w=False
    )
def test_exp_on_susysvm_dataset(seed=None, n=100, distr='par', metrics_nodes='all', alert=True):
    """Run a hinge-loss SVM simulation on the real 'susysvm' (SUSY) dataset.

    :param seed: RNG seed forwarded to simulator.run (None lets the simulator choose).
    :param n: cluster size; must be one of the keys 100, 400, 1000 below.
    :param distr: node timing distribution key: 'exp', 'unif', 'par' or 'real'.
    :param metrics_nodes: nodes used for metric collection: 'all' or 'worst'.
    :param alert: when True, echo the configuration and wait for [ENTER] first.
    :raises KeyError: if ``distr``, ``n`` or ``metrics_nodes`` is not a supported key.
    """
    if alert:
        print('test_exp_on_susysvm_dataset()')
        print('n={}, distr={}, metrics_nodes={}'.format(n, distr, metrics_nodes))
        input("click [ENTER] to continue or [CTRL]+[C] to abort")
    # Map the distribution key to its timing-model class and parameters.
    time_distr_class = {
        'exp': statistics.ExponentialDistribution, 'unif': statistics.UniformDistribution,
        'par': statistics.Type2ParetoDistribution, 'real': statistics.SparkRealTimings
    }[distr]
    time_distr_param = {'exp': [[1]], 'unif': [[0, 2]], 'par': [[3, 2]], 'real': []}[distr]
    # Graph topologies to sweep, keyed by cluster size.
    graphs = {
        100: ['2-expander', '3-expander', '4-expander', '8-expander', '10-expander', '20-expander', '50-expander',
              '80-expander', '99-clique', ],
        400: ['2-expander', '3-expander', '4-expander', '8-expander', '20-expander', '50-expander', '100-expander',
              '200-expander', '300-expander', '399-clique', ],
        1000: ['2-expander', '3-expander', '4-expander', '8-expander', '16-expander', '20-expander', '30-expander',
               '40-expander', '50-expander', '100-expander', '200-expander', '500-expander', '999-clique', ]
    }[n]
    metrics_type = {'worst': 2, 'all': 0}[metrics_nodes]
    simulator.run(
        seed=seed,
        n=n,
        graphs=graphs,
        n_samples=500000,
        dataset='susysvm',
        starting_weights_domain=[-0.5, 2],
        max_iter=2000,
        max_time=None,
        alpha=5e-2,
        learning_rate='constant',
        spectrum_dependent_learning_rate=False,
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        obj_function='hinge_loss',
        metrics=[],
        metrics_type=metrics_type,
        metrics_nodes=metrics_nodes,
        shuffle=True,
        save_test_to_file=True,
        test_folder_name_struct=[
            'fixedseed_real_svm',
            # 'dataset',
            # 'w_domain',
            'nodes',
            'distr',
            'metrics',
            'alpha',
            'samp',
            # 'feat',
            'time',
            'iter'
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=['hinge_loss_iter', 'hinge_loss_time'],
        save_plot_to_file=False,
        plot_global_w=False,
        plot_node_w=False
    )
def test_exp_on_unisvm2_dataset(seed=None, n=100, distr='par', metrics_nodes='all', alert=True):
    """Run a hinge-loss SVM simulation on the synthetic 'unisvm2' dataset.

    Unlike the sibling tests, the 'real' timing distribution is not supported
    here (the lookup tables below have no 'real' key).

    :param seed: RNG seed forwarded to simulator.run (None lets the simulator choose).
    :param n: cluster size; must be one of the keys 100, 400, 1000 below.
    :param distr: node timing distribution key: 'exp', 'unif' or 'par'.
    :param metrics_nodes: nodes used for metric collection: 'all' or 'worst'.
    :param alert: when True, echo the configuration and wait for [ENTER] first.
    :raises KeyError: if ``distr``, ``n`` or ``metrics_nodes`` is not a supported key.
    """
    if alert:
        print('test_exp_on_unisvm2_dataset()')
        print('n={}, distr={}, metrics_nodes={}'.format(n, distr, metrics_nodes))
        input("click [ENTER] to continue or [CTRL]+[C] to abort")
    # Map the distribution key to its timing-model class and parameters.
    time_distr_class = {
        'exp': statistics.ExponentialDistribution, 'unif': statistics.UniformDistribution,
        'par': statistics.Type2ParetoDistribution
    }[distr]
    time_distr_param = {'exp': [[1]], 'unif': [[0, 2]], 'par': [[3, 2]], }[distr]
    # Graph topologies to sweep, keyed by cluster size.
    graphs = {
        100: ['2-expander', '3-expander', '4-expander', '8-expander', '10-expander', '20-expander', '50-expander',
              '80-expander', '99-clique', ],
        400: ['2-expander', '3-expander', '4-expander', '8-expander', '20-expander', '50-expander', '100-expander',
              '200-expander', '300-expander', '399-clique', ],
        1000: ['2-expander', '3-expander', '4-expander', '8-expander', '20-expander', '30-expander', '40-expander',
               '50-expander', '100-expander', '200-expander', '500-expander', '999-clique', ]
    }[n]
    metrics_type = {'worst': 2, 'all': 0}[metrics_nodes]
    simulator.run(
        seed=seed,
        n=n,
        graphs=graphs,
        dataset='unisvm2',
        starting_weights_domain=[1, 1],
        max_iter=1000,
        max_time=None,
        alpha=0.05,
        learning_rate='constant',
        spectrum_dependent_learning_rate=True,
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        obj_function='hinge_loss',
        metrics=[],
        metrics_type=metrics_type,
        metrics_nodes=metrics_nodes,
        shuffle=True,
        save_test_to_file=True,
        test_folder_name_struct=[
            'us2000',
            'dataset',
            'alpha',
            'nodes',
            'shuffle',
            'distr',
            'metrics',
            'time',
            'iter'
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=['hinge_loss_iter', 'hinge_loss_time'],
        save_plot_to_file=True,
        plot_global_w=False,
        plot_node_w=False
    )
def test_different_nodes_speed(seed=None, n=100, max_iter=1000, distr='exp', alert=True):
    """Measure raw iteration speed (no learning: alpha=0, method=None) over a
    few expander topologies of increasing degree.

    :param seed: RNG seed forwarded to simulator.run (None lets the simulator choose).
    :param n: cluster size; must be one of the keys 100, 300, 1000 below.
    :param max_iter: iteration budget passed to the simulator.
    :param distr: node timing distribution key: 'exp', 'unif' or 'par'.
    :param alert: when True, echo the configuration and wait for [ENTER] first.
    :raises KeyError: if ``distr`` or ``n`` is not a supported key.
    """
    if alert:
        # Fixed: previously printed 'test_different_nodes_timing()' (stale name).
        print('test_different_nodes_speed()')
        print('seed={}, n={}, distr={}'.format(seed, n, distr))
        input("click [ENTER] to continue or [CTRL]+[C] to abort")
    # Map the distribution key to its timing-model class and parameters.
    time_distr_class = {
        'exp': statistics.ExponentialDistribution, 'unif': statistics.UniformDistribution,
        'par': statistics.Type2ParetoDistribution
    }[distr]
    time_distr_param = {'exp': [[1]], 'unif': [[0, 2]], 'par': [[3, 2]], }[distr]
    # Previously-used graph tables, kept for reference (was a stray no-op
    # string literal; converted to comments):
    # 100: ['2-expander', '3-expander', '4-expander', '8-expander', '10-expander', '20-expander', '50-expander',
    #       '80-expander', '99-clique', ],
    # 400: ['2-expander', '3-expander', '4-expander', '8-expander', '20-expander', '50-expander', '100-expander',
    #       '200-expander', '300-expander', '399-clique', ],
    # 1000: ['2-expander', '3-expander', '4-expander', '8-expander', '20-expander', '30-expander', '40-expander',
    #        '50-expander', '100-expander', '200-expander', '500-expander', '999-clique', ]
    graphs = {
        # 10: ['2-cycle', '3-cycle', '5-cycle'],
        100: [
            '4-expander',
            '10-expander',
            '50-expander'
        ],
        300: [
            '5-expander',
            '17-expander',
            '150-expander'
        ],
        1000: [
            '6-expander',
            '32-expander',
            '500-expander'
        ]
    }[n]
    simulator.run(
        seed=seed,
        n=n,
        graphs=graphs,
        n_samples=1000,
        dataset='unireg',
        starting_weights_domain=[0, 0],
        max_iter=max_iter,
        max_time=None,
        method=None,
        alpha=0,
        learning_rate='constant',
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        time_distr_param_rule=None,
        time_const_weight=0,
        obj_function='mse',
        real_metrics_toggle=False,
        save_test_to_file=True,
        test_folder_name_struct=[
            'speed_003',
            # 'shuffle',
            # 'w_domain',
            # 'metrics',
            # 'dataset',
            'nodes',
            'distr',
            # 'distr_rule',
            # 'error',
            # 'nodeserror',
            # 'alpha',
            # 'samp',
            # 'feat',
            'time',
            'iter',
            'c',
            # 'method',
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=[],
        save_plot_to_file=False
    )
def test_classic_gd():
    """Run classic gradient descent on the synthetic 'reg2' dataset over a
    fixed set of expander/clique topologies with a fixed seed.

    Interactive: waits for [ENTER] before running; [CTRL]+[C] aborts.
    """
    # Fixed: the prompt previously said 'sim_spectral_ratios()...', the name of
    # a different test, which was misleading at the console.
    input("test_classic_gd()... click [ENTER] to continue or [CTRL]+[C] to abort")
    simulator.run(
        seed=22052010,
        n=100,
        graphs=[
            # "0-diagonal",
            # "1-cycle",
            # "2-uniform_edges",
            # "2-cycle",
            '2-expander',
            # "3-uniform_edges",
            # "3-cycle",
            # '3-expander',
            # "4-uniform_edges",
            # "4-cycle",
            '4-expander',
            # "5-uniform_edges",
            # "5-cycle",
            # '5-expander',
            # "8-uniform_edges",
            # "8-cycle",
            '8-expander',
            # "10-uniform_edges",
            # "10-cycle",
            # '10-expander',
            # "20-uniform_edges",
            # "20-cycle",
            # '18-expander',
            '20-expander',
            # '30-expander',
            # '40-expander',
            # "50-uniform_edges",
            # "50-cycle",
            # '50-expander',
            # "80-uniform_edges",
            # "80-cycle",
            # '80-expander',
            # '100-expander',
            # '200-expander',
            # '300-expander',
            # '500-expander',
            # '80-expander',
            "99-clique",
        ],
        n_samples=1000,
        n_features=100,
        dataset='reg2',
        error_std_dev=1.0,
        node_error_std_dev=0.0,
        starting_weights_domain=[-10, 50],
        max_iter=500,
        max_time=None,
        method='classic',
        alpha=1e-3,
        learning_rate='constant',
        time_distr_class=statistics.Type2ParetoDistribution,
        time_distr_param=[[3, 2]],
        time_distr_param_rule=None,
        time_const_weight=0,
        obj_function='mse',
        spectrum_dependent_learning_rate=True,
        metrics=[],
        metrics_type=2,
        metrics_nodes='worst',
        real_metrics_toggle=False,
        shuffle=False,
        save_test_to_file=True,
        test_folder_name_struct=[
            'n006',
            'dataset',
            'w_domain',
            'alpha',
            'shuffle',
            'metrics',
            'distr',
            'error',
            'nodeserror',
            'nodes',
            'samp',
            'feat',
            'time',
            'iter',
            'c',
            'method',
        ],
        test_parent_folder="",
        instant_plot=True,
        plots=('mse_iter', 'mse_time'),
        save_plot_to_file=True,
        plot_global_w=True,
        plot_node_w=False
    )
def test_unisvm2_dataset(distr):
    """Run a continuous-hinge-loss SVM simulation on the 'unisvm2' dataset.

    :param distr: integer index selecting the timing distribution:
        0 = Exponential, 1 = Uniform, 2 = Type-2 Pareto.
    :raises IndexError: if ``distr`` is out of range.
    """
    # Select the timing-model class and its parameters by index.
    time_distr_class = [
        statistics.ExponentialDistribution,
        statistics.UniformDistribution,
        statistics.Type2ParetoDistribution
    ][distr]
    time_distr_param = [
        [[1]],
        [[0, 2]],
        [[3, 2]],
    ][distr]
    # Fixed: the prompt previously said 'test_unisvm_dataset()...', the name of
    # the sibling test — a copy-paste leftover.
    input("test_unisvm2_dataset()... click [ENTER] to continue or [CTRL]+[C] to abort")
    simulator.run(
        seed=22052010,
        n=100,
        graphs=[
            # "0-diagonal",
            # "1-cycle",
            # "2-uniform_edges",
            # "2-cycle",
            '2-expander',
            # "3-uniform_edges",
            # "3-cycle",
            '3-expander',
            # "4-uniform_edges",
            # "4-cycle",
            '4-expander',
            # "5-uniform_edges",
            # "5-cycle",
            # '5-expander',
            # "8-uniform_edges",
            # "8-cycle",
            '8-expander',
            # "10-uniform_edges",
            # "10-cycle",
            '10-expander',
            # "20-uniform_edges",
            # "20-cycle",
            # '18-expander',
            '20-expander',
            # '30-expander',
            # '40-expander',
            # "50-uniform_edges",
            # "50-cycle",
            '50-expander',
            # "80-uniform_edges",
            # "80-cycle",
            # '80-expander',
            # '100-expander',
            # '200-expander',
            # '300-expander',
            # '500-expander',
            # '80-expander',
            "99-clique",
        ],
        dataset='unisvm2',
        # 'smv' looks like a typo for 'svm' but must match simulator.run's
        # parameter spelling — do not rename it here alone.
        smv_label_flip_prob=0.05,
        starting_weights_domain=[1, 1],
        max_time=None,
        max_iter=1000,
        alpha=0.05,
        learning_rate='constant',
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        time_distr_param_rule=None,
        time_const_weight=0,
        obj_function='cont_hinge_loss',
        spectrum_dependent_learning_rate=False,
        metrics=[],
        metrics_type=2,
        metrics_nodes='worst',
        real_metrics_toggle=False,
        method='classic',
        shuffle=True,
        save_test_to_file=True,
        test_folder_name_struct=[
            'us2003',
            'dataset',
            # 'w_domain',
            'alpha',
            'shuffle',
            'metrics',
            'distr',
            # 'error',
            # 'nodeserror',
            'nodes',
            # 'samp',
            # 'feat',
            'time',
            'iter',
            # 'c',
            # 'method',
        ],
        test_parent_folder="",
        instant_plot=False,
        plots=('cont_hinge_loss_iter', 'hinge_loss_time'),
        save_plot_to_file=True,
        plot_global_w=False,
        plot_node_w=False
    )
def test_unisvm_dataset(distr):
    """Run a continuous-hinge-loss SVM simulation on the 'unisvm' dataset.

    :param distr: integer index selecting the timing distribution:
        0 = Exponential, 1 = Uniform, 2 = Type-2 Pareto.
    :raises IndexError: if ``distr`` is out of range.
    """
    # Select the timing-model class and its parameters by index.
    time_distr_class = [
        statistics.ExponentialDistribution,
        statistics.UniformDistribution,
        statistics.Type2ParetoDistribution
    ][distr]
    time_distr_param = [
        [[1]],
        [[0, 2]],
        [[3, 2]],
    ][distr]
    input("test_unisvm_dataset()... click [ENTER] to continue or [CTRL]+[C] to abort")
    simulator.run(
        seed=22052010,
        n=100,
        graphs=[
            # "0-diagonal",
            # "1-cycle",
            # "2-uniform_edges",
            # "2-cycle",
            '2-expander',
            # "3-uniform_edges",
            # "3-cycle",
            '3-expander',
            # "4-uniform_edges",
            # "4-cycle",
            '4-expander',
            # "5-uniform_edges",
            # "5-cycle",
            # '5-expander',
            # "8-uniform_edges",
            # "8-cycle",
            '8-expander',
            # "10-uniform_edges",
            # "10-cycle",
            '10-expander',
            # "20-uniform_edges",
            # "20-cycle",
            # '18-expander',
            '20-expander',
            # '30-expander',
            # '40-expander',
            # "50-uniform_edges",
            # "50-cycle",
            '50-expander',
            # "80-uniform_edges",
            # "80-cycle",
            '80-expander',
            # '100-expander',
            # '200-expander',
            # '300-expander',
            # '500-expander',
            # '80-expander',
            "99-clique",
        ],
        dataset='unisvm',
        starting_weights_domain=[1, 1],
        max_time=None,
        max_iter=800,
        alpha=0.1,  # * math.sqrt(2),
        learning_rate='constant',
        time_distr_class=time_distr_class,
        time_distr_param=time_distr_param,
        time_distr_param_rule=None,
        time_const_weight=0,
        obj_function='cont_hinge_loss',
        spectrum_dependent_learning_rate=False,
        metrics=[],
        metrics_type=2,
        metrics_nodes='worst',
        # -inf epsilon disables any epsilon-based stopping criterion.
        epsilon=-math.inf,
        real_metrics_toggle=False,
        method='classic',
        shuffle=False,
        save_test_to_file=True,
        test_folder_name_struct=[
            'us006',
            'dataset',
            # 'w_domain',
            'alpha',
            'shuffle',
            'metrics',
            'distr',
            # 'error',
            # 'nodeserror',
            'nodes',
            # 'samp',
            # 'feat',
            'time',
            'iter',
            # 'c',
            # 'method',
        ],
        test_parent_folder="",
        instant_plot=True,
        plots=('cont_hinge_loss_iter', 'cont_hinge_loss_time'),
        save_plot_to_file=True,
        plot_global_w=True,
        plot_node_w=False
    )
def test_over_c():
    """Sweep the time-constant weight ``c`` over 16 seeds on the synthetic
    'reg' dataset, running one simulation per (seed, c) pair.

    Failed runs are skipped so the sweep continues; each failure is reported.
    """
    for s in range(16):
        for c in [1, 0.999, 0.997, 0.995, 0.992, 0.985, 0.950, 0.9, 0.75, 0.5, 0.3, 0.1, 0]:
            try:
                simulator.run(
                    seed=s * 77,
                    n=100,
                    graphs=[
                        # "0-diagonal",
                        "1-cycle",
                        "2-uniform_edges",
                        "2-cycle",
                        "3-uniform_edges",
                        "3-cycle",
                        "4-uniform_edges",
                        "4-cycle",
                        # "5-uniform_edges",
                        # "5-cycle",
                        "8-uniform_edges",
                        "8-cycle",
                        # "10-uniform_edges",
                        # "10-cycle",
                        "20-uniform_edges",
                        "20-cycle",
                        "50-uniform_edges",
                        "50-cycle",
                        "80-uniform_edges",
                        "80-cycle",
                        "99-clique"
                    ],
                    n_samples=1000,
                    n_features=100,
                    dataset='reg',
                    error_std_dev=1.0,
                    starting_weights_domain=[-2.5, 10.60],
                    max_iter=None,
                    max_time=100,
                    method='classic',
                    alpha=1e-4,
                    learning_rate='root-decreasing',
                    time_distr_class=statistics.ExponentialDistribution,
                    # NOTE(review): other tests pass [[1]] here — confirm the
                    # simulator accepts a flat tuple as well.
                    time_distr_param=(1,),
                    time_const_weight=c,
                    save_test_to_file=True,
                    test_folder_name_struct=(
                        'y03',
                        'w_domain',
                        'dataset',
                        'distr',
                        'error',
                        'nodeserror',
                        'alpha',
                        'nodes',
                        'samp',
                        'feat',
                        'time',
                        'iter',
                        'c'
                    ),
                    test_parent_folder="y03",
                    instant_plot=False,
                    save_plot_to_file=False,
                    plot_global_w=False,
                    plot_node_w=False
                )
            except Exception as e:
                # Fixed: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit and made the sweep unabortable.
                # Best-effort semantics are kept, but failures are now visible.
                print("run failed for seed={}, c={}: {}".format(s * 77, c, e))
                continue
def test_spectral_ratios():
    """Run a fixed-seed 'unireg' simulation over cycle/expander/clique graphs
    to compare convergence against their spectral properties.

    Interactive: waits for [ENTER] before running; [CTRL]+[C] aborts.
    """
    input("test_spectral_ratios()... click [ENTER] to continue or [CTRL]+[C] to abort")
    simulator.run(
        seed=1531934976,
        n=100,
        graphs=[
            "0-diagonal",
            "1-cycle",
            # "2-uniform_edges",
            "2-cycle",
            '2-expander',
            # "3-uniform_edges",
            # "3-cycle",
            # '3-expander',
            # "4-uniform_edges",
            "4-cycle",
            '4-expander',
            # "5-uniform_edges",
            # "5-cycle",
            # '5-expander',
            # "8-uniform_edges",
            "8-cycle",
            '8-expander',
            # "10-uniform_edges",
            # "10-cycle",
            # '10-expander',
            # "20-uniform_edges",
            "20-cycle",
            '20-expander',
            # "50-uniform_edges",
            "50-cycle",
            '50-expander',
            # "80-uniform_edges",
            # "80-cycle",
            # '80-expander',
            "99-clique",
        ],
        n_samples=100,
        dataset='unireg',
        starting_weights_domain=[-70, -50],
        max_iter=100,
        alpha=0.01,
        learning_rate='constant',
        time_distr_class=statistics.ExponentialDistribution,
        time_distr_param=[[1]],
        time_distr_param_rule=None,
        time_const_weight=0,
        spectrum_dependent_learning_rate=False,
        metrics_type=0,
        metrics_nodes='all',
        method='classic',
        shuffle=False,
        save_test_to_file=True,
        test_folder_name_struct=[
            'ux004',
            'w_domain',
            'alpha',
            'shuffle',
            'dataset',
            'metrics',
            'distr',
            # 'error',
            # 'nodeserror',
            'nodes',
            # 'samp',
            # 'feat',
            # 'time',
            'iter',
            # 'c',
            # 'method',
        ],
        test_parent_folder="",
        instant_plot=True,
        plots=('mse_iter',),
        save_plot_to_file=True,
        plot_global_w=True,
        # Plot the local weight trajectory only for this subset of node ids.
        plot_node_w=[0, 5, 10, 20, 50, 55, 60, 70]
    )
def test_different_100nodes_timing_loop(index):
    """Sweep heterogeneous node-timing settings on 100-node cycle/clique graphs.

    For each (mu_slow, rule) pair a no-learning run (alpha=0, method=None) is
    executed so that only iteration timing is measured.

    :param index: integer index selecting the timing distribution:
        0 = Exponential, 1 = Uniform, 2 = Type-2 Pareto.
    :raises IndexError: if ``index`` is out of range.
    """
    # (slowdown factor, assignment rule) pairs to sweep; 'alternate' and
    # 'split' distribute the slow parameters across nodes differently
    # (see simulator's time_distr_param_rule).
    S = [
        # [1, None],
        # [2, None],
        # [10, None],
        # [2, 'alternate'],
        # [10, 'alternate'],
        # [2, 'split'],
        # [10, 'split'],
        [3, 'alternate'],
        [4, 'alternate'],
        [8, 'alternate'],
        [15, 'alternate'],
        [20, 'alternate'],
        [3, 'split'],
        [4, 'split'],
        [8, 'split'],
        [15, 'split'],
        [20, 'split'],
    ]
    for mu_slow, rule in S:
        time_distr_class = [
            statistics.ExponentialDistribution,
            statistics.UniformDistribution,
            statistics.Type2ParetoDistribution
        ][index]
        # Two parameter sets per run: a slowed-down one (scaled by mu_slow)
        # and the baseline one.
        time_distr_param = [
            [[1 / mu_slow], [1]],
            [[0, 2 * mu_slow], [0, 2]],
            [[3, 2 * mu_slow], [3, 2]],
        ][index]
        simulator.run(
            seed=18062018,
            n=100,
            graphs=[
                "0-diagonal",
                "1-cycle",
                "2-cycle",
                # '2-expander',
                "3-cycle",
                # '3-expander',
                "4-cycle",
                # '4-expander',
                "8-cycle",
                # '8-expander',
                "10-cycle",
                # '10-expander',
                "20-cycle",
                # '20-expander',
                "50-cycle",
                # '50-expander',
                "99-clique",
            ],
            n_samples=1000,
            dataset='unireg',
            starting_weights_domain=[0, 0],
            max_iter=None,
            max_time=10000,
            method=None,
            alpha=0,
            learning_rate='constant',
            time_distr_class=time_distr_class,
            time_distr_param=time_distr_param,
            time_distr_param_rule=rule,
            time_const_weight=0,
            obj_function='mse',
            real_metrics_toggle=False,
            save_test_to_file=True,
            test_folder_name_struct=[
                'hxx01_hetertime',
                # 'shuffle',
                # 'w_domain',
                # 'metrics',
                # 'dataset',
                'distr',
                'distr_rule',
                # 'error',
                # 'nodeserror',
                # 'alpha',
                'nodes',
                # 'samp',
                # 'feat',
                'time',
                'iter',
                'c',
                # 'method',
            ],
            test_parent_folder="",
            instant_plot=False,
            plots=[
                'iter_time',
                'avg_iter_time'
            ],
            save_plot_to_file=True
        )
def test_different_1000nodes_timing_loop(index):
    """Sweep heterogeneous node-timing settings on 1000-node cycle/expander graphs.

    For each (mu_slow, rule) pair a no-learning run (alpha=0, method=None) is
    executed so that only iteration timing is measured.

    :param index: integer index selecting the timing distribution:
        0 = Exponential, 1 = Uniform, 2 = Type-2 Pareto.
    :raises IndexError: if ``index`` is out of range.
    """
    # (slowdown factor, assignment rule) pairs to sweep.
    S = [
        [1, None],
        [2, None],
        [10, None],
        [2, 'alternate'],
        [10, 'alternate'],
        [2, 'split'],
        [10, 'split'],
    ]
    for mu_slow, rule in S:
        time_distr_class = [
            statistics.ExponentialDistribution,
            statistics.UniformDistribution,
            statistics.Type2ParetoDistribution
        ][index]
        # Two parameter sets per run: a slowed-down one (scaled by mu_slow)
        # and the baseline one.
        time_distr_param = [
            [[1 / mu_slow], [1]],
            [[0, 2 * mu_slow], [0, 2]],
            [[3, 2 * mu_slow], [3, 2]],
        ][index]
        simulator.run(
            seed=18062018,
            n=1000,
            graphs=[
                "0-diagonal",
                "1-cycle",
                "2-cycle",
                '2-expander',
                "4-cycle",
                '4-expander',
                "20-cycle",
                '20-expander',
                "50-cycle",
                '50-expander',
                "100-cycle",
                '100-expander',
                "200-cycle",
                '200-expander',
                "500-cycle",
                '500-expander',
                "999-clique",
            ],
            n_samples=1000,
            dataset='unireg',
            starting_weights_domain=[0, 0],
            max_iter=None,
            max_time=10000,
            method=None,
            alpha=0,
            learning_rate='constant',
            time_distr_class=time_distr_class,
            time_distr_param=time_distr_param,
            time_distr_param_rule=rule,
            time_const_weight=0,
            obj_function='mse',
            real_metrics_toggle=False,
            save_test_to_file=True,
            test_folder_name_struct=[
                'hxx01_hetertime',
                # 'shuffle',
                # 'w_domain',
                # 'metrics',
                # 'dataset',
                'distr',
                'distr_rule',
                # 'error',
                # 'nodeserror',
                # 'alpha',
                'nodes',
                # 'samp',
                # 'feat',
                'time',
                'iter',
                'c',
                # 'method',
            ],
            test_parent_folder="",
            instant_plot=False,
            plots=[
                'iter_time',
                'avg_iter_time'
            ],
            save_plot_to_file=True
        )
def test_eigenvalue_computation_suite():
    """Compare the computed vs. closed-form second eigenvalue over a grid of
    graph sizes and degrees, printing each pair and their absolute difference.
    """
    node_counts = [1000, 500, 200, 100, 50, 10]
    degrees = [1, 2, 3, 4, 5, 8, 10, 20, 40, 50, 80, 99]
    for node_count in node_counts:
        for degree in degrees:
            first, second = test_eigenvalue_computation(node_count, degree)
            delta = abs(first - second)
            print("n={}, d={}, ({}, {}) diff={}".format(node_count, degree, first, second, delta))
def test_eigenvalue_computation(N, d):
    """Return the second eigenvalue of a d-regular N-cycle graph computed two ways.

    :param N: number of nodes in the graph.
    :param d: degree of the regular cycle graph.
    :return: ``[v1, v2]`` where v1 is computed from the adjacency matrix and
        v2 from the closed-form expression sin(pi*(d+1)/N) / (sin(pi/N)*(d+1)).
    """
    A = graphs.generate_n_cycle_d_regular_graph_by_degree(N, d)
    # Removed: an unused local `NA = normalize(A, axis=1, norm='l1')` whose
    # result was never read (dead computation).
    v1 = uniform_weighted_Pn_second_eigenvalue_from_adjacency_matrix(A)
    v2 = (math.sin(math.pi * (d + 1) / N) / math.sin(math.pi / N)) / (d + 1)
    return [v1, v2]
def compute_velocities():
    """Load a saved heterogeneous-timing test log and print, for every graph in
    the test setup, its single-iteration speed relative to the '0-diagonal'
    (no-communication) baseline.
    """
    test_folder_path = "./test_log/test_h002_hetertime_exp0.5_100n_1000time_INFiter_0c"
    logs, setup = load_test_logs(test_folder_path, return_setup=True)
    baseline_timing = logs['avg_iter_time']['0-diagonal']
    for topology in setup['graphs']:
        velocity = statistics.single_iter_velocity_from_logs(
            logs['avg_iter_time'][topology],
            baseline_timing,
            setup['max_time'],
            setup['n']
        )
        print("{} speed = {}".format(topology, velocity))
"""
y = np.array([-1, 1, 1, 1, -1, 1])
y_hat_f = np.array([-1, -1, -1, 1, -1, 1])
X = np.array([[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3],
[0, 1, 2, 3]])
print(mltoolbox.HingeLossFunction.f_gradient(y, y_hat_f, X))
print(mltoolbox.HingeLossFunction.f_gradient2(y, y_hat_f, X))
"""
"""
Ks = [0,1,2,3,4,8,20,50,99]
exp_str = "------------------------------\nExponential (lambda=1)\n------------------------------\n"
par_str = "------------------------------\nPareto Type 2 (alpha=3, sigma=2)\n------------------------------\n"
uni_str = "------------------------------\nUniform (a=0, b=2)\n------------------------------\n"
bounds = [
("memoryless lb", statistics.single_iteration_velocity_memoryless_lower_bound),
("residual time lb", statistics.single_iteration_velocity_residual_lifetime_lower_bound),
("don bound", statistics.single_iteration_velocity_don_bound),
("upper bound", statistics.single_iteration_velocity_upper_bound),
]
for bound in bounds:
exp_str+="{}\n".format(bound[0])
par_str+="{}\n".format(bound[0])
uni_str+="{}\n".format(bound[0])
for k in Ks:
exp_str += "(k={}, {})\n".format(k, bound[1](
k,
statistics.ExponentialDistribution,
[1]
))
par_str += "(k={}, {})\n".format(k, bound[1](
k,
statistics.Type2ParetoDistribution,
[3,2]
))
uni_str += "(k={}, {})\n".format(k, bound[1](
k,
statistics.UniformDistribution,
[0,2]
))
exp_str+="\n"
par_str+="\n"
uni_str+="\n"
print(exp_str)
print(par_str)
print(uni_str)
"""
if __name__ == "__main__":
    # Command-line entry point: select which core test suite to run.
    parser = argparse.ArgumentParser(
        description='Run test simulations'
    )
    parser.add_argument(
        '-c', '--core',
        action='store',
        default=-1,
        required=False,
        help='Specify core test suite',
        dest='core'
    )
    cli_args = parser.parse_args()
    run(int(cli_args.core))
| 30.614252
| 120
| 0.503376
| 5,391
| 51,983
| 4.59655
| 0.065109
| 0.040678
| 0.033898
| 0.021792
| 0.875908
| 0.842817
| 0.83293
| 0.818119
| 0.813438
| 0.78753
| 0
| 0.057012
| 0.350461
| 51,983
| 1,697
| 121
| 30.632292
| 0.676884
| 0.059154
| 0
| 0.810934
| 0
| 0
| 0.166529
| 0.010095
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020501
| false
| 0
| 0.003797
| 0
| 0.025816
| 0.018223
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ae861135f92db4436ae78c1752624c7f1de65d68
| 20,049
|
py
|
Python
|
regression/daily/orderer_ote.py
|
SunHaoli/fabric-test
|
15ad4464514b4bb4b2e6ad04e2445a57ab8a9033
|
[
"Apache-2.0"
] | 1
|
2019-08-16T07:39:27.000Z
|
2019-08-16T07:39:27.000Z
|
regression/daily/orderer_ote.py
|
blocklimemal/fabric-test
|
37ff762f9f6ca1ee09c0496207f88287e95e729c
|
[
"Apache-2.0"
] | null | null | null |
regression/daily/orderer_ote.py
|
blocklimemal/fabric-test
|
37ff762f9f6ca1ee09c0496207f88287e95e729c
|
[
"Apache-2.0"
] | 2
|
2017-12-04T15:01:33.000Z
|
2018-08-21T15:17:47.000Z
|
# Copyright IBM Corp. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
import unittest
import subprocess
import os
# Location of the Orderer Traffic Engine (OTE) tool, relative to this test file.
tool_directory = '../../tools/OTE'
# Per-scenario log files are collected here.
logs_directory = './ote_logs'
# Marker string that the OTE driver prints on a successful run.
TEST_PASS_STRING = "RESULT=PASSED"
if not os.path.exists(logs_directory):
    os.makedirs(logs_directory)
class perf_orderer(unittest.TestCase):
    def test_FAB_6996_30ktx_1ch_solo(self):
        '''
        Using one broadcast client thread per channel per orderer,
        send 30000 transactions through the ordering service, and verify
        delivery using an equal number of deliver clients.
        Refer to the logs to also see the TPS throughput rate.
        '''
        with open(os.path.join(logs_directory, "ote_FAB-6996_30ktx_1ch_solo.log"), "w") as logfile:
            # Drive the OTE shell script for this scenario; '2>&1' folds stderr
            # into the captured output so failures appear in the log too.
            result = subprocess.check_output("./runote.sh -t FAB-6996_30ktx_1ch_solo 2>&1",
                                shell=True,
                                #stderr=subprocess.STDOUT, #Uncomment this two lines to see the stdout
                                #stdout=subprocess.STDOUT,
                                cwd=tool_directory)
            print(result)
            # NOTE(review): under Python 3, check_output returns bytes, which
            # cannot be written to a text-mode file or matched against a str —
            # this suite appears to target Python 2; confirm before porting.
            logfile.write(result)
            self.assertIn(TEST_PASS_STRING, result)
    def test_FAB_7070_30ktx_1ch_solo_10kpayload(self):
        '''
        Using one broadcast client thread per channel per orderer,
        send 30000 transactions through the ordering service, and verify
        delivery using an equal number of deliver clients.
        Refer to the logs to also see the TPS throughput rate.
        '''
        # Same scenario as FAB-6996 but with a 10 KB transaction payload.
        with open(os.path.join(logs_directory, "ote_FAB-7070_30ktx_1ch_solo_10kpayload.log"), "w") as logfile:
            # Drive the OTE shell script; '2>&1' captures stderr in the output.
            result = subprocess.check_output("./runote.sh -t FAB-7070_30ktx_1ch_solo_10kpayload 2>&1",
                                shell=True,
                                cwd=tool_directory)
            print(result)
            logfile.write(result)
            self.assertIn(TEST_PASS_STRING, result)
    def test_FAB_7024_30ktx_1ch_solo_500batchsize(self):
        '''
        Using one broadcast client thread per channel per orderer,
        send 30000 transactions through the ordering service with batchsize 500,
        and verify delivery using an equal number of deliver clients.
        Refer to the logs to also see the TPS throughput rate.
        '''
        with open(os.path.join(logs_directory, "ote_FAB-7024_30ktx_1ch_solo_500batchsize.log"), "w") as logfile:
            # Drive the OTE shell script; '2>&1' captures stderr in the output.
            result = subprocess.check_output("./runote.sh -t FAB-7024_30ktx_1ch_solo_500batchsize 2>&1",
                                shell=True,
                                cwd=tool_directory)
            print(result)
            logfile.write(result)
            self.assertIn(TEST_PASS_STRING, result)
    def test_FAB_7071_30ktx_1ch_solo_500batchsize_10kpayload(self):
        '''
        Using one broadcast client thread per channel per orderer,
        send 30000 transactions through the ordering service with batchsize 500,
        and verify delivery using an equal number of deliver clients.
        Refer to the logs to also see the TPS throughput rate.
        '''
        # Same scenario as FAB-7024 but with a 10 KB transaction payload.
        with open(os.path.join(logs_directory, "ote_FAB-7071_30ktx_1ch_solo_500batchsize_10kpayload.log"), "w") as logfile:
            # Drive the OTE shell script; '2>&1' captures stderr in the output.
            result = subprocess.check_output("./runote.sh -t FAB-7071_30ktx_1ch_solo_500batchsize_10kpayload 2>&1",
                                shell=True,
                                cwd=tool_directory)
            print(result)
            logfile.write(result)
            self.assertIn(TEST_PASS_STRING, result)
    def test_FAB_7026_30ktx_3ch_solo(self):
        '''
        Using one broadcast client thread per channel per orderer,
        send 30000 transactions through the ordering service in 3 channels,
        and verify delivery using an equal number of deliver clients.
        Refer to the logs to also see the TPS throughput rate.
        '''
        with open(os.path.join(logs_directory, "ote_FAB-7026_30ktx_3ch_solo.log"), "w") as logfile:
            # Drive the OTE shell script; '2>&1' captures stderr in the output.
            result = subprocess.check_output("./runote.sh -t FAB-7026_30ktx_3ch_solo 2>&1",
                                shell=True,
                                cwd=tool_directory)
            print(result)
            logfile.write(result)
            self.assertIn(TEST_PASS_STRING, result)
    def test_FAB_7072_30ktx_3ch_solo_10kpayload(self):
        '''
        Using one broadcast client thread per channel per orderer,
        send 30000 transactions through the ordering service in 3 channels,
        and verify delivery using an equal number of deliver clients.
        Refer to the logs to also see the TPS throughput rate.
        '''
        # Same scenario as FAB-7026 but with a 10 KB transaction payload.
        with open(os.path.join(logs_directory, "ote_FAB-7072_30ktx_3ch_solo_10kpayload.log"), "w") as logfile:
            # Drive the OTE shell script; '2>&1' captures stderr in the output.
            result = subprocess.check_output("./runote.sh -t FAB-7072_30ktx_3ch_solo_10kpayload 2>&1",
                                shell=True,
                                cwd=tool_directory)
            print(result)
            logfile.write(result)
            self.assertIn(TEST_PASS_STRING, result)
    def test_FAB_7027_30ktx_3ch_solo_500batchsize(self):
        '''
        Using one broadcast client thread per channel per orderer,
        send 30000 transactions through the ordering service in 3 channels,
        and verify delivery using an equal number of deliver clients.
        Refer to the logs to also see the TPS throughput rate.
        '''
        # 3-channel variant with batch size 500.
        with open(os.path.join(logs_directory, "ote_FAB-7027_30ktx_3ch_solo_500batchsize.log"), "w") as logfile:
            # Drive the OTE shell script; '2>&1' captures stderr in the output.
            result = subprocess.check_output("./runote.sh -t FAB-7027_30ktx_3ch_solo_500batchsize 2>&1",
                                shell=True,
                                cwd=tool_directory)
            print(result)
            logfile.write(result)
            self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7073_30ktx_3ch_solo_500batchsize_10kpayload(self):
'''
Using one broadcast client thread per channel per orderer,
send 30000 transactions through the ordering service in 3 channels,
and verify delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7073_30ktx_3ch_solo_500batchsize_10kpayload.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7073_30ktx_3ch_solo_500batchsize_10kpayload 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7036_30ktx_1ch_3ord_5kb(self):
'''
Using one broadcast client thread per channel per orderer,
send 30000 transactions through the ordering service, and verify
delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7036_30ktx_1ch_3ord_5kb.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7036_30ktx_1ch_3ord_5kb 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7074_15ktx_1ch_3ord_5kb_10kpayload(self):
'''
Using one broadcast client thread per channel per orderer,
send 15000 transactions through the ordering service, and verify
delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7074_15ktx_1ch_3ord_5kb_10kpayload.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7074_15ktx_1ch_3ord_5kb_10kpayload 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7037_30ktx_1ch_3ord_5kb_500batchsize(self):
'''
Using one broadcast client thread per channel per orderer,
send 30000 transactions through the ordering service with batchsize 500,
and verify delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7037_30ktx_1ch_3ord_5kb_500batchsize.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7037_30ktx_1ch_3ord_5kb_500batchsize 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7075_15ktx_1ch_3ord_5kb_500batchsize_10kpayload(self):
'''
Using one broadcast client thread per channel per orderer,
send 15000 transactions through the ordering service with batchsize 500,
and verify delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7075_15ktx_1ch_3ord_5kb_500batchsize_10kpayload.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7075_15ktx_1ch_3ord_5kb_500batchsize_10kpayload 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7038_30ktx_3ch_3ord_5kb(self):
'''
Using one broadcast client thread per channel per orderer,
send 30000 transactions through the ordering service, and verify
delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7038_30ktx_3ch_3ord_5kb.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7038_30ktx_3ch_3ord_5kb 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7076_15ktx_3ch_3ord_5kb_10kpayload(self):
'''
Using one broadcast client thread per channel per orderer,
send 15000 transactions through the ordering service, and verify
delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7076_15ktx_3ch_3ord_5kb_10kpayload.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7076_15ktx_3ch_3ord_5kb_10kpayload 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7039_30ktx_3ch_3ord_5kb_500batchsize(self):
'''
Using one broadcast client thread per channel per orderer,
send 30000 transactions through the ordering service with batchsize 500,
and verify delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7039_30ktx_3ch_3ord_5kb_500batchsize.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7039_30ktx_3ch_3ord_5kb_500batchsize 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7077_15ktx_3ch_3ord_5kb_500batchsize_10kpayload(self):
'''
Using one broadcast client thread per channel per orderer,
send 15000 transactions through the ordering service with batchsize 500,
and verify delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7077_15ktx_3ch_3ord_5kb_500batchsize_10kpayload.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7077_15ktx_3ch_3ord_5kb_500batchsize_10kpayload 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7058_30ktx_1ch_6ord_5kb(self):
'''
Using one broadcast client thread per channel per orderer,
send 30000 transactions through the ordering service, and verify
delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7058_30ktx_1ch_6ord_5kb.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7058_30ktx_1ch_6ord_5kb 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7078_15ktx_1ch_6ord_5kb_10kpayload(self):
'''
Using one broadcast client thread per channel per orderer,
send 15000 transactions through the ordering service, and verify
delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7078_15ktx_1ch_6ord_5kb_10kpayload.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7078_15ktx_1ch_6ord_5kb_10kpayload 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7059_30ktx_1ch_6ord_5kb_500batchsize(self):
'''
Using one broadcast client thread per channel per orderer,
send 30000 transactions through the ordering service with batchsize 500,
and verify delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7059_30ktx_1ch_6ord_5kb_500batchsize.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7059_30ktx_1ch_6ord_5kb_500batchsize 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7079_15ktx_1ch_6ord_5kb_500batchsize_10kpayload(self):
'''
Using one broadcast client thread per channel per orderer,
send 15000 transactions through the ordering service with batchsize 500,
and verify delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7079_15ktx_1ch_6ord_5kb_500batchsize_10kpayload.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7079_15ktx_1ch_6ord_5kb_500batchsize_10kpayload 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7060_30ktx_3ch_6ord_5kb(self):
'''
Using one broadcast client thread per channel per orderer,
send 30000 transactions through the ordering service, and verify
delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7060_30ktx_3ch_6ord_5kb.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7060_30ktx_3ch_6ord_5kb 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7080_15ktx_3ch_6ord_5kb_10kpayload(self):
'''
Using one broadcast client thread per channel per orderer,
send 30000 transactions through the ordering service, and verify
delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7080_15ktx_3ch_6ord_5kb_10kpayload.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7080_15ktx_3ch_6ord_5kb_10kpayload 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7061_30ktx_3ch_6ord_5kb_500batchsize(self):
'''
Using one broadcast client thread per channel per orderer,
send 30000 transactions through the ordering service with batchsize 500,
and verify delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7061_30ktx_3ch_6ord_5kb_500batchsize.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7061_30ktx_3ch_6ord_5kb_500batchsize 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
def test_FAB_7081_15ktx_3ch_6ord_5kb_500batchsize_10kpayload(self):
'''
Using one broadcast client thread per channel per orderer,
send 15000 transactions through the ordering service with batchsize 500,
and verify delivery using an equal number of deliver clients.
Refer to the logs to also see the TPS throughput rate.
'''
with open(os.path.join(logs_directory, "ote_FAB-7081_15ktx_3ch_6ord_5kb_500batchsize_10kpayload.log"), "w") as logfile:
result = subprocess.check_output("./runote.sh -t FAB-7081_15ktx_3ch_6ord_5kb_500batchsize_10kpayload 2>&1",
shell=True,
cwd=tool_directory)
print(result)
logfile.write(result)
self.assertIn(TEST_PASS_STRING, result)
| 52.622047
| 127
| 0.612449
| 2,431
| 20,049
| 4.830522
| 0.05471
| 0.02989
| 0.034063
| 0.042579
| 0.971217
| 0.963212
| 0.938687
| 0.90803
| 0.895768
| 0.895768
| 0
| 0.072215
| 0.320365
| 20,049
| 380
| 128
| 52.760526
| 0.789593
| 0.296124
| 0
| 0.597015
| 0
| 0
| 0.197345
| 0.155267
| 0
| 0
| 0
| 0
| 0.119403
| 1
| 0.119403
| false
| 0.124378
| 0.014925
| 0
| 0.139303
| 0.119403
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
8894b9a709e1e7aed367575f8751e5cabbf691ab
| 52,150
|
py
|
Python
|
pyisstelemetry/module_dictionary.py
|
BApplB/py-iss-telemetry
|
94a863a4b7d19c96872999047449de3b0cd3c0bc
|
[
"Apache-2.0"
] | 1
|
2020-11-13T23:02:05.000Z
|
2020-11-13T23:02:05.000Z
|
pyisstelemetry/module_dictionary.py
|
BApplB/py-iss-telemetry
|
94a863a4b7d19c96872999047449de3b0cd3c0bc
|
[
"Apache-2.0"
] | null | null | null |
pyisstelemetry/module_dictionary.py
|
BApplB/py-iss-telemetry
|
94a863a4b7d19c96872999047449de3b0cd3c0bc
|
[
"Apache-2.0"
] | 1
|
2020-09-22T13:28:08.000Z
|
2020-09-22T13:28:08.000Z
|
modules_dicts = [
{
"OPCODE": "AIRLOCK000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000009",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000010",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000011",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000012",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000013",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000014",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000015",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000016",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000017",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000018",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000019",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000020",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000021",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000022",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000023",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000024",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000025",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000026",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000027",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000028",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000029",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000030",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000031",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000032",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000033",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000034",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000035",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000036",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000037",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000038",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000039",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000040",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000041",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000042",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000043",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000044",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000045",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000046",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000047",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000048",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000049",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000050",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000051",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000052",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000053",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000054",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000055",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000056",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000057",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE2000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE2000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE2000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE2000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE2000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000009",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000010",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000011",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000012",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000013",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000017",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000018",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000019",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000053",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000054",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000055",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000056",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000057",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000058",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000059",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000060",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000061",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000062",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000063",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000064",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000065",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "AIRLOCK000058",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE1000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE1000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE2000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE2000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000014",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000015",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000016",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "NODE3000020",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P1000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P1000008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P1000009",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P3000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P3000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P4000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P4000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P6000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P6000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000010",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000011",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000012",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000013",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S1000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S1000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S1000008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S3000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S3000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S4000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S4000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S6000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S6000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000066",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000067",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000068",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000069",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000070",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000071",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000072",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000073",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000074",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000075",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000076",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000077",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000078",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000079",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000080",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P1000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P1000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P1000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P4000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P4000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P4000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P4000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P4000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P4000008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P6000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P6000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P6000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P6000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P6000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P6000008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S1000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S1000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S1000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S4000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S4000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S4000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S4000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S4000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S4000008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S6000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S6000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S6000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S6000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S6000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S6000008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P1000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P1000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "P1000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S1000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S1000009",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000088",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000089",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000090",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000091",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000092",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000093",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000094",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000095",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000096",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000097",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000098",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000099",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000100",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000101",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000013",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000014",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000015",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S0000009",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000081",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000009",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000010",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000011",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000012",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000013",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000014",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000015",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000016",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000017",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000018",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000019",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000020",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000021",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000022",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000023",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000024",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "S1000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000009",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000011",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000013",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000014",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000015",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000016",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000017",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000018",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000019",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000020",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000021",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000022",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000023",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000024",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000025",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000026",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000027",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000028",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000029",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000030",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000031",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000038",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000039",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000040",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000041",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000042",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000043",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000044",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000045",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000046",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000047",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000048",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000049",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000050",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000051",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000052",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000009",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000010",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000011",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "Z1000012",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000010",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000012",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "RUSSEG000025",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000032",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000033",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000034",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000035",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000036",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000037",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000082",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000083",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000084",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000085",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000087",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000086",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "USLAB000102",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "TIME_000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "TIME_000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSAMT000001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSAMT000002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASSRMS001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASSRMS002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASSRMS003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASSRMS004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASSRMS005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASSRMS006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASSRMS007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASSRMS008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASSRMS009",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASSRMS010",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASSRMS011",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0005",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0006",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0007",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0008",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0009",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0010",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0011",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0012",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0013",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0014",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0015",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0016",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0017",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0018",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0019",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0020",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0021",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSASPDM0022",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSAMBS00001",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSAMBS00002",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSAMBA00003",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
{
"OPCODE": "CSAMBA00004",
"NAME": "null",
"DESCRIPTION": "null",
"SUBSYSTEM": "null",
"VALUE": "null",
},
]
| 22.078747
| 34
| 0.398888
| 3,374
| 52,150
| 6.164493
| 0.102549
| 0.129622
| 0.307851
| 0.372662
| 0.826434
| 0.826434
| 0.826434
| 0.826434
| 0.82427
| 0.005289
| 0
| 0.064274
| 0.387804
| 52,150
| 2,361
| 35
| 22.088098
| 0.587202
| 0
| 0
| 0.570945
| 0
| 0
| 0.398639
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
88b6bd2b7093edf6d365d08688d723c6cbb11ff9
| 16,901
|
py
|
Python
|
capt/connector/switch.py
|
tmanfree/capt
|
a6c1c12bb2677aef718f550c5fa7ffd4b71dedd4
|
[
"MIT"
] | null | null | null |
capt/connector/switch.py
|
tmanfree/capt
|
a6c1c12bb2677aef718f550c5fa7ffd4b71dedd4
|
[
"MIT"
] | null | null | null |
capt/connector/switch.py
|
tmanfree/capt
|
a6c1c12bb2677aef718f550c5fa7ffd4b71dedd4
|
[
"MIT"
] | null | null | null |
# system imports
import json
# 3rd part imports
import requests
# local imports
from connector.connector import Connector
class Switch(Connector):
    """Cisco Prime Infrastructure (CPI) REST client for switch operations.

    Each method builds a URL against the Prime server at
    ``self.cpi_ipv4_address`` and performs the HTTP request through
    ``self.error_handling`` (inherited from ``Connector``), which wraps the
    ``requests`` call with retry/error logic.  Responses are decoded with
    ``result.json()`` and unpacked via ``self.parse_json``.
    """

    # --- POST calls

    # Forcing a sync is broken only with switches on IOS-XE 03.03.03 code base
    def sync(self, dev_ipv4_address):
        """Trigger an inventory sync for a single device by IPv4 address."""
        url = "https://{}/webacs/api/v3/op/devices/syncDevices.json".format(self.cpi_ipv4_address)
        payload = {"syncDevicesDTO": {"devices": {"device": [{"address": "{}".format(dev_ipv4_address)}]}}}
        # Fire-and-forget: the response body is not inspected.
        self.error_handling(requests.post, 5, url, False, self.username, self.password, payload)

    def sync_multiple(self, dev_ipv4_list):
        """Trigger an inventory sync for several devices in one call.

        ``dev_ipv4_list`` must already be a list of ``{"address": ...}``
        dicts in the shape the API expects.  NOTE(review): this endpoint
        uses API v4, unlike most other methods here — presumably intentional.
        """
        url = "https://{}/webacs/api/v4/op/devices/syncDevices.json".format(self.cpi_ipv4_address)
        payload = {"syncDevicesDTO": {"devices": {"device": dev_ipv4_list}}}
        self.error_handling(requests.post, 5, url, False, self.username, self.password, payload)

    # --- PUT calls (usually requires templates built in Prime to execute)

    def reload(self, dev_id, timeout):
        """Deploy the API_CALL_reload_switch template to a device.

        :param dev_id: Prime device id to reload.
        :param timeout: value bound to the template's ``waittimeout`` variable.
        :return: the Prime job name for the deployment.
        """
        url = "https://{}/webacs/api/v3/op/cliTemplateConfiguration/deployTemplateThroughJob.json".format(
            self.cpi_ipv4_address)
        payload = \
            {
                "cliTemplateCommand": {
                    "targetDevices": {
                        "targetDevice": [{
                            "targetDeviceID": "{}".format(dev_id),
                            "variableValues": {
                                "variableValue": [{
                                    "name": "waittimeout",
                                    "value": "{}".format(timeout)
                                }]
                            }
                        }]
                    },
                    "templateName": "API_CALL_reload_switch"
                }
            }
        result = self.error_handling(requests.put, 5, url, False, self.username, self.password, payload)
        key_list = ['mgmtResponse', 'cliTemplateCommandJobResult', 0, 'jobName']
        return self.parse_json.value(result.json(), key_list, self.logger)

    def conf_if_vlan(self, dev_id, interface, sw_port_type, access_vlan):
        """Deploy the API_CALL_conf_if_vlan template against one interface.

        Only ``InterfaceName``, ``A1`` (port type) and ``StaticAccessVLan``
        carry values; the remaining template variables are sent empty.

        :return: the Prime job name for the deployment.
        """
        url = "https://{}/webacs/api/v3/op/cliTemplateConfiguration/deployTemplateThroughJob.json".format(
            self.cpi_ipv4_address)
        payload = \
            {
                "cliTemplateCommand": {
                    "targetDevices": {
                        "targetDevice": [{
                            "targetDeviceID": "{}".format(dev_id),
                            "variableValues": {
                                "variableValue": [{
                                    "name": "InterfaceName",
                                    "value": "{}".format(interface)
                                }, {
                                    "name": "Description",
                                    "value": ""
                                }, {
                                    "name": "A1",
                                    "value": "{}".format(sw_port_type)
                                }, {
                                    "name": "StaticAccessVLan",
                                    "value": "{}".format(access_vlan)
                                }, {
                                    "name": "TrunkAllowedVLan",
                                    "value": ""
                                }, {
                                    "name": "VoiceVlan",
                                    "value": ""
                                }, {
                                    "name": "NativeVLan",
                                    "value": ""
                                }, {
                                    "name": "PortFast",
                                    "value": ""
                                }, {
                                    "name": "duplexField",
                                    "value": ""
                                }, {
                                    "name": "spd",
                                    "value": ""
                                }]
                            }
                        }]
                    },
                    "templateName": "API_CALL_conf_if_vlan"
                }
            }
        result = self.error_handling(requests.put, 5, url, False, self.username, self.password, payload)
        key_list = ['mgmtResponse', 'cliTemplateCommandJobResult', 0, 'jobName']
        return self.parse_json.value(result.json(), key_list, self.logger)

    def conf_if_bas(self, dev_id, interface, description, access_vlan):
        """Deploy the API_CALL_conf_if_bas template (basic interface config).

        :return: the Prime job name for the deployment.
        """
        url = "https://{}/webacs/api/v3/op/cliTemplateConfiguration/deployTemplateThroughJob.json".format(
            self.cpi_ipv4_address)
        payload = \
            {
                "cliTemplateCommand": {
                    "targetDevices": {
                        "targetDevice": [{
                            "targetDeviceID": "{}".format(dev_id),
                            "variableValues": {
                                "variableValue": [{
                                    "name": "InterfaceName",
                                    "value": "{}".format(interface)
                                }, {
                                    "name": "Description",
                                    "value": "{}".format(description)
                                }, {
                                    "name": "StaticAccessVLan",
                                    "value": "{}".format(access_vlan)
                                }]
                            }
                        }]
                    },
                    "templateName": "API_CALL_conf_if_bas"
                }
            }
        result = self.error_handling(requests.put, 5, url, False, self.username, self.password, payload)
        key_list = ['mgmtResponse', 'cliTemplateCommandJobResult', 0, 'jobName']
        return self.parse_json.value(result.json(), key_list, self.logger)

    def conf_template(self, dev_ids, template_name):
        """Deploy an arbitrary named template to a list of target devices.

        :param dev_ids: list already in the ``targetDevice`` wire format.
        :param template_name: name of a template defined in Prime.
        :return: the Prime job name for the deployment.
        """
        url = "https://{}/webacs/api/v3/op/cliTemplateConfiguration/deployTemplateThroughJob.json".format(
            self.cpi_ipv4_address)
        payload = \
            {
                "cliTemplateCommand": {
                    "targetDevices": {
                        "targetDevice": dev_ids
                    },
                    "templateName": template_name
                }
            }
        result = self.error_handling(requests.put, 5, url, False, self.username, self.password, payload)
        key_list = ['mgmtResponse', 'cliTemplateCommandJobResult', 0, 'jobName']
        return self.parse_json.value(result.json(), key_list, self.logger)

    # --- GET calls

    def id_by_ip(self, dev_ipv4_address):
        """Return the Prime device id for an exact IPv4 address match."""
        url = "https://{}/webacs/api/v3/data/Devices.json?ipAddress=\"{}\"".format(self.cpi_ipv4_address,
                                                                                   dev_ipv4_address)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entityId', 0, '$']
        return self.parse_json.value(result.json(), key_list, self.logger)

    def ids_by_desc(self, desc, sw_name):
        """Return device ids whose interface description matches *desc*.

        If *sw_name* is not None, results are further narrowed to devices
        whose name contains it.
        """
        # BUG FIX: the filter prefix was mojibake ("ðernetInterface...") —
        # the leading "&eth" of the query string had been decoded as the
        # HTML entity for "ð".  Restored the intended
        # "&ethernetInterface.description=contains(" parameter.
        modified_desc_list = self.parse_var.desc_id_split(desc, "&ethernetInterface.description=contains(")
        if sw_name is not None:
            url = "https://{}/webacs/api/v3/data/InventoryDetails.json?.and_filter=true&summary.deviceName=contains({}){}&.case_sensitive=false".format(
                self.cpi_ipv4_address, sw_name, modified_desc_list)
        else:
            url = "https://{}/webacs/api/v3/data/InventoryDetails.json?.and_filter=true{}&.case_sensitive=false".format(self.cpi_ipv4_address, modified_desc_list)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        list_json = self.parse_json.value(result.json(), ['queryResponse', 'entityId'], self.logger)
        id_list = self.parse_json.ids_list(list_json)
        return id_list

    def ids_by_location(self, sw_location):
        """Return device ids for a named device group.

        Recognizes the group aliases "edgenet", "fmnet" and "corenet"
        (case-insensitive); any other value is treated as a device-name
        prefix within the Edge Networking Devices group.
        """
        if sw_location.lower() == "edgenet":
            url = "https://{}/webacs/api/v3/data/Devices.json?.and_filter=true&.group=Edge%20Networking%20Devices&.maxResults=1000".format(
                self.cpi_ipv4_address)
        elif sw_location.lower() == 'fmnet':
            url = "https://{}/webacs/api/v3/data/Devices.json?.and_filter=true&.group=FMnet&.maxResults=1000".format(
                self.cpi_ipv4_address)
        elif sw_location.lower() == 'corenet':
            url = "https://{}/webacs/api/v3/data/Devices.json?.and_filter=true&.group=Core%20Networking&.maxResults=1000".format(
                self.cpi_ipv4_address)
        else:
            url = "https://{}/webacs/api/v3/data/Devices.json?.and_filter=true&.group=Edge%20Networking%20Devices&deviceName=startsWith({})".format(
                self.cpi_ipv4_address, sw_location)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        list_json = self.parse_json.value(result.json(), ['queryResponse', 'entityId'], self.logger)
        id_list = self.parse_json.ids_list(list_json)
        return id_list

    def id_by_mac(self, address):
        """Return the Prime device id for an exact MAC address match."""
        url = "https://{}/webacs/api/v3/data/Devices.json?macAddress=\"{}\"".format(self.cpi_ipv4_address, address)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entityId', 0, '$']
        return self.parse_json.value(result.json(), key_list, self.logger)

    def id_by_hostname(self, dev_hostname):
        """Return the first device id whose name contains *dev_hostname*."""
        url = "https://{}/webacs/api/v3/data/Devices.json?deviceName=contains({})&.case_sensitive=false".format(
            self.cpi_ipv4_address, dev_hostname)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entityId', 0, '$']
        dev_id = self.parse_json.value(result.json(), key_list, self.logger)
        return dev_id

    def sync_status(self, dev_id):
        """Return the device's ``collectionStatus`` field."""
        url = "https://{}/webacs/api/v3/data/Devices/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entity', 0, 'devicesDTO', 'collectionStatus']
        return self.parse_json.value(result.json(), key_list, self.logger)

    def sync_time(self, dev_id):
        """Return the device's ``collectionTime`` field (last sync time)."""
        url = "https://{}/webacs/api/v3/data/Devices/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entity', 0, 'devicesDTO', 'collectionTime']
        return self.parse_json.value(result.json(), key_list, self.logger)

    def reachability(self, dev_id):
        """Return the device's ``reachability`` field."""
        url = "https://{}/webacs/api/v3/data/Devices/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entity', 0, 'devicesDTO', 'reachability']
        return self.parse_json.value(result.json(), key_list, self.logger)

    def software_version(self, dev_id):
        """Return the device's ``softwareVersion`` field."""
        url = "https://{}/webacs/api/v3/data/Devices/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entity', 0, 'devicesDTO', 'softwareVersion']
        return self.parse_json.value(result.json(), key_list, self.logger)

    # Switch chassis info
    def stack_members(self, dev_id):
        """Return the chassis list (stack members) from inventory details."""
        url = "https://{}/webacs/api/v3/data/InventoryDetails/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entity', 0, 'inventoryDetailsDTO', 'chassis', 'chassis']
        return self.parse_json.value(result.json(), key_list, self.logger)

    # CDP neighbour info gets populated when the device syncs
    def cdp_neighbours(self, dev_id):
        """Return the CDP neighbour list from inventory details."""
        url = "https://{}/webacs/api/v3/data/InventoryDetails/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entity', 0, 'inventoryDetailsDTO', 'cdpNeighbors', 'cdpNeighbor']
        return self.parse_json.value(result.json(), key_list, self.logger)

    def ports(self, dev_id):
        """Return the list of ethernet interfaces from inventory details."""
        url = "https://{}/webacs/api/v3/data/InventoryDetails/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entity', 0, 'inventoryDetailsDTO', 'ethernetInterfaces', 'ethernetInterface']
        return self.parse_json.value(result.json(), key_list, self.logger)

    def physical_ports(self, dev_id):
        """Return the list of physical ports from inventory details."""
        url = "https://{}/webacs/api/v3/data/InventoryDetails/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entity', 0, 'inventoryDetailsDTO', 'physicalPorts', 'physicalPort']
        return self.parse_json.value(result.json(), key_list, self.logger)

    def port_detail_dict(self, dev_id, interface_name):
        """Return the ethernetInterface dict whose name is *interface_name*.

        Returns None when no interface matches.
        """
        url = "https://{}/webacs/api/v3/data/InventoryDetails/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entity', 0, 'inventoryDetailsDTO', 'ethernetInterfaces', 'ethernetInterface']
        # BUG FIX: previously passed the raw Response object instead of the
        # decoded JSON body (result.json()), unlike every sibling method,
        # so the key lookup operated on the wrong type.
        interface_list_of_dicts = self.parse_json.value(result.json(), key_list, self.logger)
        for interface_dict in interface_list_of_dicts:
            for key in interface_dict:  # iterating over dict's return keys only
                if interface_dict[key] == interface_name:
                    return interface_dict

    def json_basic(self, dev_id):
        """Return the raw Devices JSON document for a device."""
        # API v3 call is deprecated, need to change when Cisco Prime is upgraded
        url = "https://{}/webacs/api/v3/data/Devices/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        return result.json()

    def json_detailed(self, dev_id):
        """Return the raw InventoryDetails JSON document for a device."""
        url = "https://{}/webacs/api/v3/data/InventoryDetails/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        return result.json()

    # --- print API calls, mainly for testing

    def print_json_info(self, dev_id):
        """Pretty-print the Devices JSON document (debug helper)."""
        url = "https://{}/webacs/api/v3/data/Devices/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        print(json.dumps(result.json(), indent=4))

    def print_json_detailed_info(self, dev_id):
        """Pretty-print the InventoryDetails JSON document (debug helper)."""
        url = "https://{}/webacs/api/v3/data/InventoryDetails/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        print(json.dumps(result.json(), indent=4))

    # --- end print API calls, mainly for testing

    def find_config_archive_id(self, dev_id):
        """Return the most recent config-archive version id for a device.

        NOTE(review): the query filters on ``deviceName={dev_id}`` — confirm
        callers pass a device name here rather than a numeric id.
        """
        url = "https://{}/webacs/api/v3/data/ConfigVersions.json?deviceName={}".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entityId']
        temp = self.parse_json.value(result.json(), key_list, self.logger)
        # Entries appear oldest-first; the last one is the newest version.
        return temp[-1]['$']

    def config_archive_by_id(self, dev_id):
        """Return the fileId of the RUNNINGCONFIG file in a config version.

        Returns None when the version holds no running-config file.
        """
        url = "https://{}/webacs/api/v3/data/ConfigVersions/{}.json".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['queryResponse', 'entity', 0, 'configVersionsDTO', 'fileInfos', 'fileInfo']
        file_infos = self.parse_json.value(result.json(), key_list, self.logger)
        for file_info in file_infos:
            if file_info['fileState'] == 'RUNNINGCONFIG':
                return file_info['fileId']

    def config_archive_content(self, dev_id):
        """Return the sanitized content of an archived config file.

        NOTE(review): despite the parameter name, the value is sent as the
        ``fileId`` query parameter (as returned by config_archive_by_id) —
        confirm with callers.
        """
        url = "https://{}/webacs/api/v1/op/configArchiveService/extractSanitizedFile.json?fileId={}".format(self.cpi_ipv4_address, dev_id)
        result = self.error_handling(requests.get, 5, url, False, self.username, self.password)
        key_list = ['mgmtResponse', 'extractFileResult']
        return self.parse_json.value(result.json(), key_list, self.logger)
| 50.002959
| 162
| 0.571268
| 1,774
| 16,901
| 5.264374
| 0.125705
| 0.021416
| 0.046472
| 0.05643
| 0.802441
| 0.784988
| 0.768605
| 0.754042
| 0.743656
| 0.729736
| 0
| 0.01225
| 0.294835
| 16,901
| 338
| 163
| 50.002959
| 0.771354
| 0.028756
| 0
| 0.565385
| 0
| 0.026923
| 0.235108
| 0.011646
| 0
| 0
| 0
| 0
| 0
| 1
| 0.103846
| false
| 0.103846
| 0.011538
| 0
| 0.207692
| 0.015385
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
ee4686d9d269e9f98405efddf2a966c12def235a
| 11,383
|
py
|
Python
|
tests/treepath/test_filter_match.py
|
monkeydevtools/treepath-python
|
56f6cbf662f8a4c13f0c9e753a839fc9f6323dba
|
[
"Apache-2.0"
] | 2
|
2021-05-26T08:26:25.000Z
|
2021-09-24T21:26:01.000Z
|
tests/treepath/test_filter_match.py
|
monkeydevtools/treepath-python
|
56f6cbf662f8a4c13f0c9e753a839fc9f6323dba
|
[
"Apache-2.0"
] | null | null | null |
tests/treepath/test_filter_match.py
|
monkeydevtools/treepath-python
|
56f6cbf662f8a4c13f0c9e753a839fc9f6323dba
|
[
"Apache-2.0"
] | null | null | null |
import os
from tests.utils.traverser_utils import *
from treepath import get, path, find_matches, has, get_match, wc, find, MatchNotFoundError, PathSyntaxError, Match, \
has_all, has_any
from treepath.path.traverser.imaginary_match import ImaginaryMatch
def test_keys_get_root_has_a_MatchNotFoundError(keys):
    """get() must raise MatchNotFoundError when the root lacks key 'a'."""
    query = path[has(path.a)]
    with pytest.raises(MatchNotFoundError):
        get(query, keys)
def test_PathSyntaxError_validate_message(keys):
    """has() called with a plain int reports a descriptive PathSyntaxError."""
    expected = (
        "PathSyntaxError(Invalid path [<class 'int'>] argument. Expecting PathBuilderPredicate, "
        "PathPredicate, or Callable[[Match], Any]])"
    )
    with pytest.raises(PathSyntaxError) as exc_info:
        has(1)
    assert repr(exc_info.value) == expected
def test_keys_get_root_has_x(keys):
    """The filtered root matches when key 'x' exists, yielding the whole document."""
    assert get(path[has(path.x)], keys) == keys
def test_keys_get_root_x_were_root_has_a_MatchNotFoundError(keys):
    """Descending past a failed root filter must raise MatchNotFoundError."""
    query = path[has(path.a)].x
    with pytest.raises(MatchNotFoundError):
        get(query, keys)
def test_keys_get_root_x_were_root_has_y(keys):
    """When the root filter on 'y' passes, .x resolves to the 'x' child."""
    assert get(path[has(path.y)].x, keys) == keys["x"]
def test_keys_get_match_rec_verify_imaginary_match_path_as_list_correct(keys):
    """A rec match wraps an ImaginaryMatch and path_as_list walks parent to child."""
    match = get_match(path.x.x.rec, keys)
    assert isinstance(match._traverser_match, ImaginaryMatch)
    expected_chain = [match.parent.parent, match.parent, match]
    assert match.path_as_list == expected_chain
def test_keys_match_all_root_wc_has_x(keys):
    """Every wildcard child satisfying has(path.x) is yielded, in fixture order."""
    result = find_matches(path.wc[has(path.x)], keys)
    for expected_path, expected_value in gen_test_data(keys, yaia):
        actual = next(result)
        assert str(actual) == f"{expected_path}={actual.data}"
    assert_done_iterating(result)
def test_keys_match_all_root_wc_has_a(keys):
    """No wildcard child has an 'a' key, so the iterator is immediately empty."""
    result = find_matches(path.wc[has(path.a)], keys)
    assert_done_iterating(result)
def test_keys_match_all_all_has_x(keys):
    """Recursive search yields every node with an 'x' key; 13 in the fixture."""
    exp_iter = find_matches(path.rec[has(path.x)], keys)
    count = 0
    for expected_path, expected_value in gen_test_data(keys, yria, yaia, yaia):
        count += 1
        actual = next(exp_iter)
        assert repr(actual) == f"{expected_path}={expected_value}"
    assert count == 13
    assert_done_iterating(exp_iter)
def test_keys_match_all_all_has_a(keys):
    """Recursive search with an unsatisfiable filter yields nothing."""
    exp_iter = find_matches(path.rec[has(path.a)], keys)
    assert_done_iterating(exp_iter)
def test_keys_match_all_all_has_x_eq_1(keys):
    """A nested recursive equality filter matches exactly the two expected 'x' nodes."""
    exp_iter = find_matches(path.rec.x[has(path.rec.x == "1")], keys)
    actual = next(exp_iter)
    expected = get_match(path.x, keys)
    assert repr(actual) == repr(expected)
    actual = next(exp_iter)
    expected = get_match(path.x.x, keys)
    assert repr(actual) == repr(expected)
    assert_done_iterating(exp_iter)
def test_keys_lt(keys):
    """Values strictly below 14 pass the has(path < 14, int) filter."""
    expected = list(map(str, range(1, 14)))
    actual = list(find(path.wc.wc.wc[has(path < 14, int)], keys))
    assert actual == expected
def test_keys_le(keys):
    """Values at or below 14 pass the has(path <= 14, int) filter."""
    expected = [str(v) for v in range(1, 15)]
    actual = [v for v in find(path.wc.wc.wc[has(path <= 14, int)], keys)]
    assert actual == expected
def test_keys_eq(keys):
    """Exactly one value equals 14 under the int-coercing filter."""
    expected = ["14"]
    actual = [v for v in find(path.wc.wc.wc[has(path == 14, int)], keys)]
    assert actual == expected
def test_keys_ne(keys):
    """All values except 14 pass the has(path != 14, int) filter."""
    expected = [str(v) for v in range(1, 28)]
    expected.remove("14")
    actual = [v for v in find(path.wc.wc.wc[has(path != 14, int)], keys)]
    assert actual == expected
def test_keys_gt(keys):
    """Values strictly above 14 pass the has(path > 14, int) filter."""
    expected = [str(v) for v in range(15, 28)]
    actual = [v for v in find(path.wc.wc.wc[has(path > 14, int)], keys)]
    assert actual == expected
def test_keys_ge(keys):
    """Values at or above 14 pass the has(path >= 14, int) filter."""
    expected = [str(v) for v in range(14, 28)]
    actual = [v for v in find(path.wc.wc.wc[has(path >= 14, int)], keys)]
    assert actual == expected
def test_3d_list_get_root_has_a_MatchNotFoundError(three_dimensional_list):
    """get raises MatchNotFoundError when the list root has no index 4."""
    with pytest.raises(MatchNotFoundError):
        get(path[has(path[4])], three_dimensional_list)
def test_3d_list_get_root_has_1(three_dimensional_list):
    """The list root matches itself when it satisfies the has(path[1]) filter."""
    expected = three_dimensional_list
    actual = get(path[has(path[1])], three_dimensional_list)
    assert actual == expected
def test_3d_list_get_root_1_were_root_has_a_MatchNotFoundError(three_dimensional_list):
    """A failing root filter blocks descent into index 1."""
    with pytest.raises(MatchNotFoundError):
        get(path[has(path[4])][1], three_dimensional_list)
def test_3d_list_get_root_1_were_root_has_1(three_dimensional_list):
    """A passing root filter allows descent into index 1."""
    expected = three_dimensional_list[1]
    actual = get(path[has(path[1])][1], three_dimensional_list)
    assert actual == expected
def test_3d_list_match_all_root_wc_has_1(three_dimensional_list):
    """Every wildcard element satisfying has(path[1]) is yielded, in fixture order."""
    result = find_matches(path[wc][has(path[1])], three_dimensional_list)
    for expected_path, expected_value in gen_test_data(three_dimensional_list, yaia):
        actual = next(result)
        assert str(actual) == f"{expected_path}={actual.data}"
    assert_done_iterating(result)
def test_3d_list_match_all_root_wc_has_a(three_dimensional_list):
    """No wildcard element has an index 4, so the iterator is immediately empty."""
    result = find_matches(path[wc][has(path[4])], three_dimensional_list)
    assert_done_iterating(result)
def test_3d_list_match_all_all_has_1(three_dimensional_list):
    """Recursive search yields every node with an index 1; 13 in the fixture."""
    exp_iter = find_matches(path.rec[has(path[1])], three_dimensional_list)
    count = 0
    for expected_path, expected_value in gen_test_data(three_dimensional_list, yria, yaia, yaia):
        count += 1
        actual = next(exp_iter)
        assert repr(actual) == f"{expected_path}={expected_value}"
    assert count == 13
    assert_done_iterating(exp_iter)
def test_3d_list_match_all_all_has_a(three_dimensional_list):
    """Recursive search with an unsatisfiable index filter yields nothing."""
    exp_iter = find_matches(path.rec[has(path[4])], three_dimensional_list)
    assert_done_iterating(exp_iter)
def test_3d_list_match_all_all_has_1_eq_1(three_dimensional_list):
    """A nested recursive equality filter matches exactly the two expected index-1 nodes."""
    exp_iter = find_matches(path.rec[1][has(path.rec[1] == 14)], three_dimensional_list)
    actual = next(exp_iter)
    expected = get_match(path[1], three_dimensional_list)
    assert repr(actual) == repr(expected)
    actual = next(exp_iter)
    expected = get_match(path[1][1], three_dimensional_list)
    assert repr(actual) == repr(expected)
    assert_done_iterating(exp_iter)
def test_3d_list_lt(three_dimensional_list):
    """Values strictly below 14 pass the has(path < 14) filter."""
    expected = list(range(1, 14))
    actual = list(find(path[wc][wc][wc][has(path < 14)], three_dimensional_list))
    assert actual == expected
def test_3d_list_le(three_dimensional_list):
    """Values at or below 14 pass the has(path <= 14) filter."""
    expected = [v for v in range(1, 15)]
    actual = [v for v in find(path[wc][wc][wc][has(path <= 14)], three_dimensional_list)]
    assert actual == expected
def test_3d_list_eq(three_dimensional_list):
    """Exactly one value equals 14."""
    expected = [14]
    actual = [v for v in find(path[wc][wc][wc][has(path == 14)], three_dimensional_list)]
    assert actual == expected
def test_3d_list_ne(three_dimensional_list):
    """All values except 14 pass the has(path != 14) filter."""
    expected = [v for v in range(1, 28)]
    expected.remove(14)
    actual = [v for v in find(path[wc][wc][wc][has(path != 14)], three_dimensional_list)]
    assert actual == expected
def test_3d_list_gt(three_dimensional_list):
    """Values strictly above 14 pass the has(path > 14) filter."""
    expected = [v for v in range(15, 28)]
    actual = [v for v in find(path[wc][wc][wc][has(path > 14)], three_dimensional_list)]
    assert actual == expected
def test_3d_list_ge(three_dimensional_list):
    """Values at or above 14 pass the has(path >= 14) filter."""
    expected = [v for v in range(14, 28)]
    actual = [v for v in find(path[wc][wc][wc][has(path >= 14)], three_dimensional_list)]
    assert actual == expected
def test_keys_custom_filter(keys):
    """A plain callable taking a Match can be used directly as a filter."""
    def custom_filter(match: Match):
        # Select only the node whose data is the string "2".
        return match.data == "2"
    expected = "2"
    actual = get(path.x.x.wc[custom_filter], keys)
    assert actual == expected
def test_keys_get_root_has_x_and_y_and_z(keys):
    """has_all requires every sub-path to match; all three keys exist."""
    expected = keys
    actual = get(path[has_all(path.x, path.y, path.z)], keys)
    assert actual == expected
def test_keys_get_root_has_x_and_y_and_a(keys):
    """has_all fails when any one sub-path (path.a) is missing."""
    with pytest.raises(MatchNotFoundError):
        get(path[has_all(path.x, path.y, path.a)], keys)
def test_keys_get_root_has_x_or_y_or_z(keys):
    """has_any succeeds when at least one sub-path matches, in any position."""
    expected = keys
    actual = get(path[has_any(path.x, path.b, path.c)], keys)
    assert actual == expected
    actual = get(path[has_any(path.a, path.y, path.c)], keys)
    assert actual == expected
    actual = get(path[has_any(path.a, path.b, path.z)], keys)
    assert actual == expected
def test_keys_get_root_has_a_or_b_and_c(keys):
    """has_any fails when none of the sub-paths match."""
    with pytest.raises(MatchNotFoundError):
        get(path[has_any(path.a, path.b, path.c)], keys)
def test_keys_get_root_has_these_order_one(keys):
    """has.these binds four mixed predicates (tuple, path, callable) to a custom
    OR-combining predicate; argument order permutation one of four."""
    keys["a"] = {"b": {"c": ':)'}}
    def zp(parent_match: Match):
        # Plain-callable predicate: true when the node's 'z' value is '27'.
        return parent_match.data.get('z', None) == '27'
    @has.these((path.x == 1, int), path.y == '14', path.c, zp)
    def predicate(parent_match: Match, one, two, three, four):
        return one(parent_match) or two(parent_match) or three(parent_match) or four(parent_match)
    itr = find(path.wc.wc[predicate], keys)
    actual = next(itr)
    assert actual == {"x": "1", "y": "2", "z": "3"}
    actual = next(itr)
    assert actual == {'x': '13', 'y': '14', 'z': '15'}
    actual = next(itr)
    assert actual == {"x": "25", "y": "26", "z": "27"}
    actual = next(itr)
    assert actual == {"c": ':)'}
def test_keys_get_root_has_these_order_two(keys):
    """Same scenario as order_one with the predicates rotated; results are identical."""
    keys["a"] = {"b": {"c": ':)'}}
    def zp(parent_match: Match):
        # Plain-callable predicate: true when the node's 'z' value is '27'.
        return parent_match.data.get('z', None) == '27'
    @has.these(zp, (path.x == 1, int), path.y == '14', path.c)
    def predicate(parent_match: Match, one, two, three, four):
        return one(parent_match) or two(parent_match) or three(parent_match) or four(parent_match)
    itr = find(path.wc.wc[predicate], keys)
    actual = next(itr)
    assert actual == {"x": "1", "y": "2", "z": "3"}
    actual = next(itr)
    assert actual == {'x': '13', 'y': '14', 'z': '15'}
    actual = next(itr)
    assert actual == {"x": "25", "y": "26", "z": "27"}
    actual = next(itr)
    assert actual == {"c": ':)'}
def test_keys_get_root_has_these_order_three(keys):
    """Same scenario as order_one with the predicates rotated; results are identical."""
    keys["a"] = {"b": {"c": ':)'}}
    def zp(parent_match: Match):
        # Plain-callable predicate: true when the node's 'z' value is '27'.
        return parent_match.data.get('z', None) == '27'
    @has.these(path.c, zp, (path.x == 1, int), path.y == '14')
    def predicate(parent_match: Match, one, two, three, four):
        return one(parent_match) or two(parent_match) or three(parent_match) or four(parent_match)
    itr = find(path.wc.wc[predicate], keys)
    actual = next(itr)
    assert actual == {"x": "1", "y": "2", "z": "3"}
    actual = next(itr)
    assert actual == {'x': '13', 'y': '14', 'z': '15'}
    actual = next(itr)
    assert actual == {"x": "25", "y": "26", "z": "27"}
    actual = next(itr)
    assert actual == {"c": ':)'}
def test_keys_get_root_has_these_order_four(keys):
    """Same scenario as order_one with the predicates rotated; results are identical."""
    keys["a"] = {"b": {"c": ':)'}}
    def zp(parent_match: Match):
        # Plain-callable predicate: true when the node's 'z' value is '27'.
        return parent_match.data.get('z', None) == '27'
    @has.these(path.y == '14', path.c, zp, (path.x == 1, int))
    def predicate(parent_match: Match, one, two, three, four):
        return one(parent_match) or two(parent_match) or three(parent_match) or four(parent_match)
    itr = find(path.wc.wc[predicate], keys)
    actual = next(itr)
    assert actual == {"x": "1", "y": "2", "z": "3"}
    actual = next(itr)
    assert actual == {'x': '13', 'y': '14', 'z': '15'}
    actual = next(itr)
    assert actual == {"x": "25", "y": "26", "z": "27"}
    actual = next(itr)
    assert actual == {"c": ':)'}
| 31.796089
| 117
| 0.67021
| 1,763
| 11,383
| 4.078276
| 0.062961
| 0.039917
| 0.100139
| 0.021419
| 0.882198
| 0.873992
| 0.860083
| 0.839638
| 0.789569
| 0.688456
| 0
| 0.021489
| 0.182377
| 11,383
| 357
| 118
| 31.885154
| 0.751048
| 0
| 0
| 0.489627
| 0
| 0
| 0.036106
| 0.014583
| 0
| 0
| 0
| 0
| 0.248963
| 1
| 0.207469
| false
| 0
| 0.016598
| 0.037344
| 0.261411
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9993e5a9b843d69e9d48e51122af3aec7ab817f
| 5,864
|
py
|
Python
|
utils/port_data.py
|
adavila0703/warehouse-hub
|
29778d605e372d6d6b41d05a3637edefb047f0bc
|
[
"MIT"
] | null | null | null |
utils/port_data.py
|
adavila0703/warehouse-hub
|
29778d605e372d6d6b41d05a3637edefb047f0bc
|
[
"MIT"
] | null | null | null |
utils/port_data.py
|
adavila0703/warehouse-hub
|
29778d605e372d6d6b41d05a3637edefb047f0bc
|
[
"MIT"
] | null | null | null |
from openpyxl import load_workbook
import sqlite3
from datetime import datetime
def lt_check(time1, time2):
    """Return the elapsed time between two timestamp strings.

    Both arguments are expected to look like ``YYYY-MM-DD HH:MM:SS[.ffffff]``
    (the default ``str(datetime)`` layout).  Fractional seconds are discarded,
    matching the original hand-rolled parser.  Returns ``str(tm2 - tm1)``,
    or '' when either value is blank or malformed (the original only caught
    IndexError and crashed on non-numeric fields; ValueError is now caught
    too and treated the same way).
    """
    def _parse(stamp):
        # Drop the fractional-second tail, then parse the remainder.
        return datetime.strptime(stamp.split('.')[0], '%Y-%m-%d %H:%M:%S')

    try:
        return str(_parse(time2) - _parse(time1))
    except (IndexError, ValueError):
        return ''
def port_micro():
    """Port all micro data from micro.xlsx into the data.db database.

    Reads columns A-L of Sheet1 row by row until the first empty 'A' cell,
    derives the rec->start and start->complete durations via lt_check, and
    inserts one record per row into mag_table.  The connection is now always
    closed (the original leaked it), and the unused ``ws`` binding is gone.

    NOTE(review): column L (obj[11]) is read but never inserted — confirm
    whether it was meant to feed the INSERT.
    """
    sheet_range = load_workbook('micro.xlsx')['Sheet1']
    letters = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L']
    query = ('INSERT INTO mag_table(date_created, employee, item, serial_num, ins_type, rec_date, '
             'start_date, accessories, appearance, functions, cleaning, complete, notes) '
             'VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?)')
    connection = sqlite3.connect('data.db')
    try:
        cursor = connection.cursor()
        row = 1
        # Stop at the first row whose 'A' cell is empty, as the original did.
        while sheet_range[f'A{row}'].value is not None:
            obj = []
            for letter in letters:
                cell = sheet_range[f'{letter}{row}'].value
                obj.append('' if cell is None else str(cell))
            rtos = lt_check(obj[5], obj[7])   # received -> start duration
            stoc = lt_check(obj[7], obj[10])  # start -> complete duration
            cursor.execute(query,
                           (obj[0], obj[1], obj[2], obj[3], obj[4], obj[5], obj[6],
                            obj[7], obj[8], obj[9], obj[10], rtos, stoc))
            connection.commit()
            row += 1
    finally:
        connection.close()
def port_marking():
    """Port all marking data from marking.xlsx into the data.db database.

    Same row-walking scheme as port_micro: columns A-L until the first empty
    'A' cell, durations via lt_check, one INSERT per row.  The connection is
    now always closed and the unused ``ws`` binding is gone.

    NOTE(review): this inserts into mag_table, the same table as port_micro —
    likely a copy-paste of the table name; confirm the intended target table.
    NOTE(review): column L (obj[11]) is read but never inserted.
    """
    sheet_range = load_workbook('marking.xlsx')['Sheet1']
    letters = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L']
    query = ('INSERT INTO mag_table(date_created, employee, item, serial_num, ins_type, rec_date, '
             'start_date, accessories, appearance, functions, cleaning, complete, notes) '
             'VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?)')
    connection = sqlite3.connect('data.db')
    try:
        cursor = connection.cursor()
        row = 1
        while sheet_range[f'A{row}'].value is not None:
            obj = []
            for letter in letters:
                cell = sheet_range[f'{letter}{row}'].value
                obj.append('' if cell is None else str(cell))
            rtos = lt_check(obj[5], obj[7])   # received -> start duration
            stoc = lt_check(obj[7], obj[10])  # start -> complete duration
            cursor.execute(query,
                           (obj[0], obj[1], obj[2], obj[3], obj[4], obj[5], obj[6],
                            obj[7], obj[8], obj[9], obj[10], rtos, stoc))
            connection.commit()
            row += 1
    finally:
        connection.close()
def port_printing():
    """Port all printing data from pct.xlsx into the data.db database.

    Reads columns A-W of Sheet1 row by row until the first empty 'A' cell
    and inserts one record per row.  The connection is now always closed and
    the unused ``ws`` binding is gone.

    NOTE(review): 23 columns (A-W) are read but only obj[0..10] plus the two
    derived durations are inserted, and the target is mag_table like the
    other porters — both look like copy-paste remnants; confirm intent.
    """
    sheet_range = load_workbook('pct.xlsx')['Sheet1']
    letters = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U',
               'V', 'W']
    query = ('INSERT INTO mag_table(date_created, employee, item, serial_num, ins_type, rec_date, '
             'start_date, accessories, appearance, functions, cleaning, complete, notes) '
             'VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?)')
    connection = sqlite3.connect('data.db')
    try:
        cursor = connection.cursor()
        row = 1
        while sheet_range[f'A{row}'].value is not None:
            obj = []
            for letter in letters:
                cell = sheet_range[f'{letter}{row}'].value
                obj.append('' if cell is None else str(cell))
            rtos = lt_check(obj[5], obj[7])   # received -> start duration
            stoc = lt_check(obj[7], obj[10])  # start -> complete duration
            cursor.execute(query,
                           (obj[0], obj[1], obj[2], obj[3], obj[4], obj[5], obj[6],
                            obj[7], obj[8], obj[9], obj[10], rtos, stoc))
            connection.commit()
            row += 1
    finally:
        connection.close()
if __name__ == '__main__':
    # Run all three one-off porting jobs when executed as a script.
    port_micro()
    port_marking()
    port_printing()
| 37.350318
| 119
| 0.448329
| 634
| 5,864
| 4.059937
| 0.201893
| 0.027972
| 0.038462
| 0.062937
| 0.883838
| 0.782828
| 0.768842
| 0.768842
| 0.742424
| 0.742424
| 0
| 0.033847
| 0.380286
| 5,864
| 156
| 120
| 37.589744
| 0.674463
| 0.025068
| 0
| 0.782258
| 0
| 0
| 0.171023
| 0.07525
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032258
| false
| 0
| 0.024194
| 0
| 0.072581
| 0.016129
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9bdc12436a632cde28933eaa9ca1468b6e00da0
| 6,465
|
py
|
Python
|
client_messages.py
|
Szredor/archvm_manager
|
73c12f0aa1a8f33fdf088ec62c69b632be8ce966
|
[
"MIT"
] | 1
|
2021-02-16T17:21:47.000Z
|
2021-02-16T17:21:47.000Z
|
client_messages.py
|
Szredor/archvm_manager
|
73c12f0aa1a8f33fdf088ec62c69b632be8ce966
|
[
"MIT"
] | null | null | null |
client_messages.py
|
Szredor/archvm_manager
|
73c12f0aa1a8f33fdf088ec62c69b632be8ce966
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3
import subprocess
import socket
import sys
import os
sys.path.append("./common")
import sockets
import xml_parsing
def printHelp() -> None:
    """Print the interactive command menu for a regular client."""
    print("Type number of machine to connect with.")
    print("Type r to refresh list of domains.")  # fixed typo: 'od' -> 'of'
    print("Type \"exit\" to quit.")
    print("Type \"help\" or ? for this message.")
def printAdminHelp() -> None:
    """Print the interactive command menu for an admin client."""
    print("Type number of machine to change it state.")
    print("Type r to refresh list of domains.")  # fixed typo: 'od' -> 'of'
    print("Type \"exit\" to quit.")
    print("Type \"help\" or ? for this message.")
def printError(text) -> None:
    """Print *text* to stdout prefixed with 'ERROR: '."""
    print(f'ERROR: {text}')
def helloMessage(address, port, bufSize) -> bool:
    """Send a HELLO packet to the server and print its greeting.

    Returns True on success; False if the connection could not be made,
    timed out, was reset, or the reply was not a HELLO packet.
    """
    sock = sockets.create_connected_socket(address, port)
    if sock is None:
        return False
    result = True
    try:
        sockets.writeSocket(sock, (chr(sockets.HELLO)).encode(encoding='utf-8'))
        data = sockets.readSocket(sock, bufSize)
        if data[0] != sockets.HELLO:
            raise RuntimeError("Wrong HELLO packet received")
        # Byte 0 is the message type; the remainder is the greeting text.
        print(data.decode(encoding='utf-8')[1:])
    except socket.timeout:
        printError(f"Connection timed out")
        result = False
    except ConnectionResetError as err:
        printError(f"Connection from {address} reset")
        result = False
    except RuntimeError as err:
        printError (err)
        result = False
    finally:
        sock.close()
    return result
def connectMessage(name, address, port, bufSize) -> bool:
    """Ask the server to connect the client to domain *name*.

    Returns True on success; False on connection failure, timeout, reset,
    or a server-side ERROR reply (whose text is printed).
    """
    sock = sockets.create_connected_socket(address, port)
    if sock is None:
        return False
    result = True
    try:
        sockets.writeSocket(sock, (chr(sockets.CONNECT) + name).encode(encoding='utf-8'))
        data = sockets.readSocket(sock, bufSize)
        if data[0] == sockets.ERROR:
            # Server rejected the request; the payload is the error text.
            printError(data.decode(encoding='utf-8')[1:])
            result = False
    except socket.timeout:
        printError(f"Connection timed out")
        result = False
    except ConnectionResetError as err:
        printError(f"Connection from {address} reset")
        result = False
    except RuntimeError as err:
        printError (err)
        result = False
    finally:
        sock.close()
    return result
def disconnectMessage(name, address, port, bufSize) -> bool:
    """Ask the server to disconnect the client from domain *name*.

    Returns True on success; False on connection failure, timeout, reset,
    or a server-side ERROR reply (whose text is printed).
    """
    sock = sockets.create_connected_socket(address, port)
    if sock is None:
        return False
    result = True
    try:
        sockets.writeSocket(sock, (chr(sockets.DISCONNECT) + name).encode(encoding='utf-8'))
        data = sockets.readSocket(sock, bufSize)
        if data[0] == sockets.ERROR:
            printError(data.decode(encoding='utf-8')[1:])
            result = False
    except socket.timeout:
        printError(f"Connection timed out")
        result = False
    except ConnectionResetError as err:
        printError(f"Connection from {address} reset")
        result = False
    except RuntimeError as err:
        printError (err)
        result = False
    finally:
        sock.close()
    return result
def heartbeatMessage(name, address, port, bufSize) -> bool:
    """Send a HEARTBEAT packet for domain *name*.

    Returns True on success, False otherwise.  Unlike the other message
    helpers, transport failures here are deliberately silent — a heartbeat
    miss is reported only through the return value.
    """
    sock = sockets.create_connected_socket(address, port)
    if sock is None:
        return False
    result = True
    try:
        sockets.writeSocket(sock, (chr(sockets.HEARTBEAT) + name).encode(encoding='utf-8'))
        data = sockets.readSocket(sock, bufSize)
        if data[0] == sockets.ERROR:
            printError(data.decode(encoding='utf-8')[1:])
            result = False
    except socket.timeout:
        result = False
    except ConnectionResetError as err:
        result = False
    except RuntimeError as err:
        result = False
    finally:
        sock.close()
    return result
def refreshMessage(address, port, bufSize) -> [xml_parsing.Domain]:
    """Request the current domain list from the server.

    Returns the parsed list of Domain objects on success, or None when the
    connection could not be made or timed out / was reset.
    NOTE(review): a RuntimeError yields [] while timeout/reset leave None —
    callers see two different "failure" values; confirm that is intended.
    """
    sock = sockets.create_connected_socket(address, port)
    if sock is None:
        return None
    result = None
    try:
        sockets.writeSocket(sock, (chr(sockets.REFRESH)).encode(encoding='utf-8'))
        data = sockets.readSocket(sock, bufSize).decode(encoding='utf-8')
        # Byte 0 is the message type; the rest is the XML domain listing.
        result = xml_parsing.importDomainsFromString(data[1:])
    except socket.timeout:
        printError(f"Connection timed out")
    except ConnectionResetError as err:
        printError(f"Connection from {address} reset")
    except RuntimeError as err:
        printError (err)
        result = []
    finally:
        sock.close()
    return result
def bootMessage(name, address, port, bufSize) -> bool:
    """Ask the server to boot domain *name*.

    Returns True on success; False on connection failure, timeout, reset,
    or a server-side ERROR reply (whose text is printed).
    """
    sock = sockets.create_connected_socket(address, port)
    if sock is None:
        return False
    result = True
    try:
        sockets.writeSocket(sock, (chr(sockets.BOOT) + name).encode(encoding='utf-8'))
        data = sockets.readSocket(sock, bufSize)
        if data[0] == sockets.ERROR:
            printError(data.decode(encoding='utf-8')[1:])
            result = False
    except socket.timeout:
        printError(f"Connection timed out")
        result = False
    except ConnectionResetError as err:
        printError(f"Connection from {address} reset")
        result = False
    except RuntimeError as err:
        printError (err)
        result = False
    finally:
        sock.close()
    return result
def shutdownMessage(name, address, port, bufSize) -> bool:
    """Ask the server to shut down domain *name*.

    Returns True on success; False on connection failure, timeout, reset,
    or a server-side ERROR reply (whose text is printed).
    """
    sock = sockets.create_connected_socket(address, port)
    if sock is None:
        return False
    result = True
    try:
        sockets.writeSocket(sock, (chr(sockets.SHUTDOWN) + name).encode(encoding='utf-8'))
        data = sockets.readSocket(sock, bufSize)
        if data[0] == sockets.ERROR:
            printError(data.decode(encoding='utf-8')[1:])
            result = False
    except socket.timeout:
        printError(f"Connection timed out")
        result = False
    except ConnectionResetError as err:
        printError(f"Connection from {address} reset")
        result = False
    except RuntimeError as err:
        printError (err)
        result = False
    finally:
        sock.close()
    return result
def runMoonlight(address, path) -> int:
    """Run the Moonlight client command *path* through the shell and return its exit status.

    NOTE(review): ``address`` is currently unused — the commented-out
    subprocess variants below did use it; confirm which behaviour is wanted.
    """
    processStatus = os.system(path)
    #processStatus = subprocess.run([path, 'stream', address, 'mstsc.exe'])
    #processStatus = subprocess.run([path, 'quit', 'address'])
    return processStatus
# Run RDP and wait for it to exit.
def runRDP(address, path) -> int:
    """Launch an RDP session to *address* via PowerShell and return the shell exit status.

    NOTE(review): *path* and *address* are interpolated straight into a shell
    command — command injection if either can come from untrusted input;
    consider subprocess.run with an argument list.
    """
    processStatus = os.system(f'powershell -Command Start-Process -FilePath {path} -ArgumentList "/v:{address}" -Wait')
    return processStatus
| 31.536585
| 119
| 0.634029
| 740
| 6,465
| 5.516216
| 0.164865
| 0.061979
| 0.070799
| 0.044586
| 0.817981
| 0.817981
| 0.761391
| 0.736404
| 0.736404
| 0.700882
| 0
| 0.005831
| 0.257231
| 6,465
| 204
| 120
| 31.691176
| 0.844232
| 0.026759
| 0
| 0.782123
| 0
| 0
| 0.118639
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067039
| false
| 0
| 0.039106
| 0
| 0.195531
| 0.201117
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9ec8f2ae1de00249557588a90580d1c8081d4b5
| 2,858
|
py
|
Python
|
project/AutoEncoder.py
|
Challyfilio/NAIC2021
|
11b38a920dcc902f9b798dc43ae360062862e6e4
|
[
"MIT"
] | null | null | null |
project/AutoEncoder.py
|
Challyfilio/NAIC2021
|
11b38a920dcc902f9b798dc43ae360062862e6e4
|
[
"MIT"
] | null | null | null |
project/AutoEncoder.py
|
Challyfilio/NAIC2021
|
11b38a920dcc902f9b798dc43ae360062862e6e4
|
[
"MIT"
] | null | null | null |
# ALL relu
import torch.nn as nn
class AutoEncoder(nn.Module):
    """Symmetric fully-connected auto-encoder: 2048-dim input compressed to
    ``byte`` dimensions and reconstructed.

    For ``byte`` in {256, 128, 64} the layer ladder is
    2048 -> 1024 -> 512 -> 256 [-> 128 [-> 64]]; any other value uses the
    generic bottleneck 2048 -> 1024 -> 512 -> byte.  This reproduces the
    original four hard-coded branches, generated instead of copy-pasted.
    ReLU follows every Linear layer except the encoder's bottleneck output;
    the decoder (as in the original) ends with a trailing ReLU.
    """

    # Widths of the full ladder, truncated at the requested bottleneck.
    _LADDER = (2048, 1024, 512, 256, 128, 64)

    def __init__(self, byte):
        super(AutoEncoder, self).__init__()
        if byte in (256, 128, 64):
            sizes = list(self._LADDER[:self._LADDER.index(byte) + 1])
        else:
            # Fallback branch: generic bottleneck straight after 512.
            sizes = [2048, 1024, 512, byte]

        encoder_layers = []
        for n_in, n_out in zip(sizes, sizes[1:]):
            encoder_layers += [nn.Linear(n_in, n_out), nn.ReLU()]
        encoder_layers.pop()  # no activation on the bottleneck output
        self.encoder = nn.Sequential(*encoder_layers)

        decoder_layers = []
        reverse = sizes[::-1]
        for n_in, n_out in zip(reverse, reverse[1:]):
            # Every decoder Linear keeps its ReLU, including the final 2048
            # layer, matching the original trailing activation.
            decoder_layers += [nn.Linear(n_in, n_out), nn.ReLU()]
        self.decoder = nn.Sequential(*decoder_layers)

    def forward(self, input, tag=False):
        """Encode then decode ``input``.

        If ``tag`` is truthy, ``input`` is treated as an already-compressed
        code and only the decoder runs.  Returns ``(compress, reconstruct)``.
        """
        compress = input if tag else self.encoder(input)
        reconstruct = self.decoder(compress)
        return compress, reconstruct
| 28.868687
| 44
| 0.374388
| 264
| 2,858
| 4.022727
| 0.128788
| 0.225989
| 0.195857
| 0.290019
| 0.764595
| 0.764595
| 0.638418
| 0.638418
| 0.638418
| 0.638418
| 0
| 0.14346
| 0.502449
| 2,858
| 98
| 45
| 29.163265
| 0.603376
| 0.016795
| 0
| 0.712644
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022989
| false
| 0
| 0.011494
| 0
| 0.057471
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9f2f32ba26ca5d0915df37998964571b27adadc
| 44
|
py
|
Python
|
gunicorn.conf.py
|
airatk/flask.base
|
833cb286551b03016d2c2e273a116af15ab6a43b
|
[
"MIT"
] | null | null | null |
gunicorn.conf.py
|
airatk/flask.base
|
833cb286551b03016d2c2e273a116af15ab6a43b
|
[
"MIT"
] | null | null | null |
gunicorn.conf.py
|
airatk/flask.base
|
833cb286551b03016d2c2e273a116af15ab6a43b
|
[
"MIT"
] | null | null | null |
# Gunicorn configuration module (read by gunicorn at startup).
bind: str = "0.0.0.0:3000"  # listen on all interfaces, port 3000
workers: int = 3  # number of worker processes
| 14.666667
| 26
| 0.590909
| 10
| 44
| 2.6
| 0.7
| 0.230769
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.181818
| 44
| 2
| 27
| 22
| 0.472222
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4e4ca3fc0c3aeca46c1f657d3ed93c65ccf00315
| 115
|
py
|
Python
|
OreoML/__init__.py
|
harishsg99/OreoML
|
ac7d48952c48e9455c78b3ed3a0c95a20d01352e
|
[
"MIT"
] | 5
|
2020-09-07T01:51:27.000Z
|
2022-01-02T17:39:11.000Z
|
OreoML/__init__.py
|
harishsg99/OreoML
|
ac7d48952c48e9455c78b3ed3a0c95a20d01352e
|
[
"MIT"
] | null | null | null |
OreoML/__init__.py
|
harishsg99/OreoML
|
ac7d48952c48e9455c78b3ed3a0c95a20d01352e
|
[
"MIT"
] | null | null | null |
# Public re-exports for the OreoML package.
from .predict import automl_predict
from .predict import cmd
from .train import automl_train
# NOTE(review): this re-import shadows predict's `cmd` above — only
# train's `cmd` remains reachable as OreoML.cmd; confirm which is intended
# (aliasing as predict_cmd / train_cmd would keep both).
from .train import cmd
| 28.75
| 35
| 0.834783
| 18
| 115
| 5.222222
| 0.333333
| 0.234043
| 0.361702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 115
| 4
| 36
| 28.75
| 0.94
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4ea4731924d4b540959f4ddb753ce9f6135508e0
| 323
|
py
|
Python
|
mktestdocs/__init__.py
|
koaning/mktestdocs
|
96bfc7e43cf6d14053db215b183ba68d9df6c616
|
[
"Apache-2.0"
] | 49
|
2020-12-02T15:47:47.000Z
|
2022-03-18T15:04:25.000Z
|
mktestdocs/__init__.py
|
koaning/mktestdocs
|
96bfc7e43cf6d14053db215b183ba68d9df6c616
|
[
"Apache-2.0"
] | null | null | null |
mktestdocs/__init__.py
|
koaning/mktestdocs
|
96bfc7e43cf6d14053db215b183ba68d9df6c616
|
[
"Apache-2.0"
] | 5
|
2021-07-20T17:22:09.000Z
|
2021-12-23T14:12:46.000Z
|
# Re-export the package's public helpers from the implementation module.
from mktestdocs.__main__ import (
    check_codeblock,
    grab_code_blocks,
    check_docstring,
    check_md_file,
    get_codeblock_members,
)
# Package version string.
__version__ = "0.1.1"
# Explicit public API of the package.
__all__ = [
    "__version__",
    "check_codeblock",
    "grab_code_blocks",
    "check_docstring",
    "check_md_file",
    "get_codeblock_members",
]
| 17
| 33
| 0.681115
| 36
| 323
| 5.222222
| 0.5
| 0.148936
| 0.191489
| 0.234043
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0
| 0.011811
| 0.213622
| 323
| 18
| 34
| 17.944444
| 0.728346
| 0
| 0
| 0
| 0
| 0
| 0.297214
| 0.065015
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.0625
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
14d230670c53957cd3499728681eab3bc690894b
| 12,627
|
py
|
Python
|
nfv/nfv-vim/nfv_vim/rpc/_rpc_message_image.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2020-02-07T19:01:36.000Z
|
2022-02-23T01:41:46.000Z
|
nfv/nfv-vim/nfv_vim/rpc/_rpc_message_image.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 1
|
2021-01-14T12:02:25.000Z
|
2021-01-14T12:02:25.000Z
|
nfv/nfv-vim/nfv_vim/rpc/_rpc_message_image.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2021-01-13T08:39:21.000Z
|
2022-02-09T00:21:55.000Z
|
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from nfv_common import debug
from nfv_vim.rpc._rpc_defs import RPC_MSG_RESULT
from nfv_vim.rpc._rpc_defs import RPC_MSG_TYPE
from nfv_vim.rpc._rpc_defs import RPC_MSG_VERSION
from nfv_vim.rpc._rpc_message import RPCMessage
DLOG = debug.debug_get_logger('nfv_vim.rpc.image')
class APIRequestCreateImage(RPCMessage):
    """
    RPC API Request Message - Create Image
    """
    # Payload fields carried by this message.  (De)serialization below is
    # driven by this tuple so the attribute list, serialize_payload and
    # deserialize_payload can never drift apart (they were three hand-kept
    # copies before).
    _PAYLOAD_FIELDS = ('name', 'description', 'container_format',
                       'disk_format', 'min_disk_size_gb',
                       'min_memory_size_mb', 'visibility', 'protected',
                       'properties', 'image_data_ref')

    name = None
    description = None
    container_format = None
    disk_format = None
    min_disk_size_gb = None
    min_memory_size_mb = None
    visibility = None
    protected = None
    properties = None
    image_data_ref = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.CREATE_IMAGE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestCreateImage, self).__init__(msg_version, msg_type,
                                                    msg_result)

    def serialize_payload(self, msg):
        """Copy every payload field onto the outgoing message dict."""
        for field in self._PAYLOAD_FIELDS:
            msg[field] = getattr(self, field)

    def deserialize_payload(self, msg):
        """Populate payload fields from an incoming message dict (missing keys -> None)."""
        for field in self._PAYLOAD_FIELDS:
            setattr(self, field, msg.get(field, None))

    def __str__(self):
        return ("create-image request: %s, %s, %s, %s, %s"
                % (self.name, self.description, self.container_format,
                   self.disk_format, self.image_data_ref))
class APIResponseCreateImage(RPCMessage):
    """
    RPC API Response Message - Create Image
    """
    # Payload fields carried by this message; drives both serialize_payload
    # and deserialize_payload (previously three hand-kept copies).
    _PAYLOAD_FIELDS = ('uuid', 'name', 'description', 'container_format',
                       'disk_format', 'min_disk_size_gb',
                       'min_memory_size_mb', 'visibility', 'protected',
                       'avail_status', 'action', 'properties')

    uuid = None
    name = None
    description = None
    container_format = None
    disk_format = None
    min_disk_size_gb = None
    min_memory_size_mb = None
    visibility = None
    protected = None
    avail_status = None
    action = None
    properties = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.CREATE_IMAGE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseCreateImage, self).__init__(msg_version, msg_type,
                                                     msg_result)

    def serialize_payload(self, msg):
        """Copy every payload field onto the outgoing message dict."""
        for field in self._PAYLOAD_FIELDS:
            msg[field] = getattr(self, field)

    def deserialize_payload(self, msg):
        """Populate payload fields from an incoming message dict (missing keys -> None)."""
        for field in self._PAYLOAD_FIELDS:
            setattr(self, field, msg.get(field, None))

    def __str__(self):
        return ("create-image response: %s, %s, %s, %s"
                % (self.uuid, self.name, self.container_format,
                   self.disk_format))
class APIRequestUpdateImage(RPCMessage):
    """
    RPC API Request Message - Update Image
    """
    # Payload fields carried by this message; drives both serialize_payload
    # and deserialize_payload (previously three hand-kept copies).
    _PAYLOAD_FIELDS = ('uuid', 'description', 'min_disk_size_gb',
                       'min_memory_size_mb', 'visibility', 'protected',
                       'properties')

    uuid = None
    description = None
    min_disk_size_gb = None
    min_memory_size_mb = None
    visibility = None
    protected = None
    properties = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.UPDATE_IMAGE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestUpdateImage, self).__init__(msg_version, msg_type,
                                                    msg_result)

    def serialize_payload(self, msg):
        """Copy every payload field onto the outgoing message dict."""
        for field in self._PAYLOAD_FIELDS:
            msg[field] = getattr(self, field)

    def deserialize_payload(self, msg):
        """Populate payload fields from an incoming message dict (missing keys -> None)."""
        for field in self._PAYLOAD_FIELDS:
            setattr(self, field, msg.get(field, None))

    def __str__(self):
        return "update-image request: %s" % self.uuid
class APIResponseUpdateImage(RPCMessage):
    """
    RPC API Response Message - Update Image
    """
    # Image attributes echoed back by the server; all default to None.
    uuid = None
    name = None
    description = None
    container_format = None
    disk_format = None
    min_disk_size_gb = None
    min_memory_size_mb = None
    visibility = None
    protected = None
    avail_status = None
    action = None
    properties = None

    # Payload keys handled by serialize/deserialize, in wire order.
    _PAYLOAD_FIELDS = ('uuid', 'name', 'description', 'container_format',
                       'disk_format', 'min_disk_size_gb',
                       'min_memory_size_mb', 'visibility', 'protected',
                       'avail_status', 'action', 'properties')

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.UPDATE_IMAGE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseUpdateImage, self).__init__(msg_version, msg_type,
                                                     msg_result)

    def serialize_payload(self, msg):
        """Copy each payload field from this message into *msg*."""
        for field in self._PAYLOAD_FIELDS:
            msg[field] = getattr(self, field)

    def deserialize_payload(self, msg):
        """Load each payload field from *msg*; missing keys become None."""
        for field in self._PAYLOAD_FIELDS:
            setattr(self, field, msg.get(field, None))

    def __str__(self):
        summary = (self.uuid, self.name, self.container_format,
                   self.disk_format)
        return "update-image response: %s, %s, %s, %s" % summary
class APIRequestDeleteImage(RPCMessage):
    """
    RPC API Request Message - Delete Image
    """
    # Identifier of the image to delete.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.DELETE_IMAGE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestDeleteImage, self).__init__(msg_version, msg_type,
                                                    msg_result)

    def serialize_payload(self, msg):
        """Write the target image uuid into *msg*."""
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        """Read the target image uuid from *msg* (None when absent)."""
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "delete-image request: %s" % self.uuid
class APIResponseDeleteImage(RPCMessage):
    """
    RPC API Response Message - Delete Image
    """
    # Identifier of the image that was deleted.
    uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.DELETE_IMAGE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseDeleteImage, self).__init__(msg_version, msg_type,
                                                     msg_result)

    def serialize_payload(self, msg):
        """Write the deleted image uuid into *msg*."""
        msg['uuid'] = self.uuid

    def deserialize_payload(self, msg):
        """Read the deleted image uuid from *msg* (None when absent)."""
        self.uuid = msg.get('uuid')

    def __str__(self):
        return "delete-image response: %s" % self.uuid
class APIRequestGetImage(RPCMessage):
    """
    RPC API Request Message - Get Image
    """
    # When True the request asks for all images; otherwise a uuid filter
    # identifies a single image.
    get_all = False
    filter_by_uuid = None

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.GET_IMAGE_REQUEST,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIRequestGetImage, self).__init__(msg_version, msg_type,
                                                 msg_result)

    def serialize_payload(self, msg):
        """Write the query flags into *msg*."""
        msg['get_all'] = self.get_all
        msg['filter_by_uuid'] = self.filter_by_uuid

    def deserialize_payload(self, msg):
        """Read the query flags from *msg*.

        NOTE(review): the missing-key default for 'get_all' here is True
        although the class default is False -- presumably deliberate so a
        payload without the flag means "fetch everything", but worth
        confirming against the senders.
        """
        self.get_all = msg.get('get_all', True)
        self.filter_by_uuid = msg.get('filter_by_uuid', None)

    def __str__(self):
        if self.get_all:
            return "get-image request: get-all"
        return "get-image request: %s" % self.filter_by_uuid
class APIResponseGetImage(RPCMessage):
    """
    RPC API Response Message - Get Image
    """
    # Image attributes returned by the server; all default to None.
    uuid = None
    name = None
    description = None
    container_format = None
    disk_format = None
    min_disk_size_gb = None
    min_memory_size_mb = None
    visibility = None
    protected = None
    avail_status = None
    action = None
    properties = None

    # Payload keys handled by serialize/deserialize, in wire order.
    _PAYLOAD_FIELDS = ('uuid', 'name', 'description', 'container_format',
                       'disk_format', 'min_disk_size_gb',
                       'min_memory_size_mb', 'visibility', 'protected',
                       'avail_status', 'action', 'properties')

    def __init__(self, msg_version=RPC_MSG_VERSION.VERSION_1_0,
                 msg_type=RPC_MSG_TYPE.GET_IMAGE_RESPONSE,
                 msg_result=RPC_MSG_RESULT.SUCCESS):
        super(APIResponseGetImage, self).__init__(msg_version, msg_type,
                                                  msg_result)

    def serialize_payload(self, msg):
        """Copy each payload field from this message into *msg*."""
        for field in self._PAYLOAD_FIELDS:
            msg[field] = getattr(self, field)

    def deserialize_payload(self, msg):
        """Load each payload field from *msg*; missing keys become None."""
        for field in self._PAYLOAD_FIELDS:
            setattr(self, field, msg.get(field, None))

    def __str__(self):
        summary = (self.uuid, self.name, self.container_format,
                   self.disk_format)
        return "get-image response: %s, %s, %s, %s" % summary
| 36.284483
| 77
| 0.625327
| 1,554
| 12,627
| 4.756113
| 0.055341
| 0.047084
| 0.037207
| 0.043972
| 0.884589
| 0.835476
| 0.82154
| 0.811663
| 0.807874
| 0.751049
| 0
| 0.002814
| 0.268235
| 12,627
| 347
| 78
| 36.389049
| 0.797078
| 0.031282
| 0
| 0.798507
| 0
| 0
| 0.123813
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.119403
| false
| 0
| 0.018657
| 0.026119
| 0.414179
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
14fd148080ad444739afb198a50a495620a394d0
| 264
|
py
|
Python
|
sfaira/data/store/__init__.py
|
theislab/sfaira
|
77a7b49936047a0cdddc5ace4482186a868c3a7a
|
[
"BSD-3-Clause"
] | 110
|
2020-09-08T07:47:15.000Z
|
2022-03-29T03:33:56.000Z
|
sfaira/data/store/__init__.py
|
theislab/sfaira
|
77a7b49936047a0cdddc5ace4482186a868c3a7a
|
[
"BSD-3-Clause"
] | 405
|
2020-09-15T15:05:46.000Z
|
2022-03-16T14:44:23.000Z
|
sfaira/data/store/__init__.py
|
theislab/sfaira
|
77a7b49936047a0cdddc5ace4482186a868c3a7a
|
[
"BSD-3-Clause"
] | 20
|
2021-03-30T15:30:14.000Z
|
2022-03-07T12:52:58.000Z
|
from sfaira.data.store.load_store import load_store
from sfaira.data.store.multi_store import DistributedStoreMultipleFeatureSpaceBase, \
DistributedStoresH5ad, DistributedStoresDao
from sfaira.data.store.single_store import DistributedStoreSingleFeatureSpace
| 52.8
| 85
| 0.882576
| 27
| 264
| 8.481481
| 0.444444
| 0.131004
| 0.183406
| 0.248908
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004082
| 0.07197
| 264
| 4
| 86
| 66
| 0.930612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
090d13808c635795b1e71463ce4f8ff2bd169370
| 11,622
|
py
|
Python
|
3-Time-Series-Analysis/outlier_detection/demo.py
|
cs60050/TeamGabru
|
6cdc321ad0b2d9fc1ec5b0b638a880865d4d88c1
|
[
"MIT"
] | 14
|
2016-09-06T14:26:07.000Z
|
2021-08-07T15:59:33.000Z
|
3-Time-Series-Analysis/outlier_detection/demo.py
|
cs60050/TeamGabru
|
6cdc321ad0b2d9fc1ec5b0b638a880865d4d88c1
|
[
"MIT"
] | 2
|
2019-02-13T10:47:48.000Z
|
2019-04-03T09:28:09.000Z
|
3-Time-Series-Analysis/outlier_detection/demo.py
|
cs60050/TeamGabru
|
6cdc321ad0b2d9fc1ec5b0b638a880865d4d88c1
|
[
"MIT"
] | 5
|
2016-11-15T11:24:05.000Z
|
2019-05-24T02:54:39.000Z
|
predicted = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 
1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
import numpy as np
# One label per frame: 18 test clips x 200 frames each.
labels = np.reshape(predicted, (18, 200))
import cv2
print(labels[0])
import glob
basepath = "/home/prabhat/Documents/Anomaly/dataset/UCSD_Anomaly_Dataset.v1p2/UCSDped1/Test"
import os

# For each test clip, write every frame back out as a PNG; frames whose
# label is 1 (anomalous) get a translucent red overlay.
for i in range(19, 37):
    testpath = basepath + "/Test0" + str(i)
    images = sorted(glob.glob(testpath + "/*.tif"))
    # print(images)
    for j, image in enumerate(images):
        destfile = basepath + "/Test0" + str(i) + "/d" + str(j) + ".png"
        label = labels[i - 19][j]
        orig = cv2.imread(image, cv2.IMREAD_UNCHANGED)
        orig = cv2.cvtColor(orig, cv2.COLOR_GRAY2BGR)
        if label == 1:
            # Blend a solid red image over the frame (90% frame, 10% red).
            (rH, rW) = orig.shape[:2]
            blank_image = np.zeros((rH, rW, 3), np.uint8)
            blank_image[:, :] = (0, 0, 255)
            added = cv2.addWeighted(orig, 0.9, blank_image, 0.1, 0.0)
            cv2.imwrite(destfile, added)
        else:
            cv2.imwrite(destfile, orig)
| 400.758621
| 10,812
| 0.357598
| 3,728
| 11,622
| 1.112929
| 0.017436
| 1.334779
| 1.939262
| 2.519161
| 0.867679
| 0.867679
| 0.867679
| 0.867679
| 0.867197
| 0.864787
| 0
| 0.460711
| 0.319997
| 11,622
| 28
| 10,813
| 415.071429
| 0.064279
| 0.001119
| 0
| 0
| 0
| 0
| 0.008873
| 0.006806
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.041667
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
117ed13e0d2186ad9c42043575ad3c865d443e49
| 111
|
py
|
Python
|
survae/tests/nn/layers/autoregressive/__init__.py
|
alisiahkoohi/survae_flows
|
e1747b05524c7ab540a211ed360ab3e67bc3e96d
|
[
"MIT"
] | 262
|
2020-07-05T20:57:44.000Z
|
2022-03-28T02:24:43.000Z
|
survae/tests/nn/layers/autoregressive/__init__.py
|
alisiahkoohi/survae_flows
|
e1747b05524c7ab540a211ed360ab3e67bc3e96d
|
[
"MIT"
] | 17
|
2020-08-15T05:43:34.000Z
|
2022-01-31T12:24:21.000Z
|
survae/tests/nn/layers/autoregressive/__init__.py
|
alisiahkoohi/survae_flows
|
e1747b05524c7ab540a211ed360ab3e67bc3e96d
|
[
"MIT"
] | 35
|
2020-08-24T06:55:37.000Z
|
2022-02-11T05:17:58.000Z
|
from .ar_shift import *
from .seq_reorder import *
from .masked_linear import *
from .masked_conv_2d import *
| 18.5
| 29
| 0.774775
| 17
| 111
| 4.764706
| 0.588235
| 0.37037
| 0.395062
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010638
| 0.153153
| 111
| 5
| 30
| 22.2
| 0.851064
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
11b0bca47b46004a45036d2786ffa7398e0baa27
| 8,192
|
py
|
Python
|
models/Rappor.py
|
HuskyW/FFPA
|
f96ceeafd6575b5ff93b4ee69d57e9cc3cb9336e
|
[
"MIT"
] | null | null | null |
models/Rappor.py
|
HuskyW/FFPA
|
f96ceeafd6575b5ff93b4ee69d57e9cc3cb9336e
|
[
"MIT"
] | null | null | null |
models/Rappor.py
|
HuskyW/FFPA
|
f96ceeafd6575b5ff93b4ee69d57e9cc3cb9336e
|
[
"MIT"
] | 1
|
2021-12-28T13:01:13.000Z
|
2021-12-28T13:01:13.000Z
|
import math
import multiprocess
import numpy as np
import itertools
import time
import random
from utils.Sampling import sampleClients
from models.Handlers import Handler
from models.Randomize import Randomizer
class RapporHandler(Handler):
    """RAPPOR-style frequent-itemset mining over locally randomized
    one-hot client reports (one bit per possible itemset)."""

    def __init__(self, args, dataset):
        self.args = args
        self.dataset = dataset
        # Bit-flip probability derived from the privacy budget epsilon.
        self.args.eta = 1 / (math.e ** (self.args.epsilon / 2.0) + 1.0)
        self.orig_rec_num = self.dataset.get_line_num()
        self.clients_num = self.orig_rec_num * self.args.duplicate
        self.args.num_clients = self.clients_num
        self.randomizer = Randomizer(self.args)
        self.num_points = len(self.dataset.points)
        # One report bit per possible itemset over num_points items.
        self.one_hot_size = 2 ** self.num_points
        if self.args.pattern_type != "itemset":
            print("Error: rappor can only work for itemset datasets")
            exit(0)

    def __data2idx(self, data):
        # Encode an itemset as a bitmask index into the one-hot vector.
        res = 0
        for i in data:
            res += (2 ** i)
        return res

    def __idx2data(self, idx):
        # Decode a bitmask index back into a sorted tuple of item ids.
        res = []
        nextitem = 0
        while idx > 0:
            if idx % 2 == 1:
                res.append(nextitem)
            idx = math.floor(idx / 2)
            nextitem += 1
        return tuple(res)

    def __oneClient(self, client_idx):
        # Build this client's one-hot report and randomize it locally.
        raw_data = self.dataset[client_idx].data
        data_idx = self.__data2idx(raw_data)
        res = np.zeros(self.one_hot_size, dtype='int')
        res[data_idx] = 1
        res = self.randomizer.randomBits(res)
        return res

    def __processWorker(self, proc_idx, participents, queue):
        # Sum the randomized reports for one worker's share of clients and
        # push the per-worker support vector onto the queue.
        support = np.zeros(self.one_hot_size, dtype='int')
        num_milestone = 5
        milestone = math.floor(len(participents) / num_milestone)
        for idx in range(len(participents)):
            if idx > 0 and idx % milestone == 0 and int(idx / milestone) != num_milestone and self.args.verbose:
                print("Worker %2d: %d%% done" % (proc_idx, int(round(idx * 100 / len(participents)))))
            client_idx = participents[idx]
            res = self.__oneClient(client_idx)
            support += res
        queue.put(support)
        return

    def __supportthres(self):
        # Support threshold scaled to the sampled participant count.
        return self.args.num_participants * self.args.k / self.clients_num

    def __estimateSupport(self, support):
        # Invert the randomized-response noise to estimate true support.
        noisyFreq = support / self.args.num_participants
        estimatedFreq = (noisyFreq - self.args.eta) / (1 - 2 * self.args.eta)
        return estimatedFreq * self.args.num_participants

    def __expensionsets(self, i):
        # All non-empty sub-itemsets (encoded as indices) of itemset i.
        target = self.__idx2data(i)
        l_target = len(target)
        res = set()
        for l in range(1, l_target + 1):
            for subset in itertools.combinations(list(target), l):
                subset = list(subset)
                subset.sort()
                subid = self.__data2idx(tuple(subset))
                res.add(subid)
        return res

    def run(self):
        """Collect randomized reports in parallel, denoise the aggregate
        and return the set of itemsets whose support clears the threshold."""
        participents = sampleClients(self.args, self.orig_rec_num)
        mananger = multiprocess.Manager()
        queue = mananger.Queue()
        jobs = []
        workload = math.floor(len(participents) / self.args.process)
        start_time = time.time()
        for proc_idx in range(self.args.process):
            if proc_idx == self.args.process - 1:
                # Last worker also takes the remainder.
                participents_load = participents[proc_idx * workload:len(participents)]
            else:
                participents_load = participents[proc_idx * workload:(proc_idx + 1) * workload]
            p = multiprocess.Process(target=self.__processWorker,
                                     args=(proc_idx, participents_load, queue))
            jobs.append(p)
            p.start()
        for p in jobs:
            p.join()
        end_time = time.time()
        results = [queue.get() for j in jobs]
        support_count = np.zeros(self.one_hot_size, dtype='int')
        for res in results:
            support_count += res
        print("Aggregating...")
        estimated_support_count = [0] * self.one_hot_size
        for i in range(self.one_hot_size):
            estimated_support_count[i] = self.__estimateSupport(support_count[i])
        # Propagate each itemset's support down to all of its sub-itemsets.
        regular_support_count = [0] * self.one_hot_size
        for i in range(self.one_hot_size):
            subs = self.__expensionsets(i)
            for j in subs:
                regular_support_count[j] += estimated_support_count[i]
        fragments = set()
        support_thres = self.__supportthres()
        for i in range(self.one_hot_size):
            if regular_support_count[i] >= support_thres:
                good_frag = self.__idx2data(i)
                fragments.add(good_frag)
        print("Client time: %d sec" % int(end_time - start_time))
        return fragments
class HitterRapporHandler(Handler):
    """RAPPOR-style heavy-hitter (single item) mining: each client reports
    one randomly chosen item from its record as a randomized one-hot bit."""

    def __init__(self, args, dataset):
        self.args = args
        self.dataset = dataset
        # Bit-flip probability derived from the privacy budget epsilon.
        self.args.eta = 1 / (math.e ** (self.args.epsilon / 2.0) + 1.0)
        self.orig_rec_num = self.dataset.get_line_num()
        self.clients_num = self.orig_rec_num * self.args.duplicate
        self.args.num_clients = self.clients_num
        self.randomizer = Randomizer(self.args)
        self.num_points = len(self.dataset.points)
        # One report bit per item (not per itemset).
        self.one_hot_size = self.num_points
        if self.args.pattern_type != "item":
            print("Error: rappor can only work for itemset datasets")
            exit(0)

    def __oneClient(self, client_idx):
        # Report one uniformly chosen item from this client's record.
        raw_data = self.dataset[client_idx].data
        chosen = random.choice(tuple(raw_data))
        res = np.zeros(self.one_hot_size, dtype='int')
        res[chosen] = 1
        res = self.randomizer.randomBits(res)
        return res

    def __processWorker(self, proc_idx, participents, queue):
        # Sum the randomized reports for one worker's share of clients.
        support = np.zeros(self.one_hot_size, dtype='int')
        num_milestone = 5
        milestone = math.floor(len(participents) / num_milestone)
        for idx in range(len(participents)):
            if idx > 0 and idx % milestone == 0 and int(idx / milestone) != num_milestone and self.args.verbose:
                print("Worker %2d: %d%% done" % (proc_idx, int(round(idx * 100 / len(participents)))))
            client_idx = participents[idx]
            res = self.__oneClient(client_idx)
            support += res
        queue.put(support)
        return

    def __supportthres(self):
        # Support threshold scaled to the sampled participant count.
        return self.args.num_participants * self.args.k / self.clients_num

    def __estimateSupport(self, support):
        # Invert the randomized-response noise to estimate true support.
        noisyFreq = support / self.args.num_participants
        estimatedFreq = (noisyFreq - self.args.eta) / (1 - 2 * self.args.eta)
        return estimatedFreq * self.args.num_participants * 5  # 5 is the minimum length of each record in kosarak dataset

    def run(self):
        """Collect randomized reports in parallel, denoise the aggregate
        and return the item ids whose support clears the threshold."""
        participents = sampleClients(self.args, self.orig_rec_num)
        mananger = multiprocess.Manager()
        queue = mananger.Queue()
        jobs = []
        workload = math.floor(len(participents) / self.args.process)
        start_time = time.time()
        for proc_idx in range(self.args.process):
            if proc_idx == self.args.process - 1:
                # Last worker also takes the remainder.
                participents_load = participents[proc_idx * workload:len(participents)]
            else:
                participents_load = participents[proc_idx * workload:(proc_idx + 1) * workload]
            p = multiprocess.Process(target=self.__processWorker,
                                     args=(proc_idx, participents_load, queue))
            jobs.append(p)
            p.start()
        for p in jobs:
            p.join()
        end_time = time.time()
        results = [queue.get() for j in jobs]
        support_count = np.zeros(self.one_hot_size, dtype='int')
        for res in results:
            support_count += res
        print("Aggregating...")
        estimated_support_count = [0] * self.one_hot_size
        for i in range(self.one_hot_size):
            estimated_support_count[i] = self.__estimateSupport(support_count[i])
        fragments = set()
        support_thres = self.__supportthres()
        for i in range(self.one_hot_size):
            if estimated_support_count[i] >= support_thres:
                fragments.add(i)
        print("Client time: %d sec" % int(end_time - start_time))
        return fragments
| 36.088106
| 127
| 0.609131
| 1,010
| 8,192
| 4.736634
| 0.143564
| 0.063545
| 0.033445
| 0.046823
| 0.828177
| 0.817726
| 0.817726
| 0.817726
| 0.803512
| 0.803512
| 0
| 0.009935
| 0.287354
| 8,192
| 226
| 128
| 36.247788
| 0.809524
| 0.006958
| 0
| 0.711957
| 0
| 0
| 0.028649
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081522
| false
| 0
| 0.048913
| 0.01087
| 0.211957
| 0.043478
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eeb563a5e08794fd5a6dcc325e1eeed31c7cf6e9
| 145
|
py
|
Python
|
_main/home/pi/kmori-Pla-Rail-Scripts/status.py
|
Kumapapa2012/Raspberry-Pi-Zero-on-PLA-RAIL
|
2dcbd2f11bd92ef1b9be569c2a97c9117d984cef
|
[
"MIT"
] | null | null | null |
_main/home/pi/kmori-Pla-Rail-Scripts/status.py
|
Kumapapa2012/Raspberry-Pi-Zero-on-PLA-RAIL
|
2dcbd2f11bd92ef1b9be569c2a97c9117d984cef
|
[
"MIT"
] | null | null | null |
_main/home/pi/kmori-Pla-Rail-Scripts/status.py
|
Kumapapa2012/Raspberry-Pi-Zero-on-PLA-RAIL
|
2dcbd2f11bd92ef1b9be569c2a97c9117d984cef
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import utils

# Dump the motor-driver status registers for debugging.
# Converted from Python-2-only `print` statements to print() calls so the
# script runs under both Python 2 and Python 3.
print("Main Register:")
print(utils.Get8830Status())
print("Fault Register:")
print(utils.Get8830Status_Fault())
| 18.125
| 33
| 0.77931
| 19
| 145
| 5.894737
| 0.578947
| 0.232143
| 0.321429
| 0.553571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 0.103448
| 145
| 7
| 34
| 20.714286
| 0.8
| 0.137931
| 0
| 0
| 0
| 0
| 0.233871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.2
| null | null | 0.8
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
eed100b7b7926f0934511f5f2e6456b8b40e0aef
| 95,841
|
py
|
Python
|
test/client/downlinks/test_downlinks.py
|
DobromirM/swim-system-python
|
a5b4f05457f1eb2739a920c42dfc721c83a1226a
|
[
"Apache-2.0"
] | 8
|
2019-11-11T19:38:59.000Z
|
2022-01-06T11:13:04.000Z
|
test/client/downlinks/test_downlinks.py
|
swimos/swim-system-python
|
727c09b6e7300b063e320364373ff724d9b8af90
|
[
"Apache-2.0"
] | 40
|
2019-10-29T10:35:49.000Z
|
2021-05-14T22:18:35.000Z
|
test/client/downlinks/test_downlinks.py
|
DobromirM/swim-system-python
|
a5b4f05457f1eb2739a920c42dfc721c83a1226a
|
[
"Apache-2.0"
] | 3
|
2020-01-31T18:28:58.000Z
|
2021-08-25T08:53:13.000Z
|
# Copyright 2015-2021 SWIM.AI inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import aiounittest
from concurrent.futures import Future
from unittest.mock import patch
from swimai import SwimClient
from swimai.client._connections import _DownlinkManager, _DownlinkManagerStatus
from swimai.client._downlinks._downlinks import _EventDownlinkModel, _DownlinkModel, _ValueDownlinkModel, \
_EventDownlinkView, \
_DownlinkView, _ValueDownlinkView, _MapDownlinkModel, _MapDownlinkView
from swimai.client._downlinks._utils import UpdateRequest, RemoveRequest
from swimai.structures import Text, Attr, RecordMap, Num, Bool, Slot, Value
from swimai.structures._structs import _Absent, _Record
from swimai.warp._warp import _LinkedResponse, _SyncedResponse, _EventMessage, _UnlinkedResponse
from test.utils import MockConnection, MockExecuteOnException, MockWebsocketConnect, MockWebsocket, \
mock_did_set_confirmation, ReceiveLoop, MockPerson, MockPet, NewScope, MockNoDefaultConstructor, MockCar, \
MockModel, MockDownlinkManager, mock_on_event_callback, MockEventCallback, \
MockDidSetCallback, mock_did_set_callback, MockDidUpdateCallback, mock_did_update_callback, \
mock_did_remove_callback, MockDidRemoveCallback
class TestDownlinks(aiounittest.AsyncTestCase):
def setUp(self):
MockWebsocket.clear()
MockConnection.clear()
async def test_create_event_downlink_model(self):
# Given
with SwimClient() as client:
# When
actual = _EventDownlinkModel(client)
# Then
self.assertIsInstance(actual, _EventDownlinkModel)
self.assertIsInstance(actual, _DownlinkModel)
self.assertEqual(client, actual.client)
self.assertFalse(actual.linked.is_set())
async def test_open_downlink_model(self):
# Given
with SwimClient() as client:
downlink = _EventDownlinkModel(client)
downlink.connection = MockConnection.get_mock_connection()
# When
actual = downlink._open()
# Then
self.assertFalse(actual.task.done())
self.assertTrue(actual.task.done())
self.assertEqual(downlink, actual)
self.assertIsInstance(actual.task, Future)
    @patch('swimai.client._connections._DownlinkManager._close_views')
    async def test_close_downlink_model_with_manager(self, mock_close_views):
        """Closing an open model with a manager cancels its task and closes the manager's views."""
        # Given
        with SwimClient() as client:
            downlink = _EventDownlinkModel(client)
            downlink.connection = MockConnection.get_mock_connection()
            downlink = downlink._open()
            manager = _DownlinkManager(MockConnection.get_mock_connection())
            downlink.downlink_manager = manager
            # When
            actual = downlink._close()
            # Busy-wait until the background task has been torn down.
            while not actual.task.done():
                pass
            # Then
            self.assertEqual(downlink, actual)
            self.assertIsInstance(actual.task, Future)
            self.assertTrue(actual.task.done())
            self.assertTrue(actual.task.cancelled())
            self.assertTrue(mock_close_views.called)
    @patch('swimai.client._connections._DownlinkManager._close_views')
    async def test_close_downlink_model_without_manager(self, mock_close_views):
        """Closing a model without a manager cancels its task and never touches manager views."""
        # Given
        with SwimClient() as client:
            downlink = _EventDownlinkModel(client)
            downlink.connection = MockConnection.get_mock_connection()
            downlink = downlink._open()
            # When
            actual = downlink._close()
            # Busy-wait until the background task has been torn down.
            while not actual.task.done():
                pass
            # Then
            self.assertEqual(downlink, actual)
            self.assertIsInstance(actual.task, Future)
            self.assertTrue(actual.task.done())
            self.assertTrue(actual.task.cancelled())
            self.assertFalse(mock_close_views.called)
    async def test_downlink_model_receive_message_linked(self):
        """A linked response received from the connection sets the model's linked event."""
        # Given
        with SwimClient() as client:
            downlink = _EventDownlinkModel(client)
            downlink.connection = MockConnection.get_mock_connection()
            downlink.connection.owner = downlink
            linked_message = _LinkedResponse('linked_node', 'linked_lane')
            downlink.connection.messages_to_receive.append(linked_message)
            # When
            actual = downlink._open()
            # Busy-wait until the receive loop processes the queued message.
            while not actual.linked.is_set():
                pass
            # Then
            self.assertEqual(downlink, actual)
            self.assertTrue(actual.linked.is_set())
    async def test_downlink_model_receive_message_synced(self):
        """A synced response received from the connection sets the value model's _synced event."""
        # Given
        with SwimClient() as client:
            downlink = _ValueDownlinkModel(client)
            downlink.connection = MockConnection.get_mock_connection()
            downlink.connection.owner = downlink
            synced_message = _SyncedResponse('synced_node', 'synced_lane')
            downlink.connection.messages_to_receive.append(synced_message)
            # When
            actual = downlink._open()
            # Busy-wait until the receive loop processes the queued message.
            while not actual._synced.is_set():
                pass
            # Then
            self.assertEqual(downlink, actual)
            self.assertTrue(actual._synced.is_set())
    async def test_downlink_model_receive_message_event(self):
        """An event message received from the connection updates the value model's _value."""
        # Given
        with SwimClient() as client:
            downlink = _ValueDownlinkModel(client)
            downlink.connection = MockConnection.get_mock_connection()
            downlink.connection.owner = downlink
            downlink_manager = _DownlinkManager(downlink.connection)
            downlink.downlink_manager = downlink_manager
            event_message = _EventMessage('event_node', 'event_lane',
                                          _Record.create_from(Text.create_from('event_body')))
            downlink.connection.messages_to_receive.append(event_message)
            # When
            actual = downlink._open()
            # Busy-wait until the event body has been applied to the model.
            while not actual._value:
                pass
            # Then
            self.assertEqual(downlink, actual)
            self.assertEqual('event_body', actual._value)
    @patch('warnings.warn')
    async def test_downlink_model_receive_message_unlinked(self, mock_warn):
        """An unlinked response with a laneNotFound body triggers a lane-not-found warning."""
        # Given
        with SwimClient(execute_on_exception=MockExecuteOnException.get_mock_execute_on_exception()) as client:
            downlink = _EventDownlinkModel(client)
            downlink.connection = MockConnection.get_mock_connection()
            downlink.connection.owner = downlink
            body = RecordMap.create()
            body.add(Attr.create_attr('laneNotFound', 'foo'))
            unlinked_message = _UnlinkedResponse('unlinked_node', 'unlinked_lane', body=body)
            downlink.connection.messages_to_receive.append(unlinked_message)
            # When
            actual = downlink._open()
            # Busy-wait until the exception hook fires.
            while not MockExecuteOnException.get_mock_execute_on_exception().called:
                pass
            # Then
            self.assertEqual(downlink, actual)
            self.assertEqual('Lane "None" was not found on the remote agent!', mock_warn.call_args_list[0][0][0])
async def test_create_event_downlink_view(self):
# Given
with SwimClient() as client:
# When
actual = _EventDownlinkView(client)
# Then
self.assertIsInstance(actual, _EventDownlinkView)
self.assertIsInstance(actual, _DownlinkView)
self.assertEqual(client, actual._client)
self.assertFalse(actual._is_open)
self.assertFalse(actual.strict)
async def test_downlink_view_set_host_uri_ws(self):
# Given
with SwimClient() as client:
downlink = _EventDownlinkView(client)
host_uri = 'ws://127.0.0.1'
# When
actual = downlink.set_host_uri(host_uri)
# Then
self.assertEqual(downlink, actual)
self.assertEqual(host_uri, actual._host_uri)
async def test_downlink_view_set_host_uri_warp(self):
# Given
with SwimClient() as client:
downlink = _EventDownlinkView(client)
host_uri = 'warp://127.0.0.1'
# When
actual = downlink.set_host_uri(host_uri)
# Then
self.assertEqual(downlink, actual)
self.assertEqual('ws://127.0.0.1', actual._host_uri)
    @patch('warnings.warn')
    async def test_downlink_view_set_host_uri_after_open(self, mock_warn):
        """Calling set_host_uri on an already-open view emits a warning instead of changing it."""
        # Given
        with SwimClient() as client:
            downlink = _EventDownlinkView(client)
            host_uri = 'ws://127.0.0.1'
            downlink._is_open = True
            # When
            downlink.set_host_uri(host_uri)
            # Then
            self.assertEqual('Cannot execute "set_host_uri" after the downlink has been opened!',
                             mock_warn.mock_calls[0][1][0])
async def test_downlink_view_set_node_uri(self):
# Given
with SwimClient() as client:
downlink = _EventDownlinkView(client)
node_uri = 'boo/bar'
# When
actual = downlink.set_node_uri(node_uri)
# Then
self.assertEqual(downlink, actual)
self.assertEqual(node_uri, actual._node_uri)
    @patch('warnings.warn')
    async def test_downlink_view_set_node_uri_after_open(self, mock_warn):
        """Calling set_node_uri on an already-open view emits a warning instead of changing it."""
        # Given
        with SwimClient() as client:
            downlink = _EventDownlinkView(client)
            node_uri = 'foo'
            downlink._is_open = True
            # When
            downlink.set_node_uri(node_uri)
            # Then
            self.assertEqual('Cannot execute "set_node_uri" after the downlink has been opened!',
                             mock_warn.mock_calls[0][1][0])
async def test_downlink_view_set_lane_uri(self):
# Given
with SwimClient() as client:
downlink = _EventDownlinkView(client)
lane_uri = 'shop'
# When
actual = downlink.set_lane_uri(lane_uri)
# Then
self.assertEqual(downlink, actual)
self.assertEqual(lane_uri, actual._lane_uri)
    @patch('warnings.warn')
    async def test_downlink_view_set_lane_uri_after_open(self, mock_warn):
        """Calling set_lane_uri on an already-open view emits a warning instead of changing it."""
        # Given
        with SwimClient() as client:
            downlink = _EventDownlinkView(client)
            lane_uri = 'bar'
            downlink._is_open = True
            # When
            downlink.set_lane_uri(lane_uri)
            # Then
            self.assertEqual('Cannot execute "set_lane_uri" after the downlink has been opened!',
                             mock_warn.mock_calls[0][1][0])
async def test_downlink_view_route(self):
# Given
with SwimClient() as client:
downlink = _EventDownlinkView(client)
node_uri = 'boo/bar'
lane_uri = 'shop'
downlink.set_node_uri(node_uri)
downlink.set_lane_uri(lane_uri)
# When
actual = downlink.route
# Then
self.assertEqual(f'{node_uri}/{lane_uri}', actual)
@patch('websockets.connect', new_callable=MockWebsocketConnect)
async def test_downlink_view_open(self, mock_websocket_connect):
# Given
message = '@event(node:"boo/bar",lane:shop)'
MockWebsocket.get_mock_websocket().messages_to_send.append(message)
loop_class = ReceiveLoop()
MockWebsocket.get_mock_websocket().custom_recv_func = loop_class.recv_loop
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
downlink.set_host_uri('ws://127.0.0.1')
downlink.set_node_uri('boo/bar')
downlink.set_lane_uri('shop')
downlink.did_set(mock_did_set_confirmation)
# When
actual = downlink.open()
while loop_class.call_count == 0:
pass
# Then
self.assertTrue(actual._is_open)
self.assertFalse(actual._is_open)
self.assertTrue(mock_websocket_connect.called)
self.assertIsInstance(actual._model, _DownlinkModel)
    @patch('websockets.connect', new_callable=MockWebsocketConnect)
    async def test_downlink_view_close(self, mock_websocket_connect):
        """Closing an opened view clears its open flag while keeping the created model."""
        # Given
        message = '@event(node:"boo/bar",lane:shop)'
        MockWebsocket.get_mock_websocket().messages_to_send.append(message)
        loop_class = ReceiveLoop()
        MockWebsocket.get_mock_websocket().custom_recv_func = loop_class.recv_loop
        with SwimClient() as client:
            downlink = _ValueDownlinkView(client)
            downlink.set_host_uri('ws://127.0.0.1')
            downlink.set_node_uri('boo/bar')
            downlink.set_lane_uri('shop')
            downlink.did_set(mock_did_set_confirmation)
            downlink.open()
            # Busy-wait until the receive loop has run at least once.
            while loop_class.call_count == 0:
                pass
            # When
            actual = downlink.close()
            # Then
            self.assertFalse(actual._is_open)
            self.assertTrue(mock_websocket_connect.called)
            self.assertIsInstance(actual._model, _DownlinkModel)
async def test_downlink_view_get_registered_classes_from_self(self):
# Given
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
# When
mock_person_class = MockPerson
downlink.register_class(mock_person_class)
# Then
self.assertEqual(1, len(downlink.registered_classes))
self.assertEqual(mock_person_class, downlink.registered_classes.get('MockPerson'))
    @patch('websockets.connect', new_callable=MockWebsocketConnect)
    async def test_downlink_view_get_registered_classes_from_manager(self, mock_websocket_connect):
        """A class registered before open() is still visible once the manager is attached."""
        # Given
        message = '@event(node:"boo/bar",lane:shop)'
        MockWebsocket.get_mock_websocket().messages_to_send.append(message)
        loop_class = ReceiveLoop()
        MockWebsocket.get_mock_websocket().custom_recv_func = loop_class.recv_loop
        with SwimClient() as client:
            downlink = _ValueDownlinkView(client)
            downlink.set_host_uri('ws://127.0.0.1')
            downlink.set_node_uri('boo/bar')
            downlink.set_lane_uri('shop')
            downlink.did_set(mock_did_set_confirmation)
            # When
            mock_person_class = MockPerson
            downlink.register_class(mock_person_class)
            downlink.open()
            # Busy-wait until the receive loop has run at least once.
            while loop_class.call_count == 0:
                pass
            # Then
            self.assertEqual(1, len(downlink.registered_classes))
            self.assertTrue(mock_websocket_connect.called)
            self.assertEqual(mock_person_class, downlink.registered_classes.get('MockPerson'))
async def test_downlink_view_get_strict_from_self(self):
# Given
with SwimClient() as client:
# When
downlink = _ValueDownlinkView(client)
# Then
self.assertFalse(downlink.strict)
    @patch('websockets.connect', new_callable=MockWebsocketConnect)
    async def test_downlink_view_get_strict_from_manager(self, mock_websocket_connect):
        """strict remains False after open(), when the value is read through the manager."""
        # Given
        message = '@event(node:"boo/bar",lane:shop)'
        MockWebsocket.get_mock_websocket().messages_to_send.append(message)
        loop_class = ReceiveLoop()
        MockWebsocket.get_mock_websocket().custom_recv_func = loop_class.recv_loop
        with SwimClient() as client:
            downlink = _ValueDownlinkView(client)
            downlink.set_host_uri('ws://127.0.0.1')
            downlink.set_node_uri('boo/bar')
            downlink.set_lane_uri('shop')
            downlink.did_set(mock_did_set_confirmation)
            # When
            downlink.open()
            # Busy-wait until the receive loop has run at least once.
            while loop_class.call_count == 0:
                pass
            # Then
            self.assertFalse(downlink.strict)
            self.assertTrue(mock_websocket_connect.called)
async def test_downlink_view_set_strict_to_self(self):
# Given
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
# When
downlink.strict = True
# Then
self.assertTrue(downlink.strict)
    @patch('websockets.connect', new_callable=MockWebsocketConnect)
    async def test_downlink_view_set_strict_to_manager(self, mock_websocket_connect):
        """Setting strict after open() is applied and readable through the manager."""
        # Given
        message = '@event(node:"boo/bar",lane:shop)'
        MockWebsocket.get_mock_websocket().messages_to_send.append(message)
        loop_class = ReceiveLoop()
        MockWebsocket.get_mock_websocket().custom_recv_func = loop_class.recv_loop
        with SwimClient() as client:
            downlink = _ValueDownlinkView(client)
            downlink.set_host_uri('ws://127.0.0.1')
            downlink.set_node_uri('boo/bar')
            downlink.set_lane_uri('shop')
            downlink.did_set(mock_did_set_confirmation)
            downlink.open()
            # Busy-wait until the receive loop has run at least once.
            while loop_class.call_count == 0:
                pass
            # When
            downlink.strict = True
            # Then
            self.assertTrue(downlink.strict)
            self.assertTrue(mock_websocket_connect.called)
async def test_downlink_view_register_classes(self):
# Given
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
# When
mock_person_class = MockPerson
mock_pet_class = MockPet
downlink.register_classes([mock_person_class, mock_pet_class])
# Then
self.assertEqual(2, len(downlink.registered_classes))
self.assertEqual(mock_person_class, downlink.registered_classes.get('MockPerson'))
self.assertEqual(mock_pet_class, downlink.registered_classes.get('MockPet'))
async def test_downlink_view_register_class_self(self):
# Given
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
# When
mock_pet_class = MockPet
downlink.register_class(mock_pet_class)
# Then
self.assertEqual(1, len(downlink.registered_classes))
self.assertEqual(mock_pet_class, downlink.registered_classes.get('MockPet'))
    async def test_downlink_view_register_deregistered_class_self(self):
        """A class can be re-registered after being deregistered."""
        # Given
        with SwimClient() as client:
            downlink = _ValueDownlinkView(client)
            # When
            mock_pet_class = MockPet
            downlink.register_class(mock_pet_class)
            self.assertEqual(1, len(downlink.registered_classes))
            downlink.deregister_class(mock_pet_class)
            self.assertEqual(0, len(downlink.registered_classes))
            downlink.register_class(mock_pet_class)
            # Then
            self.assertEqual(1, len(downlink.registered_classes))
            self.assertEqual(mock_pet_class, downlink.registered_classes.get('MockPet'))
async def test_downlink_view_overwrite_register_class_self(self):
# Given
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
# When
mock_person_class = MockPerson
mock_second_person_class = NewScope.MockPerson
downlink.register_class(mock_person_class)
downlink.register_class(mock_second_person_class)
# Then
self.assertEqual(1, len(downlink.registered_classes))
self.assertEqual(mock_second_person_class, downlink.registered_classes.get('MockPerson'))
    @patch('websockets.connect', new_callable=MockWebsocketConnect)
    async def test_downlink_view_register_class_downlink_manager(self, mock_websocket_connect):
        """A class registered after open() is stored through the downlink manager."""
        # Given
        message = '@event(node:"boo/bar",lane:shop)'
        MockWebsocket.get_mock_websocket().messages_to_send.append(message)
        loop_class = ReceiveLoop()
        MockWebsocket.get_mock_websocket().custom_recv_func = loop_class.recv_loop
        with SwimClient() as client:
            downlink = _ValueDownlinkView(client)
            downlink.set_host_uri('ws://127.0.0.1')
            downlink.set_node_uri('boo/bar')
            downlink.set_lane_uri('shop')
            downlink.did_set(mock_did_set_confirmation)
            downlink.open()
            # Busy-wait until the receive loop has run at least once.
            while loop_class.call_count == 0:
                pass
            # When
            mock_person_class = MockPerson
            downlink.register_class(mock_person_class)
            # Then
            self.assertEqual(1, len(downlink.registered_classes))
            self.assertTrue(mock_websocket_connect.called)
            self.assertEqual(mock_person_class, downlink.registered_classes.get('MockPerson'))
    @patch('websockets.connect', new_callable=MockWebsocketConnect)
    async def test_downlink_view_overwrite_register_class_downlink_manager(self, mock_websocket_connect):
        """Re-registering a same-named class after open() overwrites the manager's entry."""
        # Given
        message = '@event(node:"boo/bar",lane:shop)'
        MockWebsocket.get_mock_websocket().messages_to_send.append(message)
        loop_class = ReceiveLoop()
        MockWebsocket.get_mock_websocket().custom_recv_func = loop_class.recv_loop
        with SwimClient() as client:
            downlink = _ValueDownlinkView(client)
            downlink.set_host_uri('ws://127.0.0.1')
            downlink.set_node_uri('boo/bar')
            downlink.set_lane_uri('shop')
            downlink.did_set(mock_did_set_confirmation)
            downlink.open()
            # Busy-wait until the receive loop has run at least once.
            while loop_class.call_count == 0:
                pass
            # When
            mock_person_class = MockPerson
            downlink.register_class(mock_person_class)
            mock_second_person_class = NewScope.MockPerson
            downlink.register_class(mock_second_person_class)
            # Then
            self.assertEqual(1, len(downlink.registered_classes))
            self.assertTrue(mock_websocket_connect.called)
            self.assertEqual(mock_second_person_class, downlink.registered_classes.get('MockPerson'))
    @patch('warnings.warn')
    async def test_downlink_view_register_class_exception_no_default_constructor(self, mock_warn):
        """Registering a class without a default constructor warns and does not register it."""
        # Given
        with SwimClient() as client:
            downlink = _ValueDownlinkView(client)
            # When
            mock_no_default_constructor = MockNoDefaultConstructor
            downlink.register_class(mock_no_default_constructor)
            # Then
            self.assertEqual(
                'Class "MockNoDefaultConstructor" must have a default constructor or default values for all arguments!',
                mock_warn.mock_calls[0][1][0])
            self.assertEqual(0, len(downlink.registered_classes))
async def test_downlink_view_deregister_all_classes_self(self):
# Given
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
mock_person_class = MockPerson
mock_pet_class = MockPet
mock_car_class = MockCar
downlink.register_class(mock_person_class)
downlink.register_class(mock_pet_class)
downlink.register_class(mock_car_class)
self.assertTrue(3, len(downlink.registered_classes))
# When
downlink.deregister_all_classes()
# Then
self.assertEqual(0, len(downlink.registered_classes))
@patch('websockets.connect', new_callable=MockWebsocketConnect)
async def test_downlink_view_deregister_all_classes_manager(self, mock_websocket_connect):
# Given
message = '@event(node:"boo/bar",lane:shop)'
MockWebsocket.get_mock_websocket().messages_to_send.append(message)
loop_class = ReceiveLoop()
MockWebsocket.get_mock_websocket().custom_recv_func = loop_class.recv_loop
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
downlink.set_host_uri('ws://127.0.0.1')
downlink.set_node_uri('boo/bar')
downlink.set_lane_uri('shop')
downlink.did_set(mock_did_set_confirmation)
downlink.open()
while loop_class.call_count == 0:
pass
mock_person_class = MockPerson
mock_pet_class = MockPet
mock_car_class = MockCar
downlink.register_class(mock_person_class)
downlink.register_class(mock_pet_class)
downlink.register_class(mock_car_class)
self.assertTrue(3, len(downlink.registered_classes))
# When
downlink.deregister_all_classes()
# Then
self.assertEqual(0, len(downlink.registered_classes))
self.assertTrue(mock_websocket_connect.called)
async def test_downlink_view_deregister_classes(self):
# Given
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
mock_person_class = MockPerson
mock_pet_class = MockPet
mock_car_class = MockCar
downlink.register_class(mock_person_class)
downlink.register_class(mock_pet_class)
downlink.register_class(mock_car_class)
self.assertTrue(3, len(downlink.registered_classes))
# When
downlink.deregister_classes([mock_pet_class, mock_person_class])
# Then
self.assertEqual(1, len(downlink.registered_classes))
self.assertEqual(mock_car_class, downlink.registered_classes.get('MockCar'))
async def test_downlink_view_deregister_class_self(self):
# Given
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
mock_person_class = MockPerson
mock_pet_class = MockPet
mock_car_class = MockCar
downlink.register_class(mock_person_class)
downlink.register_class(mock_pet_class)
downlink.register_class(mock_car_class)
self.assertTrue(3, len(downlink.registered_classes))
self.assertEqual(mock_car_class, downlink.registered_classes.get('MockCar'))
self.assertEqual(mock_pet_class, downlink.registered_classes.get('MockPet'))
self.assertEqual(mock_person_class, downlink.registered_classes.get('MockPerson'))
# When
downlink.deregister_class(mock_car_class)
# Then
self.assertEqual(2, len(downlink.registered_classes))
self.assertEqual(mock_pet_class, downlink.registered_classes.get('MockPet'))
self.assertEqual(mock_person_class, downlink.registered_classes.get('MockPerson'))
async def test_downlink_view_deregister_class_self_missing(self):
# Given
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
mock_person_class = MockPerson
mock_pet_class = MockPet
mock_car_class = MockCar
downlink.register_class(mock_person_class)
downlink.register_class(mock_pet_class)
self.assertTrue(2, len(downlink.registered_classes))
self.assertEqual(mock_person_class, downlink.registered_classes.get('MockPerson'))
self.assertEqual(mock_pet_class, downlink.registered_classes.get('MockPet'))
# When
downlink.deregister_class(mock_car_class)
# Then
self.assertEqual(2, len(downlink.registered_classes))
self.assertEqual(mock_person_class, downlink.registered_classes.get('MockPerson'))
self.assertEqual(mock_pet_class, downlink.registered_classes.get('MockPet'))
@patch('websockets.connect', new_callable=MockWebsocketConnect)
async def test_downlink_view_deregister_class_manager(self, mock_websocket_connect):
# Given
message = '@event(node:"boo/bar",lane:shop)'
MockWebsocket.get_mock_websocket().messages_to_send.append(message)
loop_class = ReceiveLoop()
MockWebsocket.get_mock_websocket().custom_recv_func = loop_class.recv_loop
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
downlink.set_host_uri('ws://127.0.0.1')
downlink.set_node_uri('boo/bar')
downlink.set_lane_uri('shop')
downlink.did_set(mock_did_set_confirmation)
downlink.open()
while loop_class.call_count == 0:
pass
mock_person_class = MockPerson
mock_pet_class = MockPet
mock_car_class = MockCar
downlink.register_class(mock_person_class)
downlink.register_class(mock_pet_class)
downlink.register_class(mock_car_class)
self.assertTrue(3, len(downlink.registered_classes))
# When
downlink.deregister_class(mock_car_class)
# Then
self.assertEqual(2, len(downlink.registered_classes))
self.assertTrue(mock_websocket_connect.called)
self.assertEqual(mock_person_class, downlink.registered_classes.get('MockPerson'))
self.assertEqual(mock_pet_class, downlink.registered_classes.get('MockPet'))
@patch('websockets.connect', new_callable=MockWebsocketConnect)
async def test_downlink_view_deregister_class_manager_missing(self, mock_websocket_connect):
# Given
message = '@event(node:"boo/bar",lane:shop)'
MockWebsocket.get_mock_websocket().messages_to_send.append(message)
loop_class = ReceiveLoop()
MockWebsocket.get_mock_websocket().custom_recv_func = loop_class.recv_loop
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
downlink.set_host_uri('ws://127.0.0.1')
downlink.set_node_uri('boo/bar')
downlink.set_lane_uri('shop')
downlink.did_set(mock_did_set_confirmation)
downlink.open()
while loop_class.call_count == 0:
pass
mock_person_class = MockPerson
mock_pet_class = MockPet
mock_car_class = MockCar
downlink.register_class(mock_person_class)
downlink.register_class(mock_pet_class)
self.assertTrue(2, len(downlink.registered_classes))
# When
downlink.deregister_class(mock_car_class)
# Then
self.assertEqual(2, len(downlink.registered_classes))
self.assertTrue(mock_websocket_connect.called)
self.assertEqual(mock_person_class, downlink.registered_classes.get('MockPerson'))
self.assertEqual(mock_pet_class, downlink.registered_classes.get('MockPet'))
    async def test_downlink_view_assign_manager(self):
        """_assign_manager copies the view's model, classes and strictness into the manager."""
        # Given
        connection = MockWebsocketConnect()
        with SwimClient() as client:
            downlink = _ValueDownlinkView(client)
            downlink.strict = True
            mock_person_class = MockPerson
            mock_car_class = MockCar
            downlink.register_class(mock_person_class)
            downlink.register_class(mock_car_class)
            manager = _DownlinkManager(connection)
            manager.downlink_model = MockModel()
            # When
            await downlink._assign_manager(manager)
            # Then
            self.assertEqual(manager.downlink_model, downlink._model)
            self.assertEqual(manager.registered_classes, downlink.registered_classes)
            self.assertEqual(manager.strict, downlink.strict)
            self.assertEqual(manager, downlink._downlink_manager)
            self.assertTrue(manager.strict)
            self.assertIsInstance(manager.downlink_model, MockModel)
            self.assertEqual(2, len(manager.registered_classes))
            self.assertEqual(mock_person_class, manager.registered_classes.get('MockPerson'))
            self.assertEqual(mock_car_class, manager.registered_classes.get('MockCar'))
    async def test_downlink_view_initialise_model(self):
        """_initalise_model copies the view's URIs, classes and strictness into manager and model."""
        # Given
        connection = MockWebsocketConnect()
        manager = _DownlinkManager(connection)
        with SwimClient() as client:
            model = _ValueDownlinkModel(client)
            downlink = _ValueDownlinkView(client)
            downlink.strict = True
            mock_person_class = MockPerson
            mock_car_class = MockCar
            downlink.register_class(mock_person_class)
            downlink.register_class(mock_car_class)
            host_uri = 'ws://host_uri'
            lane_uri = 'lane/uri'
            node_uri = 'node_uri'
            downlink.set_host_uri(host_uri)
            downlink.set_lane_uri(lane_uri)
            downlink.set_node_uri(node_uri)
            # When
            # NOTE(review): "_initalise_model" (sic) is the method's actual name
            # in the library API.
            await downlink._initalise_model(manager, model)
            # Then
            self.assertEqual(downlink.registered_classes, manager.registered_classes)
            self.assertEqual(downlink.strict, manager.strict)
            self.assertEqual(manager, model.downlink_manager)
            self.assertEqual(host_uri, model.host_uri)
            self.assertEqual(node_uri, model.node_uri)
            self.assertEqual(lane_uri, model.lane_uri)
            self.assertTrue(manager.strict)
            self.assertEqual(mock_person_class, manager.registered_classes.get('MockPerson'))
            self.assertEqual(mock_car_class, manager.registered_classes.get('MockCar'))
@patch('websockets.connect', new_callable=MockWebsocketConnect)
async def test_downlink_view_register_and_deregister_classes(self, mock_websocket_connect):
# Given
message = '@event(node:"boo/bar",lane:shop)'
MockWebsocket.get_mock_websocket().messages_to_send.append(message)
loop_class = ReceiveLoop()
MockWebsocket.get_mock_websocket().custom_recv_func = loop_class.recv_loop
mock_person_class = MockPerson
mock_pet_class = MockPet
mock_car_class = MockCar
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
downlink.set_host_uri('ws://127.0.0.1')
downlink.set_node_uri('boo/bar')
downlink.set_lane_uri('shop')
downlink.did_set(mock_did_set_confirmation)
downlink.register_class(mock_person_class)
downlink.register_class(mock_pet_class)
downlink.register_class(mock_car_class)
downlink.open()
while loop_class.call_count == 0:
pass
self.assertTrue(3, len(downlink.registered_classes))
# When
second_downlink = _ValueDownlinkView(client)
second_downlink.set_host_uri('ws://127.0.0.1')
second_downlink.set_node_uri('boo/bar')
second_downlink.set_lane_uri('shop')
second_downlink.deregister_class(mock_pet_class)
second_downlink.open()
self.assertTrue(2, len(downlink.registered_classes))
second_downlink.deregister_class(mock_car_class)
# Then
self.assertEqual(1, len(downlink.registered_classes))
self.assertEqual(1, len(second_downlink.registered_classes))
self.assertTrue(mock_websocket_connect.called)
self.assertEqual(mock_person_class, downlink.registered_classes.get('MockPerson'))
self.assertEqual(mock_person_class, second_downlink.registered_classes.get('MockPerson'))
@patch('websockets.connect', new_callable=MockWebsocketConnect)
async def test_downlink_view_register_and_deregister_all(self, mock_websocket_connect):
# Given
message = '@event(node:"boo/bar",lane:shop)'
MockWebsocket.get_mock_websocket().messages_to_send.append(message)
loop_class = ReceiveLoop()
MockWebsocket.get_mock_websocket().custom_recv_func = loop_class.recv_loop
mock_person_class = MockPerson
mock_pet_class = MockPet
mock_car_class = MockCar
with SwimClient() as client:
downlink = _ValueDownlinkView(client)
downlink.set_host_uri('ws://127.0.0.1')
downlink.set_node_uri('boo/bar')
downlink.set_lane_uri('shop')
downlink.did_set(mock_did_set_confirmation)
downlink.register_class(mock_person_class)
downlink.register_class(mock_pet_class)
downlink.register_class(mock_car_class)
downlink.open()
while loop_class.call_count == 0:
pass
self.assertTrue(3, len(downlink.registered_classes))
# When
second_downlink = _ValueDownlinkView(client)
second_downlink.set_host_uri('ws://127.0.0.1')
second_downlink.set_node_uri('boo/bar')
second_downlink.set_lane_uri('shop')
second_downlink.deregister_all_classes()
second_downlink.open()
# Then
self.assertEqual(0, len(downlink.registered_classes))
self.assertEqual(0, len(second_downlink.registered_classes))
self.assertTrue(mock_websocket_connect.called)
async def test_event_downlink_model_establish_downlink(self):
# Given
with SwimClient() as client:
downlink_model = _EventDownlinkModel(client)
downlink_model.node_uri = 'foo'
downlink_model.lane_uri = 'bar'
downlink_model.connection = MockConnection()
# When
await downlink_model._establish_downlink()
# Then
self.assertEqual(1, len(downlink_model.connection.messages_sent))
self.assertEqual('@link(node:foo,lane:bar)', downlink_model.connection.messages_sent[0])
async def test_event_downlink_model_received_synced(self):
# Given
client = SwimClient()
downlink_model = _EventDownlinkModel(client)
# When
with self.assertRaises(TypeError) as error:
await downlink_model._receive_synced()
# Then
message = error.exception.args[0]
self.assertEqual(message, 'Event downlink does not support synced responses!')
    async def test_event_downlink_receive_event_absent(self):
        """An event message with no body is delivered to the manager as an _Absent value."""
        # Given
        client = SwimClient()
        downlink_model = _EventDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        event_message = _EventMessage(node_uri='foo', lane_uri='bar')
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(1, mock_manager.called)
        self.assertIsInstance(mock_manager.event, _Absent)
    async def test_event_downlink_receive_event_text(self):
        """A Text event body is delivered to the manager as a plain string."""
        # Given
        client = SwimClient()
        downlink_model = _EventDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=Text.create_from('message'))
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(1, mock_manager.called)
        self.assertEqual('message', mock_manager.event)
    async def test_event_downlink_receive_event_num(self):
        """A Num event body is delivered to the manager as a plain number."""
        # Given
        client = SwimClient()
        downlink_model = _EventDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=Num.create_from(21))
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(1, mock_manager.called)
        self.assertEqual(21, mock_manager.event)
    async def test_event_downlink_receive_event_bool(self):
        """A Bool event body is delivered to the manager as a plain boolean."""
        # Given
        client = SwimClient()
        downlink_model = _EventDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=Bool.create_from(True))
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(1, mock_manager.called)
        self.assertEqual(True, mock_manager.event)
    async def test_event_downlink_receive_event_object(self):
        """A recon record tagged with a registered class name is deserialized into that object."""
        # Given
        client = SwimClient()
        downlink_model = _EventDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        recon_person = RecordMap.create()
        recon_person.add(Attr.create_attr('MockPerson', Value.extant()))
        recon_person.add(Slot.create_slot(Text.create_from('name'), Text.create_from('George')))
        recon_person.add(Slot.create_slot(Text.create_from('age'), Num.create_from(25)))
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=recon_person)
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(1, mock_manager.called)
        self.assertEqual(25, mock_manager.event.age)
        self.assertEqual('George', mock_manager.event.name)
async def test_event_downlink_view_register_manager(self):
    """Registering a manager stores both the manager and its model on the view."""
    # Given
    client = SwimClient()
    downlink_view = _EventDownlinkView(client)
    downlink_model = _EventDownlinkModel(client)
    connection = MockConnection()
    # noinspection PyTypeChecker
    manager = _DownlinkManager(connection)
    manager.downlink_model = downlink_model
    # When
    await downlink_view._register_manager(manager)
    # Then
    self.assertEqual(downlink_model, downlink_view._model)
    self.assertEqual(manager, downlink_view._downlink_manager)

async def test_event_downlink_view_create_downlink_model(self):
    """The created model inherits the view's URIs and the manager's strict/registered-class state."""
    # Given
    client = SwimClient()
    connection = MockConnection()
    # noinspection PyTypeChecker
    manager = _DownlinkManager(connection)
    host_uri = 'ws://foo.bar'
    node_uri = 'baz'
    lane_uri = 'qux'
    downlink_view = _EventDownlinkView(client)
    downlink_view.set_host_uri(host_uri)
    downlink_view.set_node_uri(node_uri)
    downlink_view.set_lane_uri(lane_uri)
    downlink_view.strict = True
    mock_person_class = MockPerson
    downlink_view.register_class(mock_person_class)
    # When
    actual = await downlink_view._create_downlink_model(manager)
    # Then
    self.assertIsInstance(actual, _EventDownlinkModel)
    self.assertEqual(client, actual.client)
    self.assertEqual(downlink_view.strict, actual.downlink_manager.strict)
    self.assertEqual(downlink_view.registered_classes, actual.downlink_manager.registered_classes)
    self.assertEqual(downlink_view._host_uri, actual.host_uri)
    self.assertEqual(downlink_view._node_uri, actual.node_uri)
    self.assertEqual(downlink_view._lane_uri, actual.lane_uri)
async def test_event_downlink_view_execute_on_event(self):
    """_execute_on_event schedules the registered on_event callback with the event value."""
    # Given
    with SwimClient() as client:
        downlink_view = _EventDownlinkView(client)
        mock_on_event = MockEventCallback()
        downlink_view._on_event_callback = mock_on_event.execute
        event = 20
        # When
        await downlink_view._execute_on_event(event)
        # Busy-wait until the callback task scheduled by the client has actually run.
        while not mock_on_event.called:
            pass
        # Then
        self.assertEqual(20, mock_on_event.event)
        self.assertTrue(mock_on_event.called)

async def test_event_downlink_view_execute_on_event_missing_callback(self):
    """Without a registered callback, _execute_on_event schedules no task at all."""
    # Given
    with SwimClient() as client:
        downlink_view = _EventDownlinkView(client)
        event = 20
        # When
        with patch('swimai.SwimClient._schedule_task') as mock_schedule_task:
            await downlink_view._execute_on_event(event)
        # Then
        self.assertFalse(mock_schedule_task.called)

async def test_event_downlink_view_set_on_event(self):
    """on_event accepts a plain function and stores it as the callback."""
    # Given
    client = SwimClient()
    downlink_view = _EventDownlinkView(client)
    # When
    downlink_view.on_event(mock_on_event_callback)
    # Then
    self.assertEqual(mock_on_event_callback, downlink_view._on_event_callback)

async def test_event_downlink_view_set_on_event_invalid(self):
    """on_event rejects non-callable arguments with a TypeError."""
    # Given
    client = SwimClient()
    downlink_view = _EventDownlinkView(client)
    # When
    with self.assertRaises(TypeError) as error:
        # noinspection PyTypeChecker
        downlink_view.on_event(222)
    # Then
    message = error.exception.args[0]
    self.assertEqual('Callback must be a coroutine or a function!', message)
async def test_create_value_downlink_model(self):
    """A new value downlink model starts unlinked, unsynced, and holding Value.absent()."""
    # Given
    with SwimClient() as client:
        # When
        actual = _ValueDownlinkModel(client)
        # Then
        self.assertIsInstance(actual, _ValueDownlinkModel)
        self.assertIsInstance(actual, _DownlinkModel)
        self.assertEqual(client, actual.client)
        self.assertFalse(actual.linked.is_set())
        self.assertFalse(actual._synced.is_set())
        self.assertFalse(actual._value)
        self.assertEqual(actual._value, Value.absent())

async def test_value_downlink_model_establish_downlink(self):
    """Establishing the downlink sends exactly one @sync envelope with the model's URIs."""
    # Given
    with SwimClient() as client:
        downlink_model = _ValueDownlinkModel(client)
        downlink_model.node_uri = 'foo'
        downlink_model.lane_uri = 'bar'
        downlink_model.connection = MockConnection()
        # When
        await downlink_model._establish_downlink()
        # Then
        self.assertEqual(1, len(downlink_model.connection.messages_sent))
        self.assertEqual('@sync(node:foo,lane:bar)', downlink_model.connection.messages_sent[0])

async def test_value_downlink_model_receive_synced(self):
    """_receive_synced sets the model's synced flag."""
    # Given
    with SwimClient() as client:
        downlink_model = _ValueDownlinkModel(client)
        # When
        await downlink_model._receive_synced()
        # Then
        self.assertTrue(downlink_model._synced.is_set())

async def test_value_downlink_model_receive_event_absent(self):
    """An event with no body leaves the value absent and reports absent old/new to did_set."""
    # Given
    with SwimClient() as client:
        downlink_model = _ValueDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        # When
        event_message = _EventMessage(node_uri='foo', lane_uri='bar')
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(Value.absent(), downlink_model._value)
        self.assertEqual(1, mock_manager.called)
        self.assertEqual(Value.absent(), mock_manager.did_set_new)
        self.assertEqual(Value.absent(), mock_manager.did_set_old)
async def test_value_downlink_model_receive_event_text(self):
    """A Text event updates the stored value and passes old/new values to did_set."""
    # Given
    with SwimClient() as client:
        downlink_model = _ValueDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        # When
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=Text.create_from('value_text'))
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual('value_text', downlink_model._value)
        self.assertEqual(1, mock_manager.called)
        self.assertEqual('value_text', mock_manager.did_set_new)
        self.assertEqual(Value.absent(), mock_manager.did_set_old)

async def test_value_downlink_model_receive_event_num(self):
    """Two successive Num events leave the last value stored and the previous one as did_set_old."""
    # Given
    with SwimClient() as client:
        downlink_model = _ValueDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        # When
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=Num.create_from(11))
        await downlink_model._receive_event(event_message)
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=Num.create_from(50))
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(50, downlink_model._value)
        self.assertEqual(2, mock_manager.called)
        self.assertEqual(50, mock_manager.did_set_new)
        self.assertEqual(11, mock_manager.did_set_old)

async def test_value_downlink_model_receive_event_bool(self):
    """A Bool event updates the stored value and notifies did_set."""
    # Given
    with SwimClient() as client:
        downlink_model = _ValueDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        # When
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=Bool.create_from(True))
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(True, downlink_model._value)
        self.assertEqual(1, mock_manager.called)
        self.assertEqual(True, mock_manager.did_set_new)
        self.assertEqual(Value.absent(), mock_manager.did_set_old)

async def test_value_downlink_model_receive_event_object(self):
    """A @MockPerson RecordMap event is deserialized; the object reaches both the model and did_set."""
    # Given
    with SwimClient() as client:
        downlink_model = _ValueDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        recon_person = RecordMap.create()
        recon_person.add(Attr.create_attr('MockPerson', Value.extant()))
        recon_person.add(Slot.create_slot(Text.create_from('name'), Text.create_from('Peter')))
        recon_person.add(Slot.create_slot(Text.create_from('age'), Num.create_from(90)))
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=recon_person)
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual('Peter', downlink_model._value.name)
        self.assertEqual(90, downlink_model._value.age)
        self.assertEqual(1, mock_manager.called)
        self.assertEqual('Peter', mock_manager.did_set_new.name)
        self.assertEqual(90, mock_manager.did_set_new.age)
        self.assertEqual(Value.absent(), mock_manager.did_set_old)
async def test_value_downlink_model_send_message(self):
    """_send_message serializes the event message to Recon and writes it to the connection."""
    # Given
    with SwimClient() as client:
        downlink_model = _ValueDownlinkModel(client)
        downlink_model.connection = MockConnection.get_mock_connection()
        downlink_model.connection.owner = downlink_model
        downlink_model.linked.set()
        recon_person = RecordMap.create()
        recon_person.add(Attr.create_attr('MockPerson', Value.extant()))
        recon_person.add(Slot.create_slot(Text.create_from('name'), Text.create_from('Peter')))
        recon_person.add(Slot.create_slot(Text.create_from('age'), Num.create_from(90)))
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=recon_person)
        # When
        await downlink_model._send_message(event_message)
        # Then
        self.assertEqual('@event(node:foo,lane:bar)@MockPerson{name:Peter,age:90}',
                         MockConnection.get_mock_connection().messages_sent[0])

async def test_value_downlink_model_get_value(self):
    """_get_value returns the stored value once the model is synced."""
    # Given
    with SwimClient() as client:
        downlink_model = _ValueDownlinkModel(client)
        downlink_model.connection = MockConnection.get_mock_connection()
        downlink_model.connection.owner = downlink_model
        downlink_model._value = 'Test_Value'
        downlink_model._synced.set()
        # When
        actual = await downlink_model._get_value()
        # Then
        self.assertEqual('Test_Value', actual)

async def test_create_value_downlink_view(self):
    """A new value downlink view starts closed, non-strict, uninitialised, with no did_set callback."""
    # Given
    with SwimClient() as client:
        # When
        actual = _ValueDownlinkView(client)
        # Then
        self.assertIsInstance(actual, _ValueDownlinkView)
        self.assertIsInstance(actual, _DownlinkView)
        self.assertEqual(client, actual._client)
        self.assertFalse(actual._is_open)
        self.assertFalse(actual.strict)
        self.assertIsNone(actual._did_set_callback)
        self.assertFalse(actual._initialised.is_set())
async def test_value_downlink_view_register_manager_first_time(self):
    """First registration stores manager and model and marks the view initialised."""
    # Given
    with SwimClient() as client:
        downlink_view = _ValueDownlinkView(client)
        downlink_model = _ValueDownlinkModel(client)
        connection = MockConnection()
        # noinspection PyTypeChecker
        manager = _DownlinkManager(connection)
        manager.downlink_model = downlink_model
        # When
        await downlink_view._register_manager(manager)
        # Then
        self.assertTrue(downlink_view._initialised.is_set())
        self.assertEqual(downlink_model, downlink_view._model)
        self.assertEqual(manager, downlink_view._downlink_manager)

async def test_value_downlink_view_register_manager_already_existing(self):
    """Registering with an OPEN manager replays the current value to the did_set callback."""
    # Given
    with SwimClient() as client:
        downlink_view = _ValueDownlinkView(client)
        mock_did_set = MockDidSetCallback()
        downlink_view.did_set(mock_did_set.execute)
        downlink_model = _ValueDownlinkModel(client)
        downlink_model._value = 'Foo'
        connection = MockConnection()
        # noinspection PyTypeChecker
        manager = _DownlinkManager(connection)
        manager.downlink_model = downlink_model
        manager.status = _DownlinkManagerStatus.OPEN
        # When
        await downlink_view._register_manager(manager)
        # Busy-wait until the replayed did_set task has run.
        while not mock_did_set.called:
            pass
        # Then
        self.assertTrue(downlink_view._initialised.is_set())
        self.assertEqual(downlink_model, downlink_view._model)
        self.assertEqual(manager, downlink_view._downlink_manager)
        self.assertTrue(mock_did_set.called)
        self.assertEqual('Foo', mock_did_set.new_value)
        self.assertEqual(Value.absent(), mock_did_set.old_value)

async def test_value_downlink_view_create_downlink_model(self):
    """The created value model inherits the view's host/node/lane URIs and the manager."""
    # Given
    with SwimClient() as client:
        downlink_view = _ValueDownlinkView(client)
        connection = MockConnection()
        host_uri = 'ws://value.downlink'
        node_uri = 'value'
        lane_uri = 'downlink'
        downlink_view.set_host_uri(host_uri)
        downlink_view.set_node_uri(node_uri)
        downlink_view.set_lane_uri(lane_uri)
        # noinspection PyTypeChecker
        manager = _DownlinkManager(connection)
        # When
        actual = await downlink_view._create_downlink_model(manager)
        # Then
        self.assertIsInstance(actual, _ValueDownlinkModel)
        self.assertEqual(client, actual.client)
        self.assertEqual(host_uri, actual.host_uri)
        self.assertEqual(node_uri, actual.node_uri)
        self.assertEqual(lane_uri, actual.lane_uri)
        self.assertEqual(manager, actual.downlink_manager)
async def test_value_downlink_view_did_set_valid(self):
    """did_set accepts a plain function and stores it as the callback."""
    # Given
    client = SwimClient()
    downlink_view = _ValueDownlinkView(client)
    function = mock_did_set_callback
    # When
    downlink_view.did_set(function)
    # Then
    # BUG FIX: was assertTrue(function, ...), where the second argument is the
    # failure *message*, so the assertion passed vacuously. assertEqual checks
    # the callback was actually stored.
    self.assertEqual(function, downlink_view._did_set_callback)

async def test_value_downlink_view_did_set_invalid(self):
    """did_set rejects non-callable arguments with a TypeError."""
    # Given
    client = SwimClient()
    downlink_view = _ValueDownlinkView(client)
    function = 111
    # When
    with self.assertRaises(TypeError) as error:
        # noinspection PyTypeChecker
        downlink_view.did_set(function)
    # Then
    message = error.exception.args[0]
    self.assertEqual(message, 'Callback must be a coroutine or a function!')

async def test_value_downlink_view_value_no_model(self):
    """Without a model attached, the view's _value is Value.absent()."""
    # Given
    with SwimClient() as client:
        downlink_view = _ValueDownlinkView(client)
        # When
        actual = downlink_view._value
        # Then
        self.assertEqual(Value.absent(), actual)

async def test_value_downlink_view_value_with_model(self):
    """With a model attached, the view's _value mirrors the model's value."""
    # Given
    with SwimClient() as client:
        downlink_view = _ValueDownlinkView(client)
        downlink_view._model = _ValueDownlinkModel(client)
        downlink_view._model._value = 50
        # When
        actual = downlink_view._value
        # Then
        self.assertEqual(50, actual)
async def test_value_downlink_view_get_immediate(self):
    """get(False) returns the model's current value without waiting for sync."""
    # Given
    client = SwimClient()
    downlink_view = _ValueDownlinkView(client)
    downlink_model = _ValueDownlinkModel(client)
    downlink_model._value = 41
    downlink_view._model = downlink_model
    downlink_view._is_open = True
    # When
    actual = downlink_view.get(False)
    # Then
    self.assertEqual(41, actual)

async def test_value_downlink_view_get_with_wait(self):
    """get(wait_sync=True) returns the value once the model is synced and the view initialised."""
    # Given
    with SwimClient() as client:
        downlink_view = _ValueDownlinkView(client)
        downlink_model = _ValueDownlinkModel(client)
        downlink_model._value = 'Some text'
        downlink_model._synced.set()
        downlink_view._model = downlink_model
        downlink_view._initialised.set()
        downlink_view._is_open = True
        # When
        actual = downlink_view.get(wait_sync=True)
        # Then
        self.assertEqual('Some text', actual)

@patch('warnings.warn')
async def test_value_downlink_view_get_before_open(self, mock_warn):
    """Calling get() before open() only emits a warning."""
    # Given
    with SwimClient() as client:
        downlink_view = _ValueDownlinkView(client)
        # When
        downlink_view.get()
        # Then
        self.assertEqual('Cannot execute "get" before the downlink has been opened!', mock_warn.call_args_list[0][0][0])
@patch('concurrent.futures._base.Future.result')
async def test_value_downlink_view_set_blocking(self, mock_result):
    """set(..., blocking=True) sends a @command envelope and waits on the task's Future."""
    # Given
    with SwimClient() as client:
        node_uri = 'bar_node'
        lane_uri = 'foo_lane'
        downlink_model = _ValueDownlinkModel(client)
        downlink_model.linked.set()
        mock_connection = MockConnection()
        downlink_model.connection = mock_connection
        downlink_model.node_uri = node_uri
        downlink_model.lane_uri = lane_uri
        downlink_view = _ValueDownlinkView(client)
        downlink_view._initialised.set()
        downlink_view._model = downlink_model
        downlink_view._node_uri = node_uri
        downlink_view._lane_uri = lane_uri
        downlink_view._is_open = True
        # When
        downlink_view.set(66, blocking=True)
        # Then
        self.assertEqual('@command(node:bar_node,lane:foo_lane)66', mock_connection.messages_sent[0])
        self.assertTrue(mock_result.called)

@patch('concurrent.futures._base.Future.result')
async def test_value_downlink_view_set_non_blocking(self, mock_result):
    """set(..., blocking=False) sends the @command envelope without waiting on the Future."""
    # Given
    with SwimClient() as client:
        node_uri = 'bar_node'
        lane_uri = 'foo_lane'
        downlink_model = _ValueDownlinkModel(client)
        downlink_model.linked.set()
        mock_connection = MockConnection()
        downlink_model.connection = mock_connection
        downlink_model.node_uri = node_uri
        downlink_model.lane_uri = lane_uri
        downlink_view = _ValueDownlinkView(client)
        downlink_view._initialised.set()
        downlink_view._model = downlink_model
        downlink_view._node_uri = node_uri
        downlink_view._lane_uri = lane_uri
        downlink_view._is_open = True
        # When
        downlink_view.set(66, blocking=False)
        # Then
        self.assertEqual('@command(node:bar_node,lane:foo_lane)66', mock_connection.messages_sent[0])
        self.assertFalse(mock_result.called)

@patch('warnings.warn')
async def test_value_downlink_view_set_before_open(self, mock_warn):
    """Calling set() before open() only emits a warning."""
    # Given
    with SwimClient() as client:
        downlink_view = _ValueDownlinkView(client)
        # When
        downlink_view.set('value')
        # Then
        self.assertEqual('Cannot execute "set" before the downlink has been opened!', mock_warn.call_args_list[0][0][0])
async def test_value_downlink_view_execute_did_set(self):
    """_execute_did_set schedules the did_set callback with new and old values."""
    # Given
    with SwimClient() as client:
        downlink_view = _ValueDownlinkView(client)
        mock_did_set = MockDidSetCallback()
        downlink_view._did_set_callback = mock_did_set.execute
        new_value = 'Test_new_value'
        old_value = 'Test_old_value'
        # When
        await downlink_view._execute_did_set(new_value, old_value)
        # Busy-wait until the scheduled callback task has run.
        while not mock_did_set.called:
            pass
        # Then
        self.assertEqual(new_value, mock_did_set.new_value)
        self.assertEqual(old_value, mock_did_set.old_value)

async def test_value_downlink_view_execute_did_set_no_callback(self):
    """Without a did_set callback, _execute_did_set schedules no task."""
    # Given
    with SwimClient() as client:
        downlink_view = _ValueDownlinkView(client)
        new_value = 'Test_new_value'
        old_value = 'Test_old_value'
        # When
        with patch('swimai.SwimClient._schedule_task') as mock_schedule_task:
            await downlink_view._execute_did_set(new_value, old_value)
        # Then
        self.assertFalse(mock_schedule_task.called)

async def test_value_downlink_view_send_message(self):
    """_send_message builds a @command envelope from the view's URIs and the raw value."""
    # Given
    with SwimClient() as client:
        downlink_model = _ValueDownlinkModel(client)
        downlink_model.linked.set()
        mock_connection = MockConnection()
        downlink_model.connection = mock_connection
        downlink_view = _ValueDownlinkView(client)
        downlink_view._initialised.set()
        downlink_view._model = downlink_model
        downlink_view._node_uri = 'node_bar'
        downlink_view._lane_uri = 'lane_baz'
        # When
        await downlink_view._send_message(2020)
        # Then
        self.assertEqual('@command(node:node_bar,lane:lane_baz)2020', mock_connection.messages_sent[0])
async def test_create_map_downlink_model(self):
    """A new map downlink model starts unlinked, unsynced, and with an empty dict backing map."""
    # Given
    with SwimClient() as client:
        # When
        actual = _MapDownlinkModel(client)
        # Then
        self.assertIsInstance(actual, _MapDownlinkModel)
        self.assertIsInstance(actual, _DownlinkModel)
        self.assertEqual(client, actual.client)
        self.assertFalse(actual.linked.is_set())
        self.assertIsInstance(actual._map, dict)
        self.assertFalse(actual._synced.is_set())

async def test_map_downlink_model_establish_downlink(self):
    """Establishing the downlink sends exactly one @sync envelope with the model's URIs."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        downlink_model.node_uri = 'dog'
        downlink_model.lane_uri = 'bark'
        downlink_model.connection = MockConnection()
        # When
        await downlink_model._establish_downlink()
        # Then
        self.assertEqual(1, len(downlink_model.connection.messages_sent))
        self.assertEqual('@sync(node:dog,lane:bark)', downlink_model.connection.messages_sent[0])

async def test_map_downlink_model_receive_synced(self):
    """_receive_synced sets the model's synced flag."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        # When
        await downlink_model._receive_synced()
        # Then
        self.assertTrue(downlink_model._synced.is_set())
async def test_map_downlink_model_receive_event_update_primitive(self):
    """An @update event with a primitive key calls did_update with absent old value."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        downlink_model._synced.set()
        update_request = UpdateRequest('Elliot', 29)
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=update_request.to_record())
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(1, mock_manager.called)
        self.assertEqual('Elliot', mock_manager.update_key)
        self.assertEqual(29, mock_manager.update_value_new)
        self.assertEqual(Value.absent(), mock_manager.update_value_old)

async def test_map_downlink_model_receive_event_update_object(self):
    """An @update event with an object key deserializes the key via registered_classes."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        mock_manager.registered_classes['MockPerson'] = MockPerson
        downlink_model.downlink_manager = mock_manager
        downlink_model._synced.set()
        person = MockPerson(name='Elliot', age=29)
        update_request = UpdateRequest(person, 'Hello')
        event_message = _EventMessage(node_uri='baz', lane_uri='qux', body=update_request.to_record())
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(1, mock_manager.called)
        self.assertEqual(person.name, mock_manager.update_key.name)
        self.assertEqual(person.age, mock_manager.update_key.age)
        self.assertEqual('Hello', mock_manager.update_value_new)
        self.assertEqual(Value.absent(), mock_manager.update_value_old)

async def test_map_downlink_model_receive_event_update_primitive_existing(self):
    """Updating an existing primitive key reports the previous value as update_value_old."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        downlink_model._synced.set()
        downlink_model._map['Elliot'] = ('Elliot', 11)
        update_request = UpdateRequest('Elliot', 29)
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=update_request.to_record())
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(1, mock_manager.called)
        self.assertEqual('Elliot', mock_manager.update_key)
        self.assertEqual(29, mock_manager.update_value_new)
        self.assertEqual(11, mock_manager.update_value_old)

async def test_map_downlink_model_receive_event_update_object_existing(self):
    """Updating an existing object key (keyed by its Recon string) reports the old value."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        mock_manager.registered_classes['MockPerson'] = MockPerson
        downlink_model.downlink_manager = mock_manager
        downlink_model._synced.set()
        person = MockPerson(name='Elliot', age=29)
        downlink_model._map['@MockPerson{name:Elliot,age:29}'] = (person, 'bar')
        update_request = UpdateRequest(person, 'Hello')
        event_message = _EventMessage(node_uri='baz', lane_uri='qux', body=update_request.to_record())
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(1, mock_manager.called)
        self.assertEqual(person.name, mock_manager.update_key.name)
        self.assertEqual(person.age, mock_manager.update_key.age)
        self.assertEqual('Hello', mock_manager.update_value_new)
        self.assertEqual('bar', mock_manager.update_value_old)
async def test_map_downlink_model_receive_event_remove_primitive(self):
    """A @remove event for an existing primitive key reports the removed value."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        downlink_model._synced.set()
        downlink_model._map = {'a': ('a', 1), 'b': ('b', 2), 'c': ('c', 3), 'd': ('d', 4), 'e': ('e', 5)}
        remove_request = RemoveRequest('b')
        event_message = _EventMessage(node_uri='baz', lane_uri='qux', body=remove_request.to_record())
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(1, mock_manager.called)
        self.assertEqual('b', mock_manager.remove_key)
        self.assertEqual(2, mock_manager.remove_old_value)

async def test_map_downlink_model_receive_event_remove_object(self):
    """A @remove event keyed by an object's Recon string removes that entry."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        downlink_model._synced.set()
        first_person = MockPerson(name='Foo', age=1)
        second_person = MockPerson(name='Bar', age=2)
        downlink_model._map = {'@MockPerson{name:Foo,age:1}': (first_person, 'a'),
                               '@MockPerson{name:Bar,age:2}': (second_person, 'b')}
        remove_request = RemoveRequest(first_person)
        event_message = _EventMessage(node_uri='baz', lane_uri='qux', body=remove_request.to_record())
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(1, mock_manager.called)
        self.assertEqual(first_person.name, mock_manager.remove_key.name)
        self.assertEqual(first_person.age, mock_manager.remove_key.age)
        self.assertEqual('a', mock_manager.remove_old_value)

async def test_map_downlink_model_receive_event_remove_missing(self):
    """Removing a key that is not in the map reports Value.absent() as the old value."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        # noinspection PyTypeChecker
        mock_manager = MockDownlinkManager()
        downlink_model.downlink_manager = mock_manager
        downlink_model._synced.set()
        remove_request = RemoveRequest('b')
        event_message = _EventMessage(node_uri='baz', lane_uri='qux', body=remove_request.to_record())
        # When
        await downlink_model._receive_event(event_message)
        # Then
        self.assertEqual(1, mock_manager.called)
        self.assertEqual('b', mock_manager.remove_key)
        self.assertEqual(Value.absent(), mock_manager.remove_old_value)
async def test_map_downlink_model_send_message(self):
    """_send_message serializes an @update event to Recon and writes it to the connection."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        downlink_model.connection = MockConnection.get_mock_connection()
        downlink_model.connection.owner = downlink_model
        downlink_model.linked.set()
        update_request = UpdateRequest('Elliot', 29)
        event_message = _EventMessage(node_uri='foo', lane_uri='bar', body=update_request.to_record())
        # When
        await downlink_model._send_message(event_message)
        # Then
        self.assertEqual('@event(node:foo,lane:bar)@update(key:Elliot)29',
                         MockConnection.get_mock_connection().messages_sent[0])

async def test_map_downlink_model_get_value_with_key(self):
    """_get_value returns the value stored under an existing key once synced."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        downlink_model._synced.set()
        downlink_model._map = {'a': ('a', 1), 'b': ('b', 2), 'c': ('c', 3), 'd': ('d', 4), 'e': ('e', 5)}
        # When
        actual = await downlink_model._get_value('b')
        # Then
        self.assertEqual(2, actual)

async def test_map_downlink_model_get_value_with_key_missing(self):
    """_get_value returns Value.absent() for a missing key."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        downlink_model._synced.set()
        downlink_model._map = {'a': ('a', 1), 'b': ('b', 2), 'c': ('c', 3), 'd': ('d', 4), 'e': ('e', 5)}
        # When
        actual = await downlink_model._get_value('f')
        # Then
        self.assertEqual(Value.absent(), actual)
async def test_map_downlink_model_get_values_primitive(self):
    """_get_values returns all (key, value) pairs as a list, preserving insertion order."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        downlink_model._synced.set()
        downlink_model._map = {'a': ('a', 1), 'b': ('b', 2), 'c': ('c', 3), 'd': ('d', 4), 'e': ('e', 5)}
        # When
        actual = await downlink_model._get_values()
        # Then
        self.assertIsInstance(actual, list)
        self.assertEqual(5, len(actual))
        self.assertEqual('a', actual[0][0])
        self.assertEqual(1, actual[0][1])
        self.assertEqual('b', actual[1][0])
        self.assertEqual(2, actual[1][1])
        self.assertEqual('c', actual[2][0])
        self.assertEqual(3, actual[2][1])
        self.assertEqual('d', actual[3][0])
        self.assertEqual(4, actual[3][1])
        self.assertEqual('e', actual[4][0])
        self.assertEqual(5, actual[4][1])

async def test_map_downlink_model_get_values_objects(self):
    """_get_values returns the stored object keys themselves, not their Recon strings."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        downlink_model._synced.set()
        first_person = MockPerson(name='Foo', age=1)
        second_person = MockPerson(name='Bar', age=2)
        downlink_model._map = {'@MockPerson{name:Foo,age:1}': (first_person, 'a'),
                               '@MockPerson{name:Bar,age:2}': (second_person, 'b')}
        # When
        actual = await downlink_model._get_values()
        # Then
        self.assertIsInstance(actual, list)
        self.assertEqual(2, len(actual))
        self.assertEqual(first_person, actual[0][0])
        self.assertEqual('a', actual[0][1])
        self.assertEqual(second_person, actual[1][0])
        self.assertEqual('b', actual[1][1])

async def test_map_downlink_model_get_values_empty(self):
    """_get_values on an empty map returns an empty list."""
    # Given
    with SwimClient() as client:
        downlink_model = _MapDownlinkModel(client)
        downlink_model._synced.set()
        # When
        actual = await downlink_model._get_values()
        # Then
        self.assertIsInstance(actual, list)
        self.assertEqual(0, len(actual))
async def test_map_downlink_view(self):
    """A new map downlink view starts closed, non-strict, uninitialised, with no callbacks."""
    # Given
    with SwimClient() as client:
        # When
        actual = _MapDownlinkView(client)
        # Then
        self.assertIsInstance(actual, _MapDownlinkView)
        self.assertIsInstance(actual, _DownlinkView)
        self.assertEqual(client, actual._client)
        self.assertFalse(actual._is_open)
        self.assertFalse(actual.strict)
        self.assertIsNone(actual._did_update_callback)
        self.assertIsNone(actual._did_remove_callback)
        self.assertFalse(actual._initialised.is_set())

async def test_map_downlink_view_register_manager_first_time(self):
    """First registration stores manager and model and marks the view initialised."""
    # Given
    with SwimClient() as client:
        downlink_view = _MapDownlinkView(client)
        connection = MockConnection()
        # NOTE(review): this builds a _ValueDownlinkModel for a _MapDownlinkView —
        # presumably registration is model-type agnostic; confirm this is intended.
        downlink_model = _ValueDownlinkModel(client)
        # noinspection PyTypeChecker
        manager = _DownlinkManager(connection)
        manager.downlink_model = downlink_model
        # When
        await downlink_view._register_manager(manager)
        # Then
        self.assertTrue(downlink_view._initialised.is_set())
        self.assertEqual(downlink_model, downlink_view._model)
        self.assertEqual(manager, downlink_view._downlink_manager)

async def test_map_downlink_view_register_manager_already_existing(self):
    """Registering with an OPEN manager replays existing map entries to did_update."""
    # Given
    with SwimClient() as client:
        downlink_view = _MapDownlinkView(client)
        mock_did_update = MockDidUpdateCallback()
        downlink_view.did_update(mock_did_update.execute)
        downlink_model = _MapDownlinkModel(client)
        downlink_model._map = {'a': ('a', 1)}
        connection = MockConnection()
        # noinspection PyTypeChecker
        manager = _DownlinkManager(connection)
        manager.downlink_model = downlink_model
        manager.status = _DownlinkManagerStatus.OPEN
        # When
        await downlink_view._register_manager(manager)
        # Busy-wait until the replayed did_update task has run.
        while not mock_did_update.called:
            pass
        # Then
        self.assertTrue(downlink_view._initialised.is_set())
        self.assertEqual(downlink_model, downlink_view._model)
        self.assertEqual(manager, downlink_view._downlink_manager)
        self.assertTrue(mock_did_update.called)
        self.assertEqual('a', mock_did_update.key)
        self.assertEqual(1, mock_did_update.new_value)
        self.assertEqual(Value.absent(), mock_did_update.old_value)
async def test_map_downlink_view_create_downlink_model(self):
    """The created map model inherits the view's host/node/lane URIs and the manager."""
    # Given
    with SwimClient() as client:
        downlink_view = _MapDownlinkView(client)
        host_uri = 'ws://127.0.0.1'
        node_uri = 'map_node'
        lane_uri = 'map_lane'
        downlink_view._host_uri = host_uri
        downlink_view._node_uri = node_uri
        downlink_view._lane_uri = lane_uri
        connection = MockConnection()
        # noinspection PyTypeChecker
        manager = _DownlinkManager(connection)
        # When
        actual = await downlink_view._create_downlink_model(manager)
        # Then
        self.assertIsInstance(actual, _MapDownlinkModel)
        self.assertEqual(client, actual.client)
        self.assertEqual(host_uri, actual.host_uri)
        self.assertEqual(node_uri, actual.node_uri)
        self.assertEqual(lane_uri, actual.lane_uri)
        self.assertEqual(manager, actual.downlink_manager)

async def test_map_downlink_view_map_no_model(self):
    """_map lookup without a model attached returns Value.absent()."""
    # Given
    with SwimClient() as client:
        downlink_view = _MapDownlinkView(client)
        key = 'foo'
        # When
        actual = downlink_view._map(key)
        # Then
        self.assertEqual(Value.absent(), actual)

async def test_map_downlink_view_map_key_existing(self):
    """_map lookup for an existing key returns its value."""
    # Given
    with SwimClient() as client:
        downlink_view = _MapDownlinkView(client)
        model = _MapDownlinkModel(client)
        model._map = {'a': ('a', 1), 'b': ('b', 2), 'c': ('c', 3), 'd': ('d', 4), 'e': ('e', 5)}
        key = 'c'
        downlink_view._model = model
        # When
        actual = downlink_view._map(key)
        # Then
        self.assertEqual(3, actual)

async def test_map_downlink_view_map_key_missing(self):
    """_map lookup for a missing key returns Value.absent()."""
    # Given
    with SwimClient() as client:
        downlink_view = _MapDownlinkView(client)
        model = _MapDownlinkModel(client)
        model._map = {'a': ('a', 1), 'b': ('b', 2), 'c': ('c', 3), 'd': ('d', 4), 'e': ('e', 5)}
        key = 'n'
        downlink_view._model = model
        # When
        actual = downlink_view._map(key)
        # Then
        self.assertEqual(Value.absent(), actual)
async def test_map_downlink_view_map_all(self):
    """_map(None) returns all (key, value) pairs from the model."""
    # Given
    with SwimClient() as client:
        downlink_view = _MapDownlinkView(client)
        model = _MapDownlinkModel(client)
        model._map = {'a': ('a', 1), 'b': ('b', 2), 'c': ('c', 3), 'd': ('d', 4), 'e': ('e', 5)}
        downlink_view._model = model
        # When
        actual = downlink_view._map(None)
        # Then
        self.assertEqual(5, len(actual))
        self.assertEqual('a', actual[0][0])
        self.assertEqual(1, actual[0][1])
        self.assertEqual('b', actual[1][0])
        self.assertEqual(2, actual[1][1])
        self.assertEqual('c', actual[2][0])
        self.assertEqual(3, actual[2][1])
        self.assertEqual('d', actual[3][0])
        self.assertEqual(4, actual[3][1])
        self.assertEqual('e', actual[4][0])
        self.assertEqual(5, actual[4][1])

async def test_map_downlink_view_get_immediate(self):
    """get(key) on an open view returns the value without waiting for sync."""
    # Given
    with SwimClient() as client:
        downlink_view = _MapDownlinkView(client)
        downlink_view._is_open = True
        model = _MapDownlinkModel(client)
        model._map = {'a': ('a', 1), 'b': ('b', 2), 'c': ('c', 3), 'd': ('d', 4), 'e': ('e', 5)}
        downlink_view._model = model
        # When
        actual = downlink_view.get('a')
        # Then
        self.assertEqual(1, actual)

async def test_map_downlink_view_get_with_wait(self):
    """get(key, wait_sync=True) returns the value once the model is synced."""
    # Given
    with SwimClient() as client:
        downlink_view = _MapDownlinkView(client)
        downlink_view._is_open = True
        downlink_view._initialised.set()
        model = _MapDownlinkModel(client)
        model._synced.set()
        model._map = {'a': ('a', 1), 'b': ('b', 2), 'c': ('c', 3), 'd': ('d', 4), 'e': ('e', 5)}
        downlink_view._model = model
        # When
        actual = downlink_view.get('d', wait_sync=True)
        # Then
        self.assertEqual(4, actual)
async def test_map_downlink_view_get_immediate_absent(self):
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
downlink_view._is_open = True
model = _MapDownlinkModel(client)
downlink_view._model = model
# When
actual = downlink_view.get('a')
# Then
self.assertEqual(Value.absent(), actual)
async def test_map_downlink_view_get_with_wait_absent(self):
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
downlink_view._is_open = True
downlink_view._initialised.set()
model = _MapDownlinkModel(client)
model._synced.set()
downlink_view._model = model
# When
actual = downlink_view.get('d', wait_sync=True)
# Then
self.assertEqual(Value.absent(), actual)
@patch('warnings.warn')
async def test_map_downlink_view_get_before_open(self, mock_warn):
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
# When
downlink_view.get('a')
# Then
self.assertEqual('Cannot execute "get" before the downlink has been opened!', mock_warn.call_args_list[0][0][0])
async def test_map_downlink_view_get_all_immediate(self):
"""get_all() on an open downlink returns every (key, value) pair immediately."""
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
downlink_view._is_open = True
model = _MapDownlinkModel(client)
model._map = {'a': ('a', 1), 'b': ('b', 2), 'c': ('c', 3), 'd': ('d', 4), 'e': ('e', 5)}
downlink_view._model = model
# When
actual = downlink_view.get_all()
# Then
self.assertEqual('a', actual[0][0])
self.assertEqual(1, actual[0][1])
self.assertEqual('b', actual[1][0])
self.assertEqual(2, actual[1][1])
self.assertEqual('c', actual[2][0])
self.assertEqual(3, actual[2][1])
self.assertEqual('d', actual[3][0])
self.assertEqual(4, actual[3][1])
self.assertEqual('e', actual[4][0])
self.assertEqual(5, actual[4][1])
async def test_map_downlink_view_get_all_with_wait(self):
"""get_all(wait_sync=True) returns every pair once the model is synced."""
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
downlink_view._is_open = True
# _initialised and _synced must both be set so wait_sync does not block.
downlink_view._initialised.set()
model = _MapDownlinkModel(client)
model._synced.set()
model._map = {'a': ('a', 1), 'b': ('b', 2), 'c': ('c', 3), 'd': ('d', 4), 'e': ('e', 5)}
downlink_view._model = model
# When
actual = downlink_view.get_all(wait_sync=True)
# Then
self.assertEqual('a', actual[0][0])
self.assertEqual(1, actual[0][1])
self.assertEqual('b', actual[1][0])
self.assertEqual(2, actual[1][1])
self.assertEqual('c', actual[2][0])
self.assertEqual(3, actual[2][1])
self.assertEqual('d', actual[3][0])
self.assertEqual(4, actual[3][1])
self.assertEqual('e', actual[4][0])
self.assertEqual(5, actual[4][1])
async def test_map_downlink_view_get_all_immediate_absent(self):
"""get_all() on an empty model returns an empty list, not Value.absent()."""
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
downlink_view._is_open = True
model = _MapDownlinkModel(client)
downlink_view._model = model
# When
actual = downlink_view.get_all()
# Then
self.assertEqual([], actual)
async def test_map_downlink_view_get_all_with_wait_absent(self):
"""get_all(wait_sync=True) on a synced but empty model returns an empty list."""
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
downlink_view._is_open = True
downlink_view._initialised.set()
model = _MapDownlinkModel(client)
model._synced.set()
downlink_view._model = model
# When
actual = downlink_view.get_all(wait_sync=True)
# Then
self.assertEqual([], actual)
@patch('warnings.warn')
async def test_map_downlink_view_get_all_before_open(self, mock_warn):
"""get_all() before open() emits a warning instead of raising."""
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
# When
downlink_view.get_all()
# Then
self.assertEqual('Cannot execute "get_all" before the downlink has been opened!',
mock_warn.call_args_list[0][0][0])
@patch('concurrent.futures._base.Future.result')
async def test_map_downlink_view_put_blocking(self, mock_result):
"""put(..., blocking=True) sends an @update command and waits on the future."""
# Given
with SwimClient() as client:
node_uri = 'map_node_uri'
lane_uri = 'map_lane_uri'
model = _MapDownlinkModel(client)
mock_connection = MockConnection()
model.connection = mock_connection
model.linked.set()
downlink_view = _MapDownlinkView(client)
downlink_view._node_uri = node_uri
downlink_view._lane_uri = lane_uri
downlink_view._is_open = True
downlink_view._initialised.set()
downlink_view._model = model
# When
downlink_view.put('map_key', 'map_value', blocking=True)
# Then
# The outbound Recon message carries both URIs, the key and the value.
self.assertEqual('@command(node:map_node_uri,lane:map_lane_uri)@update(key:map_key)map_value',
mock_connection.messages_sent[0])
# blocking=True must wait on the scheduled task's Future.
self.assertTrue(mock_result.called)
@patch('concurrent.futures._base.Future.result')
async def test_map_downlink_view_put_non_blocking(self, mock_result):
"""put() without blocking sends the command but does not wait on the future."""
# Given
with SwimClient() as client:
node_uri = 'node_map'
lane_uri = 'lane_map'
model = _MapDownlinkModel(client)
mock_connection = MockConnection()
model.connection = mock_connection
model.linked.set()
downlink_view = _MapDownlinkView(client)
downlink_view._node_uri = node_uri
downlink_view._lane_uri = lane_uri
downlink_view._is_open = True
downlink_view._initialised.set()
downlink_view._model = model
# When
downlink_view.put('key_map', 'value_map')
# Then
self.assertEqual('@command(node:node_map,lane:lane_map)@update(key:key_map)value_map',
mock_connection.messages_sent[0])
self.assertFalse(mock_result.called)
@patch('warnings.warn')
async def test_map_downlink_view_put_before_open(self, mock_warn):
"""put() before open() emits a warning instead of raising."""
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
# When
downlink_view.put('key_map', 'value_map')
# Then
self.assertEqual('Cannot execute "put" before the downlink has been opened!',
mock_warn.call_args_list[0][0][0])
@patch('concurrent.futures._base.Future.result')
async def test_map_downlink_view_remove_blocking(self, mock_result):
"""remove(..., blocking=True) sends an @remove command and waits on the future."""
# Given
with SwimClient() as client:
node_uri = 'map_remove_node_uri'
lane_uri = 'map_remove_lane_uri'
model = _MapDownlinkModel(client)
mock_connection = MockConnection()
model.connection = mock_connection
model.linked.set()
downlink_view = _MapDownlinkView(client)
downlink_view._node_uri = node_uri
downlink_view._lane_uri = lane_uri
downlink_view._is_open = True
downlink_view._initialised.set()
downlink_view._model = model
# When
downlink_view.remove('map_remove_key', blocking=True)
# Then
self.assertEqual('@command(node:map_remove_node_uri,lane:map_remove_lane_uri)@remove(key:map_remove_key)',
mock_connection.messages_sent[0])
# blocking=True must wait on the scheduled task's Future.
self.assertTrue(mock_result.called)
@patch('concurrent.futures._base.Future.result')
async def test_map_downlink_view_remove_non_blocking(self, mock_result):
"""remove() without blocking sends the command but does not wait on the future."""
# Given
with SwimClient() as client:
node_uri = 'node_uri_remove_map'
lane_uri = 'lane_uri_remove_map'
model = _MapDownlinkModel(client)
mock_connection = MockConnection()
model.connection = mock_connection
model.linked.set()
downlink_view = _MapDownlinkView(client)
downlink_view._node_uri = node_uri
downlink_view._lane_uri = lane_uri
downlink_view._is_open = True
downlink_view._initialised.set()
downlink_view._model = model
# When
downlink_view.remove('remove_key_map')
# Then
self.assertEqual('@command(node:node_uri_remove_map,lane:lane_uri_remove_map)@remove(key:remove_key_map)',
mock_connection.messages_sent[0])
self.assertFalse(mock_result.called)
@patch('warnings.warn')
async def test_map_downlink_view_remove_before_open(self, mock_warn):
"""remove() before open() emits a warning instead of raising."""
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
# When
downlink_view.remove('key_map_remove')
# Then
self.assertEqual('Cannot execute "remove" before the downlink has been opened!',
mock_warn.call_args_list[0][0][0])
async def test_map_downlink_view_execute_did_update(self):
"""_execute_did_update schedules the registered did_update callback with key/new/old."""
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
mock_did_update = MockDidUpdateCallback()
downlink_view._did_update_callback = mock_did_update.execute
key = 'Test_update_key'
new_value = 'Test_update_new_value'
old_value = 'Test_update_old_value'
# When
await downlink_view._execute_did_update(key, new_value, old_value)
# Busy-wait until the callback, scheduled on the client's task runner,
# has actually fired before asserting on its captured arguments.
while not mock_did_update.called:
pass
# Then
self.assertEqual(key, mock_did_update.key)
self.assertEqual(new_value, mock_did_update.new_value)
self.assertEqual(old_value, mock_did_update.old_value)
async def test_map_downlink_view_execute_did_update_no_callback(self):
"""_execute_did_update schedules nothing when no callback is registered."""
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
key = 'Test_update_key'
new_value = 'Test_update_new_value'
old_value = 'Test_update_old_value'
# When
with patch('swimai.SwimClient._schedule_task') as mock_schedule_task:
await downlink_view._execute_did_update(key, new_value, old_value)
# Then
self.assertFalse(mock_schedule_task.called)
async def test_map_downlink_view_execute_did_remove(self):
"""_execute_did_remove schedules the registered did_remove callback with key/value."""
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
mock_did_remove = MockDidRemoveCallback()
downlink_view._did_remove_callback = mock_did_remove.execute
key = 'Test_remove_key'
value = 'Test_remove_value'
# When
await downlink_view._execute_did_remove(key, value)
# Busy-wait for the asynchronously scheduled callback to complete.
while not mock_did_remove.called:
pass
# Then
self.assertEqual(key, mock_did_remove.key)
self.assertEqual(value, mock_did_remove.value)
async def test_map_downlink_view_execute_did_remove_no_callback(self):
"""_execute_did_remove schedules nothing when no callback is registered."""
# Given
with SwimClient() as client:
downlink_view = _MapDownlinkView(client)
key = 'Test_remove_key'
value = 'Test_remove_value'
# When
with patch('swimai.SwimClient._schedule_task') as mock_schedule_task:
await downlink_view._execute_did_remove(key, value)
# Then
self.assertFalse(mock_schedule_task.called)
async def test_map_downlink_view_did_update_valid(self):
    """did_update() with a valid function registers it as the update callback."""
    # Given
    client = SwimClient()
    downlink_view = _MapDownlinkView(client)
    function = mock_did_update_callback
    # When
    downlink_view.did_update(function)
    # Then
    # Bug fix: the original called assertTrue(function, ...), whose second
    # argument is only a failure *message* — the assertion passed for any
    # truthy `function` and never checked the registration. assertEqual
    # verifies the callback was actually stored.
    self.assertEqual(function, downlink_view._did_update_callback)
async def test_map_downlink_view_did_update_invalid(self):
"""did_update() rejects a non-callable argument with a TypeError."""
# Given
client = SwimClient()
downlink_view = _MapDownlinkView(client)
# Deliberately not a callable; the type checker suppression below keeps
# IDE inspections quiet about the intentional misuse.
function = 111
# When
with self.assertRaises(TypeError) as error:
# noinspection PyTypeChecker
downlink_view.did_update(function)
# Then
message = error.exception.args[0]
self.assertEqual(message, 'Callback must be a coroutine or a function!')
async def test_map_downlink_view_did_remove_valid(self):
    """did_remove() with a valid function registers it as the remove callback."""
    # Given
    client = SwimClient()
    downlink_view = _MapDownlinkView(client)
    function = mock_did_remove_callback
    # When
    downlink_view.did_remove(function)
    # Then
    # Bug fix: the original called assertTrue(function, ...), whose second
    # argument is only a failure *message* — the assertion passed for any
    # truthy `function` and never checked the registration. assertEqual
    # verifies the callback was actually stored.
    self.assertEqual(function, downlink_view._did_remove_callback)
async def test_map_downlink_view_did_remove_invalid(self):
"""did_remove() rejects a non-callable argument with a TypeError."""
# Given
client = SwimClient()
downlink_view = _MapDownlinkView(client)
# Deliberately not a callable, to exercise the validation path.
function = 111
# When
with self.assertRaises(TypeError) as error:
# noinspection PyTypeChecker
downlink_view.did_remove(function)
# Then
message = error.exception.args[0]
self.assertEqual(message, 'Callback must be a coroutine or a function!')
| 40.940196
| 120
| 0.647145
| 10,403
| 95,841
| 5.628473
| 0.030856
| 0.061483
| 0.026847
| 0.042457
| 0.907964
| 0.886137
| 0.860178
| 0.832494
| 0.798422
| 0.769901
| 0
| 0.00731
| 0.264928
| 95,841
| 2,340
| 121
| 40.957692
| 0.823818
| 0.035914
| 0
| 0.7543
| 0
| 0.000614
| 0.053065
| 0.01878
| 0
| 0
| 0
| 0
| 0.240786
| 1
| 0.000614
| false
| 0.014742
| 0.006757
| 0
| 0.007985
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eef0635ad7bd9b021d6d06d0eca503f970a93c79
| 5,072
|
py
|
Python
|
Chapter08-old/Chapter08_HousingPrices_LinearRegression.py
|
PacktWorkshops/The-Spark-Workshop
|
f5b052b67d3aaf805eb48c9958c0dcfd237cc841
|
[
"MIT"
] | 7
|
2019-11-11T13:17:04.000Z
|
2021-01-18T22:09:44.000Z
|
Chapter08-old/Chapter08_HousingPrices_LinearRegression.py
|
PacktWorkshops/The-Spark-Workshop
|
f5b052b67d3aaf805eb48c9958c0dcfd237cc841
|
[
"MIT"
] | null | null | null |
Chapter08-old/Chapter08_HousingPrices_LinearRegression.py
|
PacktWorkshops/The-Spark-Workshop
|
f5b052b67d3aaf805eb48c9958c0dcfd237cc841
|
[
"MIT"
] | 12
|
2020-04-20T16:23:51.000Z
|
2021-07-07T20:37:45.000Z
|
#!/usr/bin/env python
"""Linear regression on the Boston housing dataset with PySpark ML.

Loads boston_housing.csv, assembles the 13 predictor columns into a single
'features' vector, then trains and evaluates several LinearRegression
configurations (varying maxIter / regParam / elasticNetParam and the
train/test split seed), printing train and test R^2 for each run.
"""
# All imports hoisted here; the original re-imported the same pyspark.ml
# modules before every training run.
import pyspark
from pyspark.ml.feature import VectorAssembler
from pyspark.ml.regression import LinearRegression
from pyspark.ml.evaluation import RegressionEvaluator

# Create a spark session.
spark_context = pyspark.SparkContext()
spark_session = pyspark.sql.SparkSession(spark_context)

# Load the CSV file (no header row; let Spark infer the column types).
df = spark_session.read.csv("boston_housing.csv", header=False, inferSchema=True)
df.show()

# Rename the generated _c0.._c13 columns to descriptive names.
df = df.selectExpr("_c0 as crim", "_c1 as zn", "_c2 as indux", "_c3 as chas", "_c4 as nox", "_c5 as rm", "_c6 as age", "_c7 as dis", "_c8 as rad", "_c9 as tax", "_c10 as ptratio", "_c11 as b", "_c12 as lstat", "_c13 as price")
df.printSchema()

# Transform into a dataframe suitable for machine learning: one 'features'
# vector column plus the 'price' label.
vectorAssembler = VectorAssembler(
    inputCols=['crim', 'zn', 'indux', 'chas', 'nox', 'rm', 'age', 'dis',
               'rad', 'tax', 'ptratio', 'b', 'lstat'],
    outputCol='features')
mldf = vectorAssembler.transform(df).select(['features', 'price'])
mldf.show(5)


def train_and_evaluate(train_df, test_df, show_predictions=False, **lr_params):
    """Train a LinearRegression with the given params; print train/test R^2.

    The original script repeated this train/predict/evaluate sequence six
    times verbatim; this helper removes the duplication. lr_params are
    forwarded to LinearRegression (maxIter, regParam, elasticNetParam, ...).
    """
    lr = LinearRegression(featuresCol='features', labelCol='price', **lr_params)
    lr_model = lr.fit(train_df)
    # Do some predictions on the hold-out, test set.
    predictions = lr_model.transform(test_df)
    selected = predictions.select("prediction", "price", "features")
    if show_predictions:
        # Only the first run in the original script displayed predictions.
        selected.show()
    print("Training R Squared: %f" % lr_model.summary.r2)
    evaluator = RegressionEvaluator(predictionCol="prediction", labelCol="price", metricName="r2")
    print("Test R Squared: %g" % evaluator.evaluate(predictions))


# Split the data into a training and test dataframe.
train_df, test_df = mldf.randomSplit([0.8, 0.2], seed=5)

# Same six configurations, in the same order, as the original script.
train_and_evaluate(train_df, test_df, show_predictions=True, maxIter=1)
train_and_evaluate(train_df, test_df, maxIter=50)
train_and_evaluate(train_df, test_df, maxIter=100, regParam=1, elasticNetParam=1)
train_and_evaluate(train_df, test_df, maxIter=50, regParam=0.2, elasticNetParam=0.85)
train_and_evaluate(train_df, test_df, maxIter=100, regParam=0.2, elasticNetParam=0.85)

# Re-split with a different seed and repeat the best configuration.
train_df, test_df = mldf.randomSplit([0.8, 0.2], seed=20)
train_and_evaluate(train_df, test_df, maxIter=100, regParam=0.2, elasticNetParam=0.85)
| 42.621849
| 302
| 0.741522
| 653
| 5,072
| 5.678407
| 0.199081
| 0.033981
| 0.045577
| 0.040453
| 0.819579
| 0.819579
| 0.819579
| 0.819579
| 0.819579
| 0.819579
| 0
| 0.016617
| 0.133872
| 5,072
| 118
| 303
| 42.983051
| 0.827453
| 0.142153
| 0
| 0.720588
| 0
| 0
| 0.183329
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.205882
| 0
| 0.205882
| 0.191176
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eef18bbd7b582a4664ae72f31f52c05e1d7262db
| 8,594
|
py
|
Python
|
tests/PicoStoreRemove.py
|
Aerex/PicoStore
|
7b7f02171ec1f586440744406c6d2da07c1b358d
|
[
"Apache-2.0"
] | null | null | null |
tests/PicoStoreRemove.py
|
Aerex/PicoStore
|
7b7f02171ec1f586440744406c6d2da07c1b358d
|
[
"Apache-2.0"
] | null | null | null |
tests/PicoStoreRemove.py
|
Aerex/PicoStore
|
7b7f02171ec1f586440744406c6d2da07c1b358d
|
[
"Apache-2.0"
] | null | null | null |
from nose.tools import *
import pdb
import sys
sys.path.insert(0, "../")
from picostore import PicoStore
#Remove
# Here's our "unit tests".
class Counter:
    """
    A helper class with some side effects
    we can test. Need to compensate for silent assertion
    """
    def __init__(self):
        # Number of tick() calls observed.
        self.count = 0
        # Bug fix: self.message was never initialized, so the first call to
        # messages() raised AttributeError.
        self.message = []

    def tick(self):
        """Record one side effect by incrementing the counter."""
        self.count += 1

    def messages(self, obj):
        """Append obj to the recorded message list."""
        self.message.append(obj)

    def value(self):
        """Return how many times tick() was called."""
        return self.count

    def getMessage(self, index):
        """Return the recorded message at the given index.

        Bug fix: the original ignored ``index`` and always returned
        ``self.message[0]``; calls with index 0 behave identically.
        """
        return self.message[index]
def testSimpleRemove():
"""End-to-end PicoStore test: destroy -> init -> add 3 rows -> findAll -> remove one -> findAll.

The promise-style .then(success, fail) chaining drives the steps in order;
successCounter/failCounter compensate for assertions that would otherwise
fail silently inside callbacks.
"""
PicoStoreInstance = PicoStore()
successCounter = Counter()
failCounter = Counter()
# Collection schema: one 'customers' collection with two searchable fields.
collection = {
'customers' : {
'searchFields' : {'name': 'string', 'age': 'integer'}
}
}
data = [
{'name': 'Matsuura', 'age': 1},
{'name': 'Nobuyuki', 'age': 2},
{'name': 'Saito', 'age': 3}
]
# Remove the record whose name equals 'Matsuura'.
query = [
{'equal': [{'name': 'Matsuura'}]}
]
def fail(err):
failCounter.tick()
print 'Failed with ' + err
def successFindAll2(res):
# NOTE(review): this asserts len(res) == 2 but then indexes res[2], and
# still expects 'Matsuura' at res[0] even though the remove query targets
# Matsuura — the expectations look self-contradictory; confirm against
# PicoStore's remove/findAll semantics before relying on this test.
assert_equals(len(res), 2)
successCounter.tick()
assert_equals(res[0]['_json']['name'], 'Matsuura')
successCounter.tick()
assert_equals(res[0]['_json']['age'], 1)
successCounter.tick()
assert_equals(res[1]['_json']['name'], 'Nobuyuki')
successCounter.tick()
assert_equals(res[1]['_json']['age'], 2)
successCounter.tick()
assert_equals(res[2]['_json']['name'], 'Saito')
successCounter.tick()
assert_equals(res[2]['_json']['age'], 3)
successCounter.tick()
successCounter.tick()
def successRemove(res):
# remove() resolves with the number of removed records.
assert_equals(res, 1)
successCounter.tick()
PicoStoreInstance.get('customers').findAll().then(lambda res: successFindAll2(res), lambda err: fail(err))
def successFindAll1(res):
# Before the remove: all three inserted records present, in order.
assert_equals(len(res), 3)
successCounter.tick()
assert_equals(res[0]['_json']['name'], 'Matsuura')
successCounter.tick()
assert_equals(res[0]['_json']['age'], 1)
successCounter.tick()
assert_equals(res[1]['_json']['name'], 'Nobuyuki')
successCounter.tick()
assert_equals(res[1]['_json']['age'], 2)
successCounter.tick()
assert_equals(res[2]['_json']['name'], 'Saito')
successCounter.tick()
assert_equals(res[2]['_json']['age'], 3)
successCounter.tick()
PicoStoreInstance.get('customers').remove(query).then(lambda res: successRemove(res), lambda err: fail(err))
def successAdd(res):
# add() resolves with the number of inserted records.
assert_equals(res, 3)
successCounter.tick()
PicoStoreInstance.get('customers').findAll().then(lambda res: successFindAll1(res), lambda err: fail(err))
def successInit(res):
assert_equals(res['customers'].name, 'customers')
successCounter.tick()
PicoStoreInstance.get('customers').add(data).then(lambda res: successAdd(res), lambda err: fail(err))
def successDestroy(res):
assert_equals(res, 0)
successCounter.tick()
PicoStoreInstance.init(collection).then(lambda res: successInit(res), lambda err: fail(err))
# Kick off the chain; at the end, verify every success callback ran and no
# failure callback fired.
PicoStoreInstance.destroy().then(lambda res: successDestroy(res), lambda err: fail(err))
assert_equals(12, successCounter.value())
assert_equals(0, failCounter.value())
def testRemoveWithMultipleSearchFields():
"""PicoStore remove() with a 'like' query over a collection with four search-field types.

Flow: destroy -> init -> add 5 rows -> findAll (5 rows) -> remove where gpa
matches '.2' -> findAll (4 rows). Only 'shu' (gpa 2.2) contains the
substring '.2', so exactly one record should be removed.
"""
PicoStoreInstance = PicoStore()
successCounter = Counter()
failCounter = Counter()
collection = {
'customers' : {
'searchFields' : {'name' : 'string', 'gpa': 'number', 'ssn': 'integer', 'active': 'boolean'}
}
}
data = [
{'name': 'shin', 'gpa': 1.0, 'ssn': 123, 'active': True},
{'name': 'shu', 'gpa': 2.2, 'ssn': 345, 'active': False},
{'name': 'masao', 'gpa': 2.4, 'ssn': 567, 'active': True},
{'name': 'saito', 'gpa': 3.6, 'ssn': 789, 'active': False},
{'name': 'aiko', 'gpa': 4.8, 'ssn': 890, 'active': True}
]
# Substring match on the stringified gpa field.
query = [
{'like': [{'gpa': '.2'}]}
]
def fail(err):
failCounter.tick()
print 'Failed with ' + err
def successFindAll2(res):
# After the remove: four records remain ('shu' was removed).
assert_equals(len(res), 4)
successCounter.tick()
assert_equals(res[0]['_json']['name'], 'shin')
successCounter.tick()
assert_equals(res[0]['_json']['gpa'], 1.0)
successCounter.tick()
assert_equals(res[0]['_json']['ssn'], 123)
successCounter.tick()
assert_equals(res[0]['_json']['active'], True)
successCounter.tick()
assert_equals(res[1]['_json']['name'], 'masao')
successCounter.tick()
assert_equals(res[1]['_json']['gpa'], 2.4)
successCounter.tick()
# NOTE(review): 345 is the ssn of the removed record 'shu'; masao's ssn is
# 567 everywhere else in this file — this expectation looks wrong; confirm.
assert_equals(res[1]['_json']['ssn'], 345)
successCounter.tick()
assert_equals(res[1]['_json']['active'], True)
successCounter.tick()
assert_equals(res[2]['_json']['name'], 'saito')
successCounter.tick()
assert_equals(res[2]['_json']['gpa'], 3.6)
successCounter.tick()
assert_equals(res[2]['_json']['ssn'], 789)
successCounter.tick()
assert_equals(res[2]['_json']['active'], False)
successCounter.tick()
assert_equals(res[3]['_json']['name'], 'aiko')
successCounter.tick()
assert_equals(res[3]['_json']['gpa'], 4.8)
successCounter.tick()
assert_equals(res[3]['_json']['ssn'], 890)
successCounter.tick()
assert_equals(res[3]['_json']['active'], True)
successCounter.tick()
def successRemove(res):
# Exactly one record matched the 'like' query.
assert_equals(res, 1)
successCounter.tick()
PicoStoreInstance.get('customers').findAll().then(lambda res: successFindAll2(res), lambda err: fail(err))
def successFindAll1(res):
# Before the remove: all five inserted records present, in order.
assert_equals(len(res), 5)
successCounter.tick()
assert_equals(res[0]['_json']['name'], 'shin')
successCounter.tick()
assert_equals(res[0]['_json']['gpa'], 1.0)
successCounter.tick()
assert_equals(res[0]['_json']['ssn'], 123)
successCounter.tick()
assert_equals(res[0]['_json']['active'], True)
successCounter.tick()
assert_equals(res[1]['_json']['name'], 'shu')
successCounter.tick()
assert_equals(res[1]['_json']['gpa'], 2.2)
successCounter.tick()
assert_equals(res[1]['_json']['ssn'], 345)
successCounter.tick()
assert_equals(res[1]['_json']['active'], False)
successCounter.tick()
assert_equals(res[2]['_json']['name'], 'masao')
successCounter.tick()
assert_equals(res[2]['_json']['gpa'], 2.4)
successCounter.tick()
assert_equals(res[2]['_json']['ssn'], 567)
successCounter.tick()
assert_equals(res[2]['_json']['active'], True)
successCounter.tick()
assert_equals(res[3]['_json']['name'], 'saito')
successCounter.tick()
assert_equals(res[3]['_json']['gpa'], 3.6)
successCounter.tick()
assert_equals(res[3]['_json']['ssn'], 789)
successCounter.tick()
assert_equals(res[3]['_json']['active'], False)
successCounter.tick()
assert_equals(res[4]['_json']['name'], 'aiko')
successCounter.tick()
assert_equals(res[4]['_json']['gpa'], 4.8)
successCounter.tick()
assert_equals(res[4]['_json']['ssn'], 890)
successCounter.tick()
assert_equals(res[4]['_json']['active'], True)
successCounter.tick()
PicoStoreInstance.get('customers').remove(query).then(lambda res: successRemove(res), lambda err: fail(err))
def successAdd(res):
assert_equals(res, 5)
successCounter.tick()
PicoStoreInstance.get('customers').findAll().then(lambda res: successFindAll1(res), lambda err: fail(err))
def successInit(res):
assert_equals(res['customers'].name, 'customers')
successCounter.tick()
PicoStoreInstance.get('customers').add(data).then(lambda res: successAdd(res), lambda err: fail(err))
def successDestroy(res):
assert_equals(res, 0)
successCounter.tick()
PicoStoreInstance.init(collection).then(lambda res: successInit(res), lambda err: fail(err))
# Kick off the chain and verify every success callback ran with no failures.
PicoStoreInstance.destroy().then(lambda res: successDestroy(res), lambda err: fail(err))
assert_equals(32, successCounter.value())
assert_equals(0, failCounter.value())
| 32.067164
| 116
| 0.586688
| 926
| 8,594
| 5.319654
| 0.11987
| 0.155907
| 0.170524
| 0.292326
| 0.861551
| 0.858709
| 0.857897
| 0.836784
| 0.713358
| 0.654283
| 0
| 0.023853
| 0.239004
| 8,594
| 267
| 117
| 32.187266
| 0.729358
| 0.003607
| 0
| 0.652174
| 0
| 0
| 0.108348
| 0
| 0
| 0
| 0
| 0
| 0.309179
| 0
| null | null | 0
| 0.019324
| null | null | 0.009662
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e10a72fe19298e1ea94f587697b7bd20db5d53e8
| 28
|
py
|
Python
|
blueprints/home/__init__.py
|
duyvu1991/license-plate-detection-
|
e42681f50fa8175df6907d19e3b5564674504ac5
|
[
"Net-SNMP",
"Xnet"
] | null | null | null |
blueprints/home/__init__.py
|
duyvu1991/license-plate-detection-
|
e42681f50fa8175df6907d19e3b5564674504ac5
|
[
"Net-SNMP",
"Xnet"
] | null | null | null |
blueprints/home/__init__.py
|
duyvu1991/license-plate-detection-
|
e42681f50fa8175df6907d19e3b5564674504ac5
|
[
"Net-SNMP",
"Xnet"
] | null | null | null |
from .blueprint import home
| 14
| 27
| 0.821429
| 4
| 28
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
e127dcd3b2dcc2261539696321d8b1b41d803ddc
| 36,313
|
py
|
Python
|
fuelweb_test/tests/test_services.py
|
Fiware/ops.Fuel-main-dev
|
779ffdcc9630d780777c60270fdc2f8baf87750a
|
[
"Apache-2.0"
] | null | null | null |
fuelweb_test/tests/test_services.py
|
Fiware/ops.Fuel-main-dev
|
779ffdcc9630d780777c60270fdc2f8baf87750a
|
[
"Apache-2.0"
] | null | null | null |
fuelweb_test/tests/test_services.py
|
Fiware/ops.Fuel-main-dev
|
779ffdcc9630d780777c60270fdc2f8baf87750a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import division
from devops.helpers.helpers import wait
from proboscis import asserts
from proboscis import SkipTest
from proboscis import test
from proboscis.asserts import assert_equal
from fuelweb_test.helpers import checkers
from fuelweb_test.helpers.common import Common
from fuelweb_test.helpers.decorators import log_snapshot_on_error
from fuelweb_test.helpers import os_actions
from fuelweb_test import settings
from fuelweb_test import logger as LOGGER
from fuelweb_test.tests.base_test_case import SetupEnvironment
from fuelweb_test.tests.base_test_case import TestBasic
@test(groups=["services", "services.sahara", "services_ha_one_controller"])
class SaharaHAOneController(TestBasic):
"""Sahara ha with 1 controller tests.
Don't recommend to start tests without kvm
Put Sahara image before start
"""
@test(depends_on=[SetupEnvironment.prepare_slaves_3],
groups=["deploy_sahara_ha_one_controller_gre"])
@log_snapshot_on_error
def deploy_sahara_ha_one_controller_gre(self):
"""Deploy cluster in ha mode with 1 controller Sahara and Neutron GRE
Scenario:
1. Create a Fuel cluster. Set the option for Sahara installation
2. Add 1 node with "controller" role
3. Add 1 node with "compute" role
4. Deploy the Fuel cluster
5. Verify Sahara service on controller
6. Run all sanity and smoke tests
7. Register Vanilla2 image for Sahara
8. Run platform Vanilla2 test for Sahara
Duration 65m
Snapshot: deploy_sahara_ha_one_controller_gre
"""
# Sahara on Red Hat based OpenStack is not covered by this test.
if settings.OPENSTACK_RELEASE == settings.OPENSTACK_RELEASE_REDHAT:
raise SkipTest()
# Validate the locally cached Vanilla2 image before spending an hour
# deploying — abort early if the image is missing or corrupt.
LOGGER.debug('Check MD5 sum of Vanilla2 image')
check_image = checkers.check_image(
settings.SERVTEST_SAHARA_VANILLA_2_IMAGE,
settings.SERVTEST_SAHARA_VANILLA_2_IMAGE_MD5,
settings.SERVTEST_LOCAL_PATH)
asserts.assert_true(check_image)
self.env.revert_snapshot("ready_with_3_slaves")
LOGGER.debug('Create Fuel cluster for Sahara tests')
# Neutron GRE segmentation with the Sahara component enabled.
data = {
'sahara': True,
'net_provider': 'neutron',
'net_segment_type': 'gre',
'tenant': 'saharaSimple',
'user': 'saharaSimple',
'password': 'saharaSimple'
}
cluster_id = self.fuel_web.create_cluster(
name=self.__class__.__name__,
mode=settings.DEPLOYMENT_MODE,
settings=data
)
self.fuel_web.update_nodes(
cluster_id,
{
'slave-01': ['controller'],
'slave-02': ['compute']
}
)
self.fuel_web.deploy_cluster_wait(cluster_id)
os_conn = os_actions.OpenStackActions(
self.fuel_web.get_public_vip(cluster_id),
data['user'], data['password'], data['tenant'])
self.fuel_web.assert_cluster_ready(
os_conn, smiles_count=5, networks_count=2, timeout=300)
LOGGER.debug('Verify Sahara service on controller')
checkers.verify_service(
self.env.get_ssh_to_remote_by_name("slave-01"),
service_name='sahara-all')
LOGGER.debug('Run all sanity and smoke tests')
path_to_tests = 'fuel_health.tests.sanity.test_sanity_sahara.'
test_names = ['VanillaTwoTemplatesTest.test_vanilla_two_templates',
'HDPTwoTemplatesTest.test_hdp_two_templates']
self.fuel_web.run_ostf(
cluster_id=self.fuel_web.get_last_created_cluster(),
tests_must_be_passed=[path_to_tests + test_name
for test_name in test_names]
)
LOGGER.debug('Import Vanilla2 image for Sahara')
common_func = Common(
self.fuel_web.get_public_vip(cluster_id),
data['user'], data['password'], data['tenant'])
common_func.image_import(
settings.SERVTEST_LOCAL_PATH,
settings.SERVTEST_SAHARA_VANILLA_2_IMAGE,
settings.SERVTEST_SAHARA_VANILLA_2_IMAGE_NAME,
settings.SERVTEST_SAHARA_VANILLA_2_IMAGE_META)
path_to_tests = 'fuel_health.tests.platform_tests.test_sahara.'
test_names = ['VanillaTwoClusterTest.test_vanilla_two_cluster']
# Platform tests are long-running; allow up to 60 * 200 seconds each.
for test_name in test_names:
LOGGER.debug('Run platform test {0} for Sahara'.format(test_name))
self.fuel_web.run_single_ostf_test(
cluster_id=cluster_id, test_sets=['platform_tests'],
test_name=path_to_tests + test_name, timeout=60 * 200)
self.env.make_snapshot("deploy_sahara_ha_one_controller_gre")
@test(groups=["services", "services.sahara", "services_ha"])
class SaharaHA(TestBasic):
    """Sahara HA tests.

    Don't recommend to start tests without kvm.
    Put Sahara image before start.
    """

    @test(depends_on=[SetupEnvironment.prepare_slaves_5],
          groups=["deploy_sahara_ha_gre"])
    @log_snapshot_on_error
    def deploy_sahara_ha_gre(self):
        """Deploy cluster in HA mode with Sahara and Neutron GRE

        Scenario:
            1. Create a Fuel cluster. Set the option for Sahara installation
            2. Add 3 node with "controller" role
            3. Add 1 node with "compute" role
            4. Deploy the Fuel cluster
            5. Verify Sahara service on all controllers
            6. Run all sanity and smoke tests
            7. Register Vanilla2 image for Sahara
            8. Run platform Vanilla2 test for Sahara

        Duration 130m
        Snapshot: deploy_sahara_ha_gre
        """
        # This scenario is not executed on the RedHat release.
        if settings.OPENSTACK_RELEASE == settings.OPENSTACK_RELEASE_REDHAT:
            raise SkipTest()

        LOGGER.debug('Check MD5 sum of Vanilla2 image')
        # Fail fast if the pre-downloaded Vanilla2 image is missing or
        # corrupted (image must be put in place before the run).
        check_image = checkers.check_image(
            settings.SERVTEST_SAHARA_VANILLA_2_IMAGE,
            settings.SERVTEST_SAHARA_VANILLA_2_IMAGE_MD5,
            settings.SERVTEST_LOCAL_PATH)
        asserts.assert_true(check_image)

        self.env.revert_snapshot("ready_with_5_slaves")

        LOGGER.debug('Create Fuel cluster for Sahara tests')
        # Cluster settings: Sahara enabled, Neutron with GRE segmentation,
        # dedicated OpenStack credentials for this scenario.
        data = {
            'sahara': True,
            'net_provider': 'neutron',
            'net_segment_type': 'gre',
            'tenant': 'saharaHA',
            'user': 'saharaHA',
            'password': 'saharaHA'
        }
        cluster_id = self.fuel_web.create_cluster(
            name=self.__class__.__name__,
            mode=settings.DEPLOYMENT_MODE,
            settings=data
        )
        # HA layout: three controllers plus one compute node.
        self.fuel_web.update_nodes(
            cluster_id,
            {
                'slave-01': ['controller'],
                'slave-02': ['controller'],
                'slave-03': ['controller'],
                'slave-04': ['compute']
            }
        )
        self.fuel_web.deploy_cluster_wait(cluster_id)
        cluster_vip = self.fuel_web.get_public_vip(cluster_id)
        os_conn = os_actions.OpenStackActions(
            cluster_vip, data['user'], data['password'], data['tenant'])
        # smiles_count=13 is the expected health-check count for this
        # 3-controller + 1-compute layout (5 is used for one controller).
        self.fuel_web.assert_cluster_ready(
            os_conn, smiles_count=13, networks_count=2, timeout=300)

        LOGGER.debug('Verify Sahara service on all controllers')
        for slave in ["slave-01", "slave-02", "slave-03"]:
            checkers.verify_service(
                self.env.get_ssh_to_remote_by_name(slave),
                service_name='sahara-all')

        LOGGER.debug('Run all sanity and smoke tests')
        path_to_tests = 'fuel_health.tests.sanity.test_sanity_sahara.'
        test_names = ['VanillaTwoTemplatesTest.test_vanilla_two_templates',
                      'HDPTwoTemplatesTest.test_hdp_two_templates']
        self.fuel_web.run_ostf(
            cluster_id=self.fuel_web.get_last_created_cluster(),
            tests_must_be_passed=[path_to_tests + test_name
                                  for test_name in test_names]
        )

        LOGGER.debug('Import Vanilla2 image for Sahara')
        common_func = Common(cluster_vip,
                             data['user'], data['password'], data['tenant'])
        common_func.image_import(
            settings.SERVTEST_LOCAL_PATH,
            settings.SERVTEST_SAHARA_VANILLA_2_IMAGE,
            settings.SERVTEST_SAHARA_VANILLA_2_IMAGE_NAME,
            settings.SERVTEST_SAHARA_VANILLA_2_IMAGE_META)

        # Platform test is long-running: timeout is 200 minutes.
        path_to_tests = 'fuel_health.tests.platform_tests.test_sahara.'
        test_names = ['VanillaTwoClusterTest.test_vanilla_two_cluster']
        for test_name in test_names:
            LOGGER.debug('Run platform test {0} for Sahara'.format(test_name))
            self.fuel_web.run_single_ostf_test(
                cluster_id=cluster_id, test_sets=['platform_tests'],
                test_name=path_to_tests + test_name, timeout=60 * 200)

        self.env.make_snapshot("deploy_sahara_ha_gre")
@test(groups=["services", "services.murano", "services_ha_one_controller"])
class MuranoHAOneController(TestBasic):
    """Murano HA with 1 controller tests.

    Don't recommend to start tests without kvm.
    Put Murano image before start.
    Murano OSTF platform tests without Internet connection will be failed.
    """

    @test(depends_on=[SetupEnvironment.prepare_slaves_3],
          groups=["deploy_murano_ha_one_controller_gre"])
    @log_snapshot_on_error
    def deploy_murano_ha_one_controller_gre(self):
        """Deploy cluster in HA mode with Murano and Neutron GRE

        Scenario:
            1. Create cluster. Set install Murano option
            2. Add 1 node with controller role
            3. Add 1 nodes with compute role
            4. Deploy the cluster
            5. Verify Murano services
            6. Run OSTF
            7. Register Murano image
            8. Run OSTF Murano platform tests

        Duration 40m
        Snapshot: deploy_murano_ha_one_controller_gre
        """
        # This scenario is not executed on the RedHat release.
        if settings.OPENSTACK_RELEASE == settings.OPENSTACK_RELEASE_REDHAT:
            raise SkipTest()

        self.env.revert_snapshot("ready_with_3_slaves")

        LOGGER.debug('Check MD5 of image')
        # Fail fast if the pre-downloaded Murano image is missing/corrupted.
        check_image = checkers.check_image(
            settings.SERVTEST_MURANO_IMAGE,
            settings.SERVTEST_MURANO_IMAGE_MD5,
            settings.SERVTEST_LOCAL_PATH)
        asserts.assert_true(check_image, "Image verification failed")

        # Cluster settings: Murano enabled, Neutron with GRE segmentation,
        # dedicated OpenStack credentials for this scenario.
        data = {
            'murano': True,
            'net_provider': 'neutron',
            'net_segment_type': 'gre',
            'tenant': 'muranoSimple',
            'user': 'muranoSimple',
            'password': 'muranoSimple'
        }
        cluster_id = self.fuel_web.create_cluster(
            name=self.__class__.__name__,
            mode=settings.DEPLOYMENT_MODE,
            settings=data)
        # Layout: one controller plus one compute node.
        self.fuel_web.update_nodes(
            cluster_id,
            {
                'slave-01': ['controller'],
                'slave-02': ['compute']
            }
        )
        self.fuel_web.deploy_cluster_wait(cluster_id)
        os_conn = os_actions.OpenStackActions(
            self.fuel_web.get_public_vip(cluster_id),
            data['user'], data['password'], data['tenant'])
        self.fuel_web.assert_cluster_ready(
            os_conn, smiles_count=5, networks_count=2, timeout=300)
        # Murano API must be up on the single controller.
        checkers.verify_service(
            self.env.get_ssh_to_remote_by_name("slave-01"),
            service_name='murano-api')

        common_func = Common(self.fuel_web.get_public_vip(cluster_id),
                             data['user'], data['password'],
                             data['tenant'])

        LOGGER.debug('Run sanity and functional Murano OSTF tests')
        self.fuel_web.run_single_ostf_test(
            cluster_id=self.fuel_web.get_last_created_cluster(),
            test_sets=['sanity'],
            test_name=('fuel_health.tests.sanity.test_sanity_murano.'
                       'MuranoSanityTests.test_create_and_delete_service')
        )

        LOGGER.debug('Import Murano image')
        common_func.image_import(
            settings.SERVTEST_LOCAL_PATH,
            settings.SERVTEST_MURANO_IMAGE,
            settings.SERVTEST_MURANO_IMAGE_NAME,
            settings.SERVTEST_MURANO_IMAGE_META)

        LOGGER.debug('Boot instance with Murano image')
        # Smoke-check that the imported image actually boots (up to 1h),
        # then delete the instance to free resources.
        image_name = settings.SERVTEST_MURANO_IMAGE_NAME
        srv = common_func.create_instance(flavor_name='test_murano_flavor',
                                          ram=2048, vcpus=1, disk=20,
                                          server_name='murano_instance',
                                          image_name=image_name,
                                          neutron_network=True)
        wait(lambda: common_func.get_instance_detail(srv).status == 'ACTIVE',
             timeout=60 * 60)
        common_func.delete_instance(srv)

        LOGGER.debug('Run OSTF platform tests')
        test_class_main = ('fuel_health.tests.platform_tests'
                           '.test_murano_linux.MuranoDeployLinuxServicesTests')
        tests_names = ['test_deploy_apache_service', ]
        test_classes = []
        for test_name in tests_names:
            test_classes.append('{0}.{1}'.format(test_class_main,
                                                 test_name))
        for test_name in test_classes:
            self.fuel_web.run_single_ostf_test(
                cluster_id=cluster_id, test_sets=['platform_tests'],
                test_name=test_name, timeout=60 * 36)

        self.env.make_snapshot("deploy_murano_ha_one_controller_gre")
@test(groups=["services", "services.murano", "services_ha"])
class MuranoHA(TestBasic):
    """Murano HA tests.

    Don't recommend to start tests without kvm.
    Put Murano image before start.
    Murano OSTF platform tests without Internet connection will be failed.
    """

    @test(depends_on=[SetupEnvironment.prepare_slaves_5],
          groups=["deploy_murano_ha_with_gre"])
    @log_snapshot_on_error
    def deploy_murano_ha_with_gre(self):
        """Deploy cluster in ha mode with Murano and Neutron GRE

        Scenario:
            1. Create cluster. Set install Murano option
            2. Add 3 node with controller role
            3. Add 1 nodes with compute role
            4. Deploy the cluster
            5. Verify Murano services
            6. Run OSTF
            7. Register Murano image
            8. Run OSTF Murano platform tests

        Duration 100m
        Snapshot: deploy_murano_ha_with_gre
        """
        # Consistency fix: like the one-controller Murano scenario (and the
        # other image-dependent service scenarios), skip on the RedHat
        # release where the pre-built Murano image scenario does not apply.
        if settings.OPENSTACK_RELEASE == settings.OPENSTACK_RELEASE_REDHAT:
            raise SkipTest()

        self.env.revert_snapshot("ready_with_5_slaves")

        LOGGER.debug('Check MD5 of image')
        # Fail fast if the pre-downloaded Murano image is missing/corrupted.
        check_image = checkers.check_image(
            settings.SERVTEST_MURANO_IMAGE,
            settings.SERVTEST_MURANO_IMAGE_MD5,
            settings.SERVTEST_LOCAL_PATH)
        asserts.assert_true(check_image, "Image verification failed")

        # Cluster settings: Murano enabled, Neutron with GRE segmentation,
        # dedicated OpenStack credentials for this scenario.
        data = {
            'murano': True,
            'net_provider': 'neutron',
            'net_segment_type': 'gre',
            'tenant': 'muranoHA',
            'user': 'muranoHA',
            'password': 'muranoHA'
        }
        cluster_id = self.fuel_web.create_cluster(
            name=self.__class__.__name__,
            mode=settings.DEPLOYMENT_MODE,
            settings=data)
        # HA layout: three controllers plus one compute node.
        self.fuel_web.update_nodes(
            cluster_id,
            {
                'slave-01': ['controller'],
                'slave-02': ['controller'],
                'slave-03': ['controller'],
                'slave-04': ['compute']
            }
        )
        self.fuel_web.deploy_cluster_wait(cluster_id)
        cluster_vip = self.fuel_web.get_public_vip(cluster_id)
        os_conn = os_actions.OpenStackActions(
            cluster_vip, data['user'], data['password'], data['tenant'])
        # smiles_count=13 is the expected health-check count for this
        # 3-controller + 1-compute layout.
        self.fuel_web.assert_cluster_ready(
            os_conn, smiles_count=13, networks_count=2, timeout=300)
        # Murano API must be up on every controller.
        for slave in ["slave-01", "slave-02", "slave-03"]:
            checkers.verify_service(
                self.env.get_ssh_to_remote_by_name(slave),
                service_name='murano-api')

        common_func = Common(cluster_vip, data['user'], data['password'],
                             data['tenant'])

        LOGGER.debug('Run sanity and functional Murano OSTF tests')
        self.fuel_web.run_single_ostf_test(
            cluster_id=self.fuel_web.get_last_created_cluster(),
            test_sets=['sanity'],
            test_name=('fuel_health.tests.sanity.test_sanity_murano.'
                       'MuranoSanityTests.test_create_and_delete_service')
        )

        LOGGER.debug('Import Murano image')
        common_func.image_import(
            settings.SERVTEST_LOCAL_PATH,
            settings.SERVTEST_MURANO_IMAGE,
            settings.SERVTEST_MURANO_IMAGE_NAME,
            settings.SERVTEST_MURANO_IMAGE_META)

        LOGGER.debug('Boot instance with Murano image')
        # Smoke-check that the imported image actually boots (up to 1h),
        # then delete the instance to free resources.
        image_name = settings.SERVTEST_MURANO_IMAGE_NAME
        srv = common_func.create_instance(flavor_name='test_murano_flavor',
                                          ram=2048, vcpus=1, disk=20,
                                          server_name='murano_instance',
                                          image_name=image_name,
                                          neutron_network=True)
        wait(lambda: common_func.get_instance_detail(srv).status == 'ACTIVE',
             timeout=60 * 60)
        common_func.delete_instance(srv)

        LOGGER.debug('Run OSTF platform tests')
        test_class_main = ('fuel_health.tests.platform_tests'
                           '.test_murano_linux.MuranoDeployLinuxServicesTests')
        tests_names = ['test_deploy_apache_service', ]
        test_classes = []
        for test_name in tests_names:
            test_classes.append('{0}.{1}'.format(test_class_main,
                                                 test_name))
        for test_name in test_classes:
            self.fuel_web.run_single_ostf_test(
                cluster_id=cluster_id, test_sets=['platform_tests'],
                test_name=test_name, timeout=60 * 36)

        self.env.make_snapshot("deploy_murano_ha_with_gre")
class CeilometerOSTFTestsRun(TestBasic):

    def run_tests(self, cluster_id):
        """Run smoke, sanity and platform Ceilometer OSTF tests.

        First executes the whole 'smoke' and 'sanity' OSTF sets, then runs
        each Ceilometer platform test individually so every one gets its
        own 20-minute timeout.
        """
        LOGGER.debug('Run sanity and smoke tests')
        self.fuel_web.run_ostf(
            cluster_id=cluster_id,
            test_sets=['smoke', 'sanity'],
            timeout=60 * 10
        )

        LOGGER.debug('Run platform OSTF Ceilometer tests')
        test_class_main = ('fuel_health.tests.platform_tests.'
                           'test_ceilometer.'
                           'CeilometerApiPlatformTests')
        tests_names = ['test_check_alarm_state',
                       'test_create_sample']
        # Build the fully-qualified OSTF test identifiers and run each one.
        qualified_tests = ['{0}.{1}'.format(test_class_main, short_name)
                           for short_name in tests_names]
        for qualified_name in qualified_tests:
            self.fuel_web.run_single_ostf_test(
                cluster_id=cluster_id, test_sets=['platform_tests'],
                test_name=qualified_name, timeout=60 * 20)
@test(groups=["services", "services.ceilometer", "services_ha_one_controller"])
class CeilometerHAOneControllerMongo(CeilometerOSTFTestsRun):
    """Ceilometer tests with one controller and a dedicated MongoDB node."""

    @test(depends_on=[SetupEnvironment.prepare_slaves_3],
          groups=["deploy_ceilometer_ha_one_controller_with_mongo"])
    @log_snapshot_on_error
    def deploy_ceilometer_ha_one_controller_with_mongo(self):
        """Deploy cluster in HA mode with Ceilometer

        Scenario:
            1. Create cluster. Set install Ceilometer option
            2. Add 1 node with controller role
            3. Add 1 nodes with compute role
            4. Add 1 node with cinder role
            5. Add 1 node with mongo role
            6. Deploy the cluster
            7. Verify ceilometer api is running
            8. Run OSTF

        Duration 45m
        Snapshot: deploy_ceilometer_ha_one_controller_with_mongo
        """
        self.env.revert_snapshot("ready_with_3_slaves")

        cluster_id = self.fuel_web.create_cluster(
            name=self.__class__.__name__,
            mode=settings.DEPLOYMENT_MODE,
            settings={
                'ceilometer': True,
                'tenant': 'ceilometerSimple',
                'user': 'ceilometerSimple',
                'password': 'ceilometerSimple'
            }
        )
        self.fuel_web.update_nodes(
            cluster_id,
            {
                'slave-01': ['controller'],
                'slave-02': ['compute', 'cinder'],
                'slave-03': ['mongo']
            }
        )

        # Repartition the mongo node's "vda" disk: reserve a fixed-size
        # partition for MongoDB and give the remainder to the OS volume.
        nailgun_nodes = self.fuel_web.client.list_cluster_nodes(cluster_id)
        disk_mb = 0
        for node in nailgun_nodes:
            if node.get('pending_roles') == ['mongo']:
                disk_mb = self.fuel_web.get_node_disk_size(node.get('id'),
                                                           "vda")

        LOGGER.debug('disk size is {0}'.format(disk_mb))
        mongo_disk_mb = 11116
        os_disk_mb = disk_mb - mongo_disk_mb
        # Expected partition-size string for the post-deploy check below.
        # NOTE(review): under Python 2 `mongo_disk_mb / 1024` is integer
        # division, so this yields e.g. '10.0G' — confirm this matches the
        # format reported by get_mongo_partitions before changing it.
        mongo_disk_gb = ("{0}G".format(round(mongo_disk_mb / 1024, 1)))
        disk_part = {
            "vda": {
                "os": os_disk_mb,
                "mongo": mongo_disk_mb
            }
        }

        for node in nailgun_nodes:
            if node.get('pending_roles') == ['mongo']:
                self.fuel_web.update_node_disk(node.get('id'), disk_part)

        self.fuel_web.deploy_cluster_wait(cluster_id)

        checkers.verify_service(
            self.env.get_ssh_to_remote_by_name("slave-01"),
            service_name='ceilometer-api')

        # Verify the mongo partition survived deployment with the size we
        # requested ("vda5" is the partition created for the mongo volume).
        partitions = checkers.get_mongo_partitions(
            self.env.get_ssh_to_remote_by_name("slave-03"), "vda5")
        assert_equal(partitions[0].rstrip(), mongo_disk_gb,
                     'Mongo size {0} before deployment is not equal'
                     ' to size after {1}'.format(mongo_disk_gb, partitions))

        self.run_tests(cluster_id)
        self.env.make_snapshot(
            "deploy_ceilometer_ha_one_controller_with_mongo")

    @test(depends_on=[SetupEnvironment.prepare_slaves_3],
          groups=["deploy_ceilometer_ha_one_controller_multirole"])
    @log_snapshot_on_error
    def deploy_ceilometer_ha_one_controller_multirole(self):
        """Deploy cluster in ha multirole mode with Ceilometer

        Scenario:
            1. Create cluster. Set install Ceilometer option
            2. Add 1 node with controller role
            3. Add 1 nodes with compute role
            4. Add 2 nodes with cinder and mongo roles
            5. Deploy the cluster
            6. Verify ceilometer api is running
            7. Run OSTF

        Duration 35m
        Snapshot: deploy_ceilometer_ha_one_controller_multirole
        """
        self.env.revert_snapshot("ready_with_3_slaves")

        cluster_id = self.fuel_web.create_cluster(
            name=self.__class__.__name__,
            mode=settings.DEPLOYMENT_MODE,
            settings={
                'ceilometer': True
            }
        )
        self.fuel_web.update_nodes(
            cluster_id,
            {
                'slave-01': ['controller'],
                'slave-02': ['compute'],
                'slave-03': ['cinder', 'mongo']
            }
        )
        self.fuel_web.deploy_cluster_wait(cluster_id)

        checkers.verify_service(
            self.env.get_ssh_to_remote_by_name("slave-01"),
            service_name='ceilometer-api')

        self.run_tests(cluster_id)
        # Fixed typo: snapshot name now matches the test group and the
        # docstring ("multirole", previously "mulirole").
        self.env.make_snapshot("deploy_ceilometer_ha_one_controller_multirole")
@test(groups=["services", "services.ceilometer", "services_ha"])
class CeilometerHAMongo(CeilometerOSTFTestsRun):
    """Ceilometer HA tests with MongoDB backend."""

    @test(depends_on=[SetupEnvironment.prepare_slaves_5],
          groups=["deploy_ceilometer_ha_with_mongo"])
    @log_snapshot_on_error
    def deploy_ceilometer_ha_with_mongo(self):
        """Deploy cluster in ha mode with Ceilometer

        Scenario:
            1. Create cluster. Set install Ceilometer option
            2. Add 3 node with controller role
            3. Add 1 nodes with compute role
            4. Add 1 node with mongo role
            5. Deploy the cluster
            6. Verify ceilometer api is running
            7. Run OSTF

        Duration 65m
        Snapshot: deploy_ceilometer_ha_with_mongo
        """
        self.env.revert_snapshot("ready_with_5_slaves")

        cluster_id = self.fuel_web.create_cluster(
            name=self.__class__.__name__,
            mode=settings.DEPLOYMENT_MODE,
            settings={
                'ceilometer': True,
                'tenant': 'ceilometerHA',
                'user': 'ceilometerHA',
                'password': 'ceilometerHA'
            }
        )
        # HA layout: three controllers, one compute, one dedicated mongo.
        self.fuel_web.update_nodes(
            cluster_id,
            {
                'slave-01': ['controller'],
                'slave-02': ['controller'],
                'slave-03': ['controller'],
                'slave-04': ['compute'],
                'slave-05': ['mongo']
            }
        )
        self.fuel_web.deploy_cluster_wait(cluster_id)

        checkers.verify_service(
            self.env.get_ssh_to_remote_by_name("slave-01"),
            service_name='ceilometer-api')

        self.run_tests(cluster_id)
        self.env.make_snapshot("deploy_ceilometer_ha_with_mongo")

    @test(depends_on=[SetupEnvironment.prepare_slaves_5],
          groups=["deploy_ceilometer_ha_multirole"])
    @log_snapshot_on_error
    def deploy_ceilometer_ha_multirole(self):
        """Deploy cluster in ha multirole mode with Ceilometer

        Scenario:
            1. Create cluster. Set install Ceilometer option
            2. Add 3 node with controller and mongo roles
            3. Add 1 nodes with compute role
            4. Add 1 nodes with cinder
            5. Deploy the cluster
            6. Verify ceilometer api is running
            7. Run OSTF

        Duration 80m
        Snapshot: deploy_ceilometer_ha_multirole
        """
        self.env.revert_snapshot("ready_with_5_slaves")

        cluster_id = self.fuel_web.create_cluster(
            name=self.__class__.__name__,
            mode=settings.DEPLOYMENT_MODE,
            settings={
                'ceilometer': True
            }
        )
        # Multirole layout: mongo is co-located with each controller.
        self.fuel_web.update_nodes(
            cluster_id,
            {
                'slave-01': ['controller', 'mongo'],
                'slave-02': ['controller', 'mongo'],
                'slave-03': ['controller', 'mongo'],
                'slave-04': ['compute'],
                'slave-05': ['cinder']
            }
        )
        self.fuel_web.deploy_cluster_wait(cluster_id)

        checkers.verify_service(
            self.env.get_ssh_to_remote_by_name("slave-01"),
            service_name='ceilometer-api')

        self.run_tests(cluster_id)
        # Fixed typo: snapshot name now matches the test group and the
        # docstring ("multirole", previously "mulirole").
        self.env.make_snapshot("deploy_ceilometer_ha_multirole")
@test(groups=["services", "services.heat", "services_ha_one_controller"])
class HeatHAOneController(TestBasic):
    """Heat HA one controller test.

    Don't recommend to start tests without kvm.
    """

    @test(depends_on=[SetupEnvironment.prepare_slaves_3],
          groups=["deploy_heat_ha_one_controller_neutron"])
    @log_snapshot_on_error
    def deploy_heat_ha_one_controller_neutron(self):
        """Deploy Heat cluster in HA mode with Neutron GRE

        Scenario:
            1. Create cluster
            2. Add 1 node with controller role and mongo
            3. Add 1 nodes with compute role
            4. Set install Ceilometer option
            5. Deploy the cluster
            6. Verify Heat, Ceilometer services
            7. Run OSTF platform tests

        Duration 40m
        Snapshot: deploy_heat_ha_one_controller_neutron
        """
        self.env.revert_snapshot("ready_with_3_slaves")

        # Ceilometer is enabled because the Heat autoscaling platform test
        # below uses alarm-driven scaling.
        data = {
            'ceilometer': True,
            'net_provider': 'neutron',
            'net_segment_type': 'gre',
            'tenant': 'heatSimple',
            'user': 'heatSimple',
            'password': 'heatSimple'
        }
        cluster_id = self.fuel_web.create_cluster(
            name=self.__class__.__name__,
            mode=settings.DEPLOYMENT_MODE,
            settings=data)
        # mongo role is required on the controller for Ceilometer's backend.
        self.fuel_web.update_nodes(
            cluster_id,
            {
                'slave-01': ['controller', 'mongo'],
                'slave-02': ['compute']
            }
        )
        self.fuel_web.deploy_cluster_wait(cluster_id)

        os_conn = os_actions.OpenStackActions(
            self.fuel_web.get_public_vip(cluster_id),
            data['user'], data['password'], data['tenant'])
        self.fuel_web.assert_cluster_ready(
            os_conn, smiles_count=5, networks_count=2, timeout=300)
        # Three heat-api processes are expected on the controller
        # (count=3); ceilometer-api uses the default expected count.
        checkers.verify_service(
            self.env.get_ssh_to_remote_by_name("slave-01"),
            service_name='heat-api', count=3)
        checkers.verify_service(
            self.env.get_ssh_to_remote_by_name("slave-01"),
            service_name='ceilometer-api')

        LOGGER.debug('Run Heat OSTF platform tests')
        test_class_main = ('fuel_health.tests.platform_tests.'
                           'test_heat.'
                           'HeatSmokeTests')
        tests_names = ['test_actions',
                       'test_autoscaling',
                       'test_rollback',
                       'test_update']
        test_classes = []
        for test_name in tests_names:
            test_classes.append('{0}.{1}'.format(test_class_main,
                                                 test_name))
        # Each Heat platform test gets its own 60-minute timeout.
        for test_name in test_classes:
            self.fuel_web.run_single_ostf_test(
                cluster_id=cluster_id, test_sets=['platform_tests'],
                test_name=test_name, timeout=60 * 60)

        self.env.make_snapshot("deploy_heat_ha_one_controller_neutron")

    @test(depends_on=[SetupEnvironment.prepare_slaves_3],
          groups=["deploy_heat_ha_one_controller_nova"])
    @log_snapshot_on_error
    def deploy_heat_ha_one_controller_nova(self):
        """Deploy Heat cluster in ha mode with Nova Network

        Scenario:
            1. Create cluster
            2. Add 1 node with controller role and mongo
            3. Add 1 nodes with compute role
            4. Set Ceilometer install option
            5. Deploy the cluster
            6. Verify Heat, Ceilometer services
            7. Run OSTF platform tests

        Duration 40m
        Snapshot: deploy_heat_ha_one_controller_nova
        """
        self.env.revert_snapshot("ready_with_3_slaves")

        # Nova Network variant: no net_provider key, so the default
        # networking is used; Ceilometer is needed for autoscaling tests.
        data = {
            'ceilometer': True,
            'tenant': 'heatSimple',
            'user': 'heatSimple',
            'password': 'heatSimple'
        }
        cluster_id = self.fuel_web.create_cluster(
            name=self.__class__.__name__,
            mode=settings.DEPLOYMENT_MODE,
            settings=data)
        self.fuel_web.update_nodes(
            cluster_id,
            {
                'slave-01': ['controller', 'mongo'],
                'slave-02': ['compute']
            }
        )
        self.fuel_web.deploy_cluster_wait(cluster_id)

        os_conn = os_actions.OpenStackActions(
            self.fuel_web.get_public_vip(cluster_id),
            data['user'], data['password'], data['tenant'])
        # Expected counts differ from the Neutron variant (6 smiles,
        # a single network) because Nova Network is used here.
        self.fuel_web.assert_cluster_ready(
            os_conn, smiles_count=6, networks_count=1, timeout=300)
        checkers.verify_service(
            self.env.get_ssh_to_remote_by_name("slave-01"),
            service_name='heat-api', count=3)
        checkers.verify_service(
            self.env.get_ssh_to_remote_by_name("slave-01"),
            service_name='ceilometer-api')

        LOGGER.debug('Run Heat OSTF platform tests')
        test_class_main = ('fuel_health.tests.platform_tests.'
                           'test_heat.'
                           'HeatSmokeTests')
        tests_names = ['test_actions',
                       'test_autoscaling',
                       'test_rollback',
                       'test_update']
        test_classes = []
        for test_name in tests_names:
            test_classes.append('{0}.{1}'.format(test_class_main,
                                                 test_name))
        for test_name in test_classes:
            self.fuel_web.run_single_ostf_test(
                cluster_id=cluster_id, test_sets=['platform_tests'],
                test_name=test_name, timeout=60 * 60)

        self.env.make_snapshot("deploy_heat_ha_one_controller_nova")
@test(groups=["services", "services.heat", "services_ha"])
class HeatHA(TestBasic):
    """Heat HA test.

    Don't recommend to start tests without kvm.
    """

    @test(depends_on=[SetupEnvironment.prepare_slaves_5],
          groups=["deploy_heat_ha"])
    @log_snapshot_on_error
    def deploy_heat_ha(self):
        """Deploy Heat cluster in HA mode

        Scenario:
            1. Create cluster
            2. Add 3 node with controller role and mongo
            3. Add 1 nodes with compute role
            4. Set Ceilometer install option
            5. Deploy the cluster
            6. Verify Heat and Ceilometer services
            7. Run OSTF platform tests

        Duration 70m
        Snapshot: deploy_heat_ha
        """
        self.env.revert_snapshot("ready_with_5_slaves")

        # Ceilometer is enabled because Heat's alarm-based features are
        # exercised; Neutron with GRE segmentation is used for networking.
        data = {
            'ceilometer': True,
            'net_provider': 'neutron',
            'net_segment_type': 'gre',
            'tenant': 'heatHA',
            'user': 'heatHA',
            'password': 'heatHA'
        }
        cluster_id = self.fuel_web.create_cluster(
            name=self.__class__.__name__,
            mode=settings.DEPLOYMENT_MODE,
            settings=data)
        # HA layout: mongo (Ceilometer backend) co-located with each
        # of the three controllers, plus one compute node.
        self.fuel_web.update_nodes(
            cluster_id,
            {
                'slave-01': ['controller', 'mongo'],
                'slave-02': ['controller', 'mongo'],
                'slave-03': ['controller', 'mongo'],
                'slave-04': ['compute']
            }
        )
        self.fuel_web.deploy_cluster_wait(cluster_id)

        cluster_vip = self.fuel_web.get_public_vip(cluster_id)
        os_conn = os_actions.OpenStackActions(
            cluster_vip, data['user'], data['password'], data['tenant'])
        # smiles_count=13 is the expected health-check count for this
        # 3-controller + 1-compute layout.
        self.fuel_web.assert_cluster_ready(
            os_conn, smiles_count=13, networks_count=2, timeout=300)
        # Three heat-api processes are expected on each controller.
        for slave in ["slave-01", "slave-02", "slave-03"]:
            checkers.verify_service(
                self.env.get_ssh_to_remote_by_name(slave),
                service_name='heat-api', count=3)
            checkers.verify_service(
                self.env.get_ssh_to_remote_by_name(slave),
                service_name='ceilometer-api')

        LOGGER.debug('Run Heat OSTF platform tests')
        test_class_main = ('fuel_health.tests.platform_tests.'
                           'test_heat.'
                           'HeatSmokeTests')
        tests_names = ['test_actions',
                       'test_rollback']
        test_classes = []
        for test_name in tests_names:
            test_classes.append('{0}.{1}'.format(test_class_main,
                                                 test_name))
        for test_name in test_classes:
            self.fuel_web.run_single_ostf_test(
                cluster_id=cluster_id, test_sets=['platform_tests'],
                test_name=test_name, timeout=60 * 60)

        self.env.make_snapshot("deploy_heat_ha")
| 36.495477
| 79
| 0.59816
| 4,096
| 36,313
| 4.983643
| 0.074707
| 0.030863
| 0.037182
| 0.01019
| 0.885073
| 0.869397
| 0.858081
| 0.834811
| 0.809141
| 0.797286
| 0
| 0.01746
| 0.312312
| 36,313
| 994
| 80
| 36.532193
| 0.799976
| 0.154022
| 0
| 0.71275
| 0
| 0
| 0.19319
| 0.05997
| 0
| 0
| 0
| 0
| 0.021505
| 1
| 0.018433
| false
| 0.033794
| 0.033794
| 0
| 0.066052
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0109dab47769eb0e8ef9f7fb3272914cabc36f2b
| 228
|
py
|
Python
|
output/models/ibm_data/instance_invalid/s3_4_2_4/s3_4_2_4ii06_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 1
|
2021-08-14T17:59:21.000Z
|
2021-08-14T17:59:21.000Z
|
output/models/ibm_data/instance_invalid/s3_4_2_4/s3_4_2_4ii06_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 4
|
2020-02-12T21:30:44.000Z
|
2020-04-15T20:06:46.000Z
|
output/models/ibm_data/instance_invalid/s3_4_2_4/s3_4_2_4ii06_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | null | null | null |
from output.models.ibm_data.instance_invalid.s3_4_2_4.s3_4_2_4ii06_xsd.s3_4_2_4ii06 import C1
from output.models.ibm_data.instance_invalid.s3_4_2_4.s3_4_2_4ii06_xsd.s3_4_2_4ii06b import Root
__all__ = [
"C1",
"Root",
]
| 28.5
| 96
| 0.807018
| 47
| 228
| 3.319149
| 0.361702
| 0.115385
| 0.153846
| 0.173077
| 0.75641
| 0.75641
| 0.75641
| 0.75641
| 0.75641
| 0.75641
| 0
| 0.165854
| 0.100877
| 228
| 7
| 97
| 32.571429
| 0.595122
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
011ff4edd156912e2addf065a78d2ad5f8e8508b
| 111
|
py
|
Python
|
testproject/tesapp_link/models.py
|
rouxcode/django-text-ckeditor
|
dd8f86a6ffcbde8d1b85fc6e70d2653dd65b2737
|
[
"MIT"
] | null | null | null |
testproject/tesapp_link/models.py
|
rouxcode/django-text-ckeditor
|
dd8f86a6ffcbde8d1b85fc6e70d2653dd65b2737
|
[
"MIT"
] | null | null | null |
testproject/tesapp_link/models.py
|
rouxcode/django-text-ckeditor
|
dd8f86a6ffcbde8d1b85fc6e70d2653dd65b2737
|
[
"MIT"
] | null | null | null |
from text_ckeditor.text_ckeditor_links.models import AbstractLink
class CKEditorLink(AbstractLink):
    """Concrete link model for the test app.

    Inherits all fields and behavior unchanged from AbstractLink.
    """
    pass
| 18.5
| 65
| 0.837838
| 13
| 111
| 6.923077
| 0.769231
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117117
| 111
| 5
| 66
| 22.2
| 0.918367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
01753bbf8edd97bb1e032df913d15518d1361f28
| 3,860
|
py
|
Python
|
tests/test_basic.py
|
welhefna/PSSM
|
3a10917c30b4a2e720e2f5c36cee7f8252093880
|
[
"MIT"
] | null | null | null |
tests/test_basic.py
|
welhefna/PSSM
|
3a10917c30b4a2e720e2f5c36cee7f8252093880
|
[
"MIT"
] | null | null | null |
tests/test_basic.py
|
welhefna/PSSM
|
3a10917c30b4a2e720e2f5c36cee7f8252093880
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import doctest
from pprint import pprint
from context import pssm as pssm_reader
def basic_pssm():
"""Basic PSSM Parser Function
>>> p = pssm_reader.PSSMParser("/home/welhefna/scope2.06New/fold2vector/database/PSSM/4INKA.pssm")
>>> pprint (p.sequence_to_pssm('VV'))
[{'aa_sequence': ['V', 'V'],
'index': [27, 28],
'ipp': [1.33, 1.58],
'lpssm': [[-1.0,
-6.0,
-6.0,
-7.0,
-4.0,
-6.0,
-6.0,
-7.0,
-6.0,
6.0,
0.0,
-6.0,
1.0,
0.0,
-6.0,
-5.0,
-4.0,
-6.0,
-4.0,
5.0],
[-4.0,
-6.0,
-7.0,
-7.0,
-5.0,
-6.0,
-7.0,
-7.0,
-7.0,
7.0,
-1.0,
-6.0,
-2.0,
1.0,
-6.0,
-5.0,
-3.0,
2.0,
-2.0,
4.0]],
'rwgrmt': ['inf', 'inf'],
'woprd': [[4.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
45.0,
8.0,
0.0,
3.0,
4.0,
0.0,
0.0,
0.0,
0.0,
0.0,
37.0],
[0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
60.0,
3.0,
0.0,
0.0,
5.0,
0.0,
1.0,
1.0,
3.0,
1.0,
27.0]]},
{'aa_sequence': ['V', 'V'],
'index': [80, 81],
'ipp': [1.16, 1.03],
'lpssm': [[-2.0,
-6.0,
-6.0,
-7.0,
-4.0,
-6.0,
-6.0,
-6.0,
-6.0,
2.0,
3.0,
-6.0,
0.0,
1.0,
-6.0,
-3.0,
-4.0,
-5.0,
-3.0,
6.0],
[-2.0,
-5.0,
-6.0,
-6.0,
-4.0,
-5.0,
-6.0,
-6.0,
-6.0,
4.0,
4.0,
-5.0,
1.0,
-3.0,
-6.0,
-5.0,
0.0,
-5.0,
-4.0,
3.0]],
'rwgrmt': ['inf', 'inf'],
'woprd': [[3.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
11.0,
24.0,
0.0,
1.0,
6.0,
0.0,
1.0,
0.0,
0.0,
1.0,
53.0],
[3.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
21.0,
44.0,
0.0,
2.0,
0.0,
0.0,
0.0,
7.0,
0.0,
0.0,
24.0]]},
{'aa_sequence': ['V', 'V'],
'index': [149, 150],
'ipp': [0.47, 1.2],
'lpssm': [[4.0,
-3.0,
-3.0,
0.0,
-3.0,
-3.0,
-2.0,
-2.0,
0.0,
0.0,
-3.0,
-1.0,
-3.0,
0.0,
1.0,
3.0,
-1.0,
-4.0,
0.0,
-2.0],
[0.0,
-6.0,
-5.0,
-6.0,
-4.0,
-5.0,
-6.0,
-6.0,
-6.0,
6.0,
-2.0,
-5.0,
-2.0,
1.0,
-5.0,
-4.0,
3.0,
-5.0,
-4.0,
2.0]],
'rwgrmt': ['inf', 'inf'],
'woprd': [[39.0,
1.0,
0.0,
5.0,
0.0,
0.0,
2.0,
1.0,
2.0,
6.0,
1.0,
4.0,
0.0,
4.0,
6.0,
22.0,
2.0,
0.0,
4.0,
2.0],
[8.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
54.0,
0.0,
0.0,
0.0,
6.0,
0.0,
0.0,
18.0,
0.0,
0.0,
14.0]]}]
>>> p = pssm_reader.PSSMParser("/home/welhefna/scope2.06New/fold2vector/database/PSSM/4INKA.pssm")
>>> print len(p.sequence_to_pssm('VV'))
3
"""
if __name__ == '__main__':
doctest.testmod()
| 14.036364
| 99
| 0.261399
| 599
| 3,860
| 1.652755
| 0.12187
| 0.331313
| 0.40303
| 0.412121
| 0.751515
| 0.526263
| 0.489899
| 0.319192
| 0.319192
| 0.319192
| 0
| 0.293348
| 0.52487
| 3,860
| 274
| 100
| 14.087591
| 0.246456
| 1.315026
| 0
| 0
| 0
| 0
| 0.051613
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.5
| 0
| 0.666667
| 0.166667
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
6d971a014a050946531e35c22b8fe2d5624cab16
| 182
|
py
|
Python
|
nlcpy/statistics/__init__.py
|
SX-Aurora/nlcpy
|
0a53eec8778073bc48b12687b7ce37ab2bf2b7e0
|
[
"BSD-3-Clause"
] | 11
|
2020-07-31T02:21:55.000Z
|
2022-03-10T03:12:11.000Z
|
nlcpy/statistics/__init__.py
|
SX-Aurora/nlcpy
|
0a53eec8778073bc48b12687b7ce37ab2bf2b7e0
|
[
"BSD-3-Clause"
] | null | null | null |
nlcpy/statistics/__init__.py
|
SX-Aurora/nlcpy
|
0a53eec8778073bc48b12687b7ce37ab2bf2b7e0
|
[
"BSD-3-Clause"
] | null | null | null |
from nlcpy.statistics import order # NOQA
from nlcpy.statistics import average # NOQA
from nlcpy.statistics import correlating # NOQA
from nlcpy.statistics import histograms # NOQA
| 36.4
| 47
| 0.818681
| 24
| 182
| 6.208333
| 0.375
| 0.241611
| 0.510067
| 0.671141
| 0.583893
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137363
| 182
| 4
| 48
| 45.5
| 0.949045
| 0.104396
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
09f2f5e3c319e6b224c0275cf1a5600de82d24c9
| 11,408
|
py
|
Python
|
tests/test_rosimport/subtests/test_rosmsg_generator_exec.py
|
asmodehn/rosimport
|
c63e4769650b1cf19f23fbaa65a356ffae20a536
|
[
"MIT"
] | 5
|
2017-11-11T18:26:28.000Z
|
2019-06-12T08:47:58.000Z
|
tests/test_rosimport/subtests/test_rosmsg_generator_exec.py
|
asmodehn/rosimport
|
c63e4769650b1cf19f23fbaa65a356ffae20a536
|
[
"MIT"
] | 8
|
2017-06-30T08:28:46.000Z
|
2017-07-18T04:50:18.000Z
|
tests/test_rosimport/subtests/test_rosmsg_generator_exec.py
|
pyros-dev/rosimport
|
c63e4769650b1cf19f23fbaa65a356ffae20a536
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import, division, print_function
import tempfile
import unittest
"""
Testing executing rosmsg_generator directly (like setup.py would use it)
without relying on the import mechanism at all (careful with dependencies)
"""
import os
import sys
import runpy
import pkg_resources
import importlib
import site
from ._utils import load_from_path
from rosimport import genrosmsg_py, genrossrv_py
class TestGenerateBasic(unittest.TestCase):
    """Generation tests for standalone msg/srv packages (no dependencies)."""

    def test_generate_msgpkg_usable(self):
        """ Testing the generation for a msg package. """
        tmpsitedir = tempfile.mkdtemp('rosimport_subtests_site')
        # generating message class
        sitedir, generated_msg_code = genrosmsg_py(
            [os.path.join(os.path.dirname(__file__), 'msg', 'SubTestMsg.msg')],
            package='subtest_gen_msgs.subtests',
            sitedir=tmpsitedir
        )
        assert sitedir == tmpsitedir
        # BUGFIX: the assertion messages used to reference '.../subtest/...'
        # (singular) while the conditions checked '.../subtests/...' (plural),
        # so a failure would have reported a path that was never tested.
        # Binding the checked directory once keeps condition and message in sync.
        msg_dir = os.path.join(sitedir, 'subtest_gen_msgs', 'subtests', 'msg')
        assert generated_msg_code == os.path.join(msg_dir, '__init__.py')
        # Python code was generated properly
        assert os.path.exists(msg_dir), msg_dir
        assert os.path.exists(os.path.join(msg_dir, '__init__.py')), os.path.join(msg_dir, '__init__.py')
        assert os.path.exists(os.path.join(msg_dir, '_SubTestMsg.py')), os.path.join(msg_dir, '_SubTestMsg.py')
        # TODO : test the most basic way to import
        # msgs_mod = load_from_path('subtest_gen_msgs.subtests', generated_msg_code)
        # assert msgs_mod is not None
        # assert hasattr(msgs_mod, 'TestMsg')
        # assert msgs_mod.TestMsg._type == 'subtest_gen_msgs/TestMsg'

    def test_generate_srvpkg_usable(self):
        """ Testing our generated srv package is importable.
        Note this test require filefinder2 on python2 for passing."""
        tmpsitedir = tempfile.mkdtemp('rosimport_subtests_site')
        # generating service class
        sitedir, generated_srv_code = genrossrv_py(
            [os.path.join(os.path.dirname(__file__), 'srv', 'SubTestSrv.srv')],
            package='subtest_gen_srvs.subtests',
            sitedir=tmpsitedir
        )
        assert sitedir == tmpsitedir
        # Same message/condition sync fix as in test_generate_msgpkg_usable.
        srv_dir = os.path.join(sitedir, 'subtest_gen_srvs', 'subtests', 'srv')
        assert generated_srv_code == os.path.join(srv_dir, '__init__.py')
        # Python code was generated properly
        assert os.path.exists(srv_dir), srv_dir
        assert os.path.exists(os.path.join(srv_dir, '__init__.py')), os.path.join(srv_dir, '__init__.py')
        assert os.path.exists(os.path.join(srv_dir, '_SubTestSrv.py')), os.path.join(srv_dir, '_SubTestSrv.py')
        # TODO : test the most basic way to import
        # subsrvs_mod = load_from_path('subtest_gen_srvs.subtests', generated_srv_code)
        # assert subsrvs_mod is not None
        # assert hasattr(subsrvs_mod, 'SubTestSrv')
        # assert subsrvs_mod.SubTestSrv._type == 'subtest_gen_srvs/SubTestSrv'
        #
        # assert hasattr(subsrvs_mod, 'SubTestSrvRequest')
        # assert subsrvs_mod.SubTestSrvRequest._type == 'subtest_gen_srvs/SubTestSrvRequest'
        #
        # assert hasattr(subsrvs_mod, 'SubTestSrvResponse')
        # assert subsrvs_mod.SubTestSrvResponse._type == 'subtest_gen_srvs/SubTestSrvResponse'
class TestGenerateWithDeps(unittest.TestCase):
    """Generation tests for msg/srv packages that depend on other messages."""

    def test_generate_msgpkg_deps_usable(self):
        """ Testing our generated msg package is importable.
        Note this test require filefinder2 on python2 for passing.
        We also depend on import feature here, since we need a message module
        to be able to import from another message module for dependencies,
        so the generation of the message cannot be independent from the import system."""
        tmpsitedir = tempfile.mkdtemp('rosimport_subtests_site')
        here = os.path.dirname(__file__)
        ancestor = os.path.dirname(os.path.dirname(here))
        search_path = {}  # accumulates generated modules across both calls
        # Dependencies must be generated first!
        sitedir_deps, generated_msg_code = genrosmsg_py(
            [
                os.path.join(ancestor, 'msg', 'TestRosMsg.msg'),
                os.path.join(ancestor, 'msg', 'TestRosMsgDeps.msg'),
            ],
            package='subtest_gen_msgs_deps',
            sitedir=tmpsitedir,
            search_path=search_path,
        )
        sitedir, generated_submsg_code = genrosmsg_py(
            [
                os.path.join(here, 'msg', 'SubTestMsgDeps.msg'),
            ],
            package='subtest_gen_msgs_deps.subtests',
            # reuse the previous site dir so both packages share one hierarchy
            sitedir=tmpsitedir,
            search_path=search_path,
        )
        assert sitedir == sitedir_deps == tmpsitedir
        deps_msg_dir = os.path.join(sitedir_deps, 'subtest_gen_msgs_deps', 'msg')
        sub_msg_dir = os.path.join(sitedir_deps, 'subtest_gen_msgs_deps', 'subtests', 'msg')
        assert generated_msg_code == os.path.join(deps_msg_dir, '__init__.py')
        assert generated_submsg_code == os.path.join(sub_msg_dir, '__init__.py')
        # Python code was generated properly
        for expected in (
            deps_msg_dir,
            os.path.join(deps_msg_dir, '__init__.py'),
            os.path.join(deps_msg_dir, '_TestRosMsg.py'),
            os.path.join(deps_msg_dir, '_TestRosMsgDeps.py'),
            os.path.join(sub_msg_dir, '__init__.py'),
            os.path.join(sub_msg_dir, '_SubTestMsgDeps.py'),
        ):
            assert os.path.exists(expected), expected
        # TODO : test the most basic way to import
        # msgdeps_mod = load_from_path('subtest_gen_msgs_deps', generated_msg_code)
        # assert msgdeps_mod is not None
        # assert hasattr(msgdeps_mod, 'TestRosMsg')
        # assert msgdeps_mod.TestRosMsg._type == 'test_gen_msgs/TestRosMsg'
        # assert hasattr(msgdeps_mod, 'TestRosMsgDeps')
        # assert msgdeps_mod.TestRosMsgDeps._type == 'test_gen_msgs/TestRosMsgDeps'
        #
        # msgs_mod = load_from_path('subtest_gen_msgs_deps.subtests', generated_submsg_code)
        # assert msgs_mod is not None
        # assert hasattr(msgs_mod, 'SubTestMsgDeps')
        # assert msgs_mod.SubTestMsgDeps._type == 'subtest_gen_srvs/SubTestMsgDeps'

    def test_generate_srvpkg_deps_usable(self):
        """ Testing our generated srv package is importable.
        Note this test require filefinder2 on python2 for passing.
        We also depend on import feature here, since we need a message module
        to be able to import from another message module for dependencies,
        so the generation of the message cannot be independent from the import system."""
        here = os.path.dirname(__file__)
        ancestor = os.path.dirname(os.path.dirname(here))
        search_path = {}  # accumulates generated modules across both calls
        # Dependencies must be generated first!
        sitedir_deps, generated_msg_code = genrosmsg_py(
            [
                os.path.join(ancestor, 'msg', 'TestRosMsg.msg'),
                os.path.join(ancestor, 'msg', 'TestRosMsgDeps.msg'),
            ],
            package='subtest_gen_srvs_deps',
            sitedir=tempfile.mkdtemp('rosimport_subtests_site'),
            search_path=search_path
        )
        sitedir, generated_srv_code = genrossrv_py(
            [
                os.path.join(here, 'srv', 'SubTestSrvDeps.srv'),
            ],
            # reuse the previous site dir so both packages share one hierarchy
            sitedir=sitedir_deps,
            package='subtest_gen_srvs_deps.subtests',
            search_path=search_path
        )
        assert sitedir == sitedir_deps
        deps_msg_dir = os.path.join(sitedir_deps, 'subtest_gen_srvs_deps', 'msg')
        sub_srv_dir = os.path.join(sitedir_deps, 'subtest_gen_srvs_deps', 'subtests', 'srv')
        assert generated_msg_code == os.path.join(deps_msg_dir, '__init__.py')
        assert generated_srv_code == os.path.join(sub_srv_dir, '__init__.py')
        # Python code was generated properly
        for expected in (
            deps_msg_dir,
            os.path.join(deps_msg_dir, '__init__.py'),
            os.path.join(deps_msg_dir, '_TestRosMsg.py'),
            os.path.join(deps_msg_dir, '_TestRosMsgDeps.py'),
            os.path.join(sub_srv_dir, '__init__.py'),
            os.path.join(sub_srv_dir, '_SubTestSrvDeps.py'),
        ):
            assert os.path.exists(expected), expected
        # TODO : test the most basic way to import
        # msgdeps_mod = load_from_path('subtest_gen_msgs_deps', generated_msg_code)
        # assert msgdeps_mod is not None
        # assert hasattr(msgdeps_mod, 'TestRosMsg')
        # assert msgdeps_mod.TestRosMsg._type == 'test_gen_msgs/TestRosMsg'
        # assert hasattr(msgdeps_mod, 'TestRosMsgDeps')
        # assert msgdeps_mod.TestRosMsgDeps._type == 'test_gen_msgs/TestRosMsgDeps'
        #
        # subsrvs_mod = load_from_path('subtest_gen_srvs.subtests', generated_srv_code)
        # assert subsrvs_mod is not None
        # assert hasattr(subsrvs_mod, 'SubTestSrv')
        # assert subsrvs_mod.SubTestSrv._type == 'subtest_gen_srvs/SubTestSrv'
        #
        # assert hasattr(subsrvs_mod, 'SubTestSrvRequest')
        # assert subsrvs_mod.SubTestSrvRequest._type == 'subtest_gen_srvs/SubTestSrvRequest'
        #
        # assert hasattr(subsrvs_mod, 'SubTestSrvResponse')
        # assert subsrvs_mod.SubTestSrvResponse._type == 'subtest_gen_srvs/SubTestSrvResponse'
| 56.475248
| 217
| 0.686185
| 1,432
| 11,408
| 5.143156
| 0.100559
| 0.068432
| 0.067889
| 0.096945
| 0.875356
| 0.855397
| 0.834216
| 0.834216
| 0.797828
| 0.761032
| 0
| 0.000655
| 0.196704
| 11,408
| 202
| 218
| 56.475248
| 0.803034
| 0.313552
| 0
| 0.287234
| 0
| 0
| 0.2625
| 0.110638
| 0
| 0
| 0
| 0.019802
| 0.297872
| 1
| 0.042553
| false
| 0
| 0.159574
| 0
| 0.223404
| 0.010638
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3a242f5c57f6cc245f7672e93377da7de1a99c90
| 5,175
|
py
|
Python
|
tests/unit/records/schema/field/test_string_length_generator.py
|
cwegrzyn/records-mover
|
e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2
|
[
"Apache-2.0"
] | 36
|
2020-03-17T11:56:51.000Z
|
2022-01-19T16:03:32.000Z
|
tests/unit/records/schema/field/test_string_length_generator.py
|
cwegrzyn/records-mover
|
e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2
|
[
"Apache-2.0"
] | 60
|
2020-03-02T23:13:29.000Z
|
2021-05-19T15:05:42.000Z
|
tests/unit/records/schema/field/test_string_length_generator.py
|
cwegrzyn/records-mover
|
e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2
|
[
"Apache-2.0"
] | 4
|
2020-08-11T13:17:37.000Z
|
2021-11-05T21:11:52.000Z
|
import unittest
from mock import Mock
from records_mover.records.schema.field.string_length_generator import generate_string_length
class TestStringLengthGenerator(unittest.TestCase):
    """Table-driven tests for generate_string_length()."""

    maxDiff = None

    def test_generate_string_length_with_both(self):
        """Check the resolved string length for every combination of
        constraints/statistics lengths and driver varchar semantics."""
        test_cases = [
            {'driver': True, 'driver_varchar_length_is_in_chars': True,
             'constraints_max_length_bytes': 123, 'constraints_max_length_chars': None,
             'statistics_max_length_bytes': None, 'statistics_max_length_chars': None,
             'expected_string_length': 123},
            {'driver': True, 'driver_varchar_length_is_in_chars': True,
             'constraints_max_length_bytes': None, 'constraints_max_length_chars': 123,
             'statistics_max_length_bytes': None, 'statistics_max_length_chars': None,
             'expected_string_length': 123},
            {'driver': True, 'driver_varchar_length_is_in_chars': False,
             'constraints_max_length_bytes': 123, 'constraints_max_length_chars': None,
             'statistics_max_length_bytes': None, 'statistics_max_length_chars': None,
             'expected_string_length': 123},
            {'driver': True, 'driver_varchar_length_is_in_chars': False,
             'constraints_max_length_bytes': None, 'constraints_max_length_chars': 123,
             'statistics_max_length_bytes': None, 'statistics_max_length_chars': None,
             'expected_string_length': 492},
            {'driver': False,
             'constraints_max_length_bytes': None, 'constraints_max_length_chars': None,
             'statistics_max_length_bytes': None, 'statistics_max_length_chars': None,
             'expected_string_length': 256},
            {'driver': True, 'driver_varchar_length_is_in_chars': True,
             'constraints_max_length_bytes': None, 'constraints_max_length_chars': None,
             'statistics_max_length_bytes': 123, 'statistics_max_length_chars': None,
             'expected_string_length': 123},
            {'driver': True, 'driver_varchar_length_is_in_chars': True,
             'constraints_max_length_bytes': None, 'constraints_max_length_chars': None,
             'statistics_max_length_bytes': None, 'statistics_max_length_chars': 123,
             'expected_string_length': 123},
            {'driver': True, 'driver_varchar_length_is_in_chars': False,
             'constraints_max_length_bytes': None, 'constraints_max_length_chars': None,
             'statistics_max_length_bytes': 123, 'statistics_max_length_chars': None,
             'expected_string_length': 123},
            {'driver': True, 'driver_varchar_length_is_in_chars': False,
             'constraints_max_length_bytes': None, 'constraints_max_length_chars': None,
             'statistics_max_length_bytes': None, 'statistics_max_length_chars': 123,
             'expected_string_length': 492},
            {'driver': False,
             'constraints_max_length_bytes': None, 'constraints_max_length_chars': None,
             'statistics_max_length_bytes': None, 'statistics_max_length_chars': None,
             'expected_string_length': 256},
            {'driver': False,
             'constraints_max_length_bytes': 123, 'constraints_max_length_chars': None,
             'statistics_max_length_bytes': 22, 'statistics_max_length_chars': None,
             'expected_string_length': 123},
        ]
        for case in test_cases:
            # Build the (optional) driver mock first.
            if case['driver']:
                mock_driver = Mock(name='driver')
                mock_driver.varchar_length_is_in_chars.return_value = \
                    case['driver_varchar_length_is_in_chars']
            else:
                mock_driver = None
            # Constraints / statistics mocks only carry the length attributes
            # that generate_string_length() inspects.
            mock_constraints = Mock(name='constraints')
            mock_constraints.max_length_bytes = case['constraints_max_length_bytes']
            mock_constraints.max_length_chars = case['constraints_max_length_chars']
            mock_statistics = Mock(name='statistics')
            mock_statistics.max_length_bytes = case['statistics_max_length_bytes']
            mock_statistics.max_length_chars = case['statistics_max_length_chars']
            actual = generate_string_length(mock_constraints, mock_statistics, mock_driver)
            self.assertEqual(actual, case['expected_string_length'])
| 42.073171
| 93
| 0.554203
| 477
| 5,175
| 5.484277
| 0.100629
| 0.178899
| 0.198777
| 0.123853
| 0.735092
| 0.735092
| 0.713685
| 0.713685
| 0.713685
| 0.69419
| 0
| 0.018949
| 0.367729
| 5,175
| 122
| 94
| 42.418033
| 0.780562
| 0
| 0
| 0.613445
| 1
| 0
| 0.382609
| 0.363478
| 0
| 0
| 0
| 0
| 0.008403
| 1
| 0.008403
| false
| 0
| 0.02521
| 0
| 0.05042
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3a350b1cddb45964a9b2c6f6bf0bf463a04444f8
| 92
|
py
|
Python
|
checkov/terraform/__init__.py
|
cclauss/checkov
|
60a385fcaff1499cf00c2d0018575fe5ab71f556
|
[
"Apache-2.0"
] | 4,013
|
2019-12-09T13:16:54.000Z
|
2022-03-31T14:31:01.000Z
|
checkov/terraform/__init__.py
|
cclauss/checkov
|
60a385fcaff1499cf00c2d0018575fe5ab71f556
|
[
"Apache-2.0"
] | 1,258
|
2019-12-17T09:55:51.000Z
|
2022-03-31T19:17:17.000Z
|
checkov/terraform/__init__.py
|
cclauss/checkov
|
60a385fcaff1499cf00c2d0018575fe5ab71f556
|
[
"Apache-2.0"
] | 638
|
2019-12-19T08:57:38.000Z
|
2022-03-30T21:38:37.000Z
|
from checkov.terraform.checks.resource import *
from checkov.terraform.checks.data import *
| 30.666667
| 47
| 0.826087
| 12
| 92
| 6.333333
| 0.583333
| 0.289474
| 0.526316
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 92
| 2
| 48
| 46
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
28ec5deeeaf75154fc5dba6507dadefbfa15f7c9
| 14,946
|
py
|
Python
|
build/lib/bandwidth/messaging/controllers/api_controller.py
|
Spaced-Out/python-bandwidth-sdk
|
5332f29d1c093003444384f63a9d4a00843c954f
|
[
"MIT"
] | null | null | null |
build/lib/bandwidth/messaging/controllers/api_controller.py
|
Spaced-Out/python-bandwidth-sdk
|
5332f29d1c093003444384f63a9d4a00843c954f
|
[
"MIT"
] | null | null | null |
build/lib/bandwidth/messaging/controllers/api_controller.py
|
Spaced-Out/python-bandwidth-sdk
|
5332f29d1c093003444384f63a9d4a00843c954f
|
[
"MIT"
] | 1
|
2020-12-01T15:25:51.000Z
|
2020-12-01T15:25:51.000Z
|
# -*- coding: utf-8 -*-
"""
bandwidth
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
from bandwidth.api_helper import APIHelper
from bandwidth.configuration import Server
from bandwidth.http.api_response import ApiResponse
from bandwidth.utilities.file_wrapper import FileWrapper
from bandwidth.messaging.controllers.base_controller import BaseController
from bandwidth.http.auth.messaging_basic_auth import MessagingBasicAuth
from bandwidth.messaging.models.media import Media
from bandwidth.messaging.models.bandwidth_message import BandwidthMessage
from bandwidth.messaging.exceptions.messaging_exception import MessagingException
class APIController(BaseController):
    """A Controller to access Endpoints in the bandwidth API."""

    # Status code -> error message shared by every messaging endpoint.
    # Each generated method originally repeated the same six-branch if/elif
    # chain; the mapping plus _raise_messaging_errors() factors that out
    # while raising byte-identical messages.
    _MESSAGING_ERRORS = {
        400: '400 Request is malformed or invalid',
        401: '401 The specified user does not have access to the account',
        403: '403 The user does not have access to this API',
        404: '404 Path not found',
        415: '415 The content-type of the request is incorrect',
        429: '429 The rate limit has been reached',
    }

    def __init__(self, config, call_back=None):
        super(APIController, self).__init__(config, call_back)

    def _raise_messaging_errors(self, _response):
        """Endpoint and global error handling using HTTP status codes.

        Args:
            _response: The HTTP response returned by execute_request.

        Raises:
            MessagingException: When the response status code is one of the
                messaging API's documented error codes.
        """
        message = self._MESSAGING_ERRORS.get(_response.status_code)
        if message is not None:
            raise MessagingException(message, _response)

    def list_media(self,
                   user_id,
                   continuation_token=None):
        """Does a GET request to /users/{userId}/media.

        listMedia

        Args:
            user_id (string): TODO: type description here.
            continuation_token (string, optional): TODO: type description
                here.

        Returns:
            ApiResponse: An object with the response value as well as other
                useful information such as status codes and headers.
                successful operation

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received in
                the request.

        """
        # Prepare query URL
        _url_path = '/users/{userId}/media'
        _url_path = APIHelper.append_url_with_template_parameters(_url_path, {
            'userId': {'value': user_id, 'encode': True}
        })
        _query_builder = self.config.get_base_uri(Server.MESSAGINGDEFAULT)
        _query_builder += _url_path
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare headers
        _headers = {
            'accept': 'application/json',
            'Continuation-Token': continuation_token
        }

        # Prepare and execute request
        _request = self.config.http_client.get(_query_url, headers=_headers)
        MessagingBasicAuth.apply(self.config, _request)
        _response = self.execute_request(_request)

        # Endpoint and global error handling using HTTP status codes.
        self._raise_messaging_errors(_response)
        self.validate_response(_response)

        decoded = APIHelper.json_deserialize(_response.text, Media.from_dictionary)
        _result = ApiResponse(_response, body=decoded)
        return _result

    def get_media(self,
                  user_id,
                  media_id):
        """Does a GET request to /users/{userId}/media/{mediaId}.

        getMedia

        Args:
            user_id (string): TODO: type description here.
            media_id (string): TODO: type description here.

        Returns:
            ApiResponse: An object with the response value as well as other
                useful information such as status codes and headers.
                successful operation

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received in
                the request.

        """
        # Prepare query URL
        _url_path = '/users/{userId}/media/{mediaId}'
        _url_path = APIHelper.append_url_with_template_parameters(_url_path, {
            'userId': {'value': user_id, 'encode': True},
            'mediaId': {'value': media_id, 'encode': True}
        })
        _query_builder = self.config.get_base_uri(Server.MESSAGINGDEFAULT)
        _query_builder += _url_path
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare and execute request (binary: media payloads are raw bytes)
        _request = self.config.http_client.get(_query_url)
        MessagingBasicAuth.apply(self.config, _request)
        _response = self.execute_request(_request, binary=True)

        # Endpoint and global error handling using HTTP status codes.
        self._raise_messaging_errors(_response)
        self.validate_response(_response)

        decoded = _response.text
        _result = ApiResponse(_response, body=decoded)
        return _result

    def upload_media(self,
                     user_id,
                     media_id,
                     content_length,
                     body,
                     content_type='application/octet-stream',
                     cache_control=None):
        """Does a PUT request to /users/{userId}/media/{mediaId}.

        uploadMedia

        Args:
            user_id (string): TODO: type description here.
            media_id (string): TODO: type description here.
            content_length (long|int): TODO: type description here.
            body (typing.BinaryIO): TODO: type description here.
            content_type (string, optional): TODO: type description here.
                Example: application/octet-stream
            cache_control (string, optional): TODO: type description here.

        Returns:
            ApiResponse: An object with the response value as well as other
                useful information such as status codes and headers.

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received in
                the request.

        """
        # Prepare query URL
        _url_path = '/users/{userId}/media/{mediaId}'
        _url_path = APIHelper.append_url_with_template_parameters(_url_path, {
            'userId': {'value': user_id, 'encode': True},
            'mediaId': {'value': media_id, 'encode': True}
        })
        _query_builder = self.config.get_base_uri(Server.MESSAGINGDEFAULT)
        _query_builder += _url_path
        _query_url = APIHelper.clean_url(_query_builder)

        # A FileWrapper carries its own stream and content type; otherwise
        # the caller-provided body/content_type are used as-is.
        if isinstance(body, FileWrapper):
            body_wrapper = body.file_stream
            body_content_type = body.content_type
        else:
            body_wrapper = body
            body_content_type = content_type

        # Prepare headers
        _headers = {
            'content-type': body_content_type,
            'Content-Length': content_length,
            'Cache-Control': cache_control
        }

        # Prepare and execute request
        _request = self.config.http_client.put(_query_url, headers=_headers, parameters=body_wrapper)
        MessagingBasicAuth.apply(self.config, _request)
        _response = self.execute_request(_request)

        # Endpoint and global error handling using HTTP status codes.
        self._raise_messaging_errors(_response)
        self.validate_response(_response)

        # Return appropriate type
        return ApiResponse(_response)

    def delete_media(self,
                     user_id,
                     media_id):
        """Does a DELETE request to /users/{userId}/media/{mediaId}.

        deleteMedia

        Args:
            user_id (string): TODO: type description here.
            media_id (string): TODO: type description here.

        Returns:
            ApiResponse: An object with the response value as well as other
                useful information such as status codes and headers.

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received in
                the request.

        """
        # Prepare query URL
        _url_path = '/users/{userId}/media/{mediaId}'
        _url_path = APIHelper.append_url_with_template_parameters(_url_path, {
            'userId': {'value': user_id, 'encode': True},
            'mediaId': {'value': media_id, 'encode': True}
        })
        _query_builder = self.config.get_base_uri(Server.MESSAGINGDEFAULT)
        _query_builder += _url_path
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare and execute request
        _request = self.config.http_client.delete(_query_url)
        MessagingBasicAuth.apply(self.config, _request)
        _response = self.execute_request(_request)

        # Endpoint and global error handling using HTTP status codes.
        self._raise_messaging_errors(_response)
        self.validate_response(_response)

        # Return appropriate type
        return ApiResponse(_response)

    def create_message(self,
                       user_id,
                       body=None):
        """Does a POST request to /users/{userId}/messages.

        createMessage

        Args:
            user_id (string): TODO: type description here.
            body (MessageRequest, optional): TODO: type description here.

        Returns:
            ApiResponse: An object with the response value as well as other
                useful information such as status codes and headers.
                successful operation

        Raises:
            APIException: When an error occurs while fetching the data from
                the remote API. This exception includes the HTTP Response
                code, an error message, and the HTTP body that was received in
                the request.

        """
        # Prepare query URL
        _url_path = '/users/{userId}/messages'
        _url_path = APIHelper.append_url_with_template_parameters(_url_path, {
            'userId': {'value': user_id, 'encode': True}
        })
        _query_builder = self.config.get_base_uri(Server.MESSAGINGDEFAULT)
        _query_builder += _url_path
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare headers
        _headers = {
            'accept': 'application/json',
            'content-type': 'application/json; charset=utf-8'
        }

        # Prepare and execute request
        _request = self.config.http_client.post(_query_url, headers=_headers, parameters=APIHelper.json_serialize(body))
        MessagingBasicAuth.apply(self.config, _request)
        _response = self.execute_request(_request)

        # Endpoint and global error handling using HTTP status codes.
        self._raise_messaging_errors(_response)
        self.validate_response(_response)

        decoded = APIHelper.json_deserialize(_response.text, BandwidthMessage.from_dictionary)
        _result = ApiResponse(_response, body=decoded)
        return _result
| 43.196532
| 121
| 0.630202
| 1,618
| 14,946
| 5.605068
| 0.118665
| 0.046312
| 0.059544
| 0.071673
| 0.849928
| 0.822141
| 0.807807
| 0.807807
| 0.780571
| 0.755651
| 0
| 0.017581
| 0.299746
| 14,946
| 345
| 122
| 43.321739
| 0.848939
| 0.264887
| 0
| 0.726257
| 1
| 0
| 0.164296
| 0.016229
| 0
| 0
| 0
| 0.04058
| 0
| 1
| 0.03352
| false
| 0
| 0.050279
| 0
| 0.117318
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3ab034f6b6e34664ebc2257ed1e2a58b100ec252
| 222
|
py
|
Python
|
winapi_error.py
|
jacoblusk/python-dll-injector
|
40ecf5162e2d2f24d6d4b71959a790aca59edd22
|
[
"BSD-3-Clause"
] | 1
|
2021-12-04T09:23:08.000Z
|
2021-12-04T09:23:08.000Z
|
winapi_error.py
|
jacoblusk/Python-DLL-Injector
|
40ecf5162e2d2f24d6d4b71959a790aca59edd22
|
[
"BSD-3-Clause"
] | 1
|
2021-12-04T07:52:15.000Z
|
2021-12-04T07:52:15.000Z
|
winapi_error.py
|
jacoblusk/python-dll-injector
|
40ecf5162e2d2f24d6d4b71959a790aca59edd22
|
[
"BSD-3-Clause"
] | null | null | null |
import ctypes
def LPVOID_errcheck(result, func, args):
    """ctypes errcheck for APIs returning LPVOID: NULL signals failure.

    Returns the (truthy) pointer unchanged on success; raises OSError via
    ctypes.WinError() when the call returned NULL.
    """
    if result:
        return result
    raise ctypes.WinError()
def Win32API_errcheck(result, func, args):
    """ctypes ``errcheck`` callback for Win32 APIs returning a BOOL.

    Raises ``ctypes.WinError()`` when the API reported failure (a
    zero/falsy result).  On success nothing is returned, so the foreign
    call's final value becomes ``None`` — same as the original.
    """
    if result:
        return None
    raise ctypes.WinError()
| 17.076923
| 42
| 0.675676
| 28
| 222
| 5.285714
| 0.5
| 0.189189
| 0.243243
| 0.297297
| 0.702703
| 0.702703
| 0.702703
| 0.702703
| 0.702703
| 0.702703
| 0
| 0.011905
| 0.243243
| 222
| 12
| 43
| 18.5
| 0.869048
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.125
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3af37664a16d2a280593d79679a99322b1892870
| 5,652
|
py
|
Python
|
catkin_ws/build/stdr_simulator/stdr_msgs/cmake/stdr_msgs-genmsg-context.py
|
PMinThant/ROS-with-Cpp
|
5a63722b4817b02b5e2e66c14a9ff88a4fe2fbd7
|
[
"MIT"
] | null | null | null |
catkin_ws/build/stdr_simulator/stdr_msgs/cmake/stdr_msgs-genmsg-context.py
|
PMinThant/ROS-with-Cpp
|
5a63722b4817b02b5e2e66c14a9ff88a4fe2fbd7
|
[
"MIT"
] | null | null | null |
catkin_ws/build/stdr_simulator/stdr_msgs/cmake/stdr_msgs-genmsg-context.py
|
PMinThant/ROS-with-Cpp
|
5a63722b4817b02b5e2e66c14a9ff88a4fe2fbd7
|
[
"MIT"
] | null | null | null |
# generated from genmsg/cmake/pkg-genmsg.context.in
# NOTE(review): this file is machine-generated by catkin/genmsg for the
# `stdr_msgs` package — do not hand-edit; regenerate via the build instead.
# Semicolon-separated absolute paths of every .msg file to generate code for
# (package sources plus the devel-space actionlib messages).
messages_str = "/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/Noise.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/LaserSensorMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/SonarSensorMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/KinematicMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/FootprintMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/RobotMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/RobotIndexedMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/RobotIndexedVectorMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/RfidSensorMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/RfidSensorMeasurementMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/RfidTag.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/RfidTagVector.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/SoundSensorMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/SoundSensorMeasurementMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/SoundSource.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/SoundSourceVector.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/ThermalSensorMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/ThermalSensorMeasurementMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/ThermalSource.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/ThermalSourceVector.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/CO2SensorMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/CO2SensorMeasurementMsg.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/CO2Source.msg;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg/CO2SourceVector.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/RegisterRobotAction.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/RegisterRobotActionGoal.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/RegisterRobotActionResult.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/RegisterRobotActionFeedback.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/RegisterRobotGoal.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/RegisterRobotResult.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/RegisterRobotFeedback.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/SpawnRobotAction.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/SpawnRobotActionGoal.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/SpawnRobotActionResult.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/SpawnRobotActionFeedback.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/SpawnRobotGoal.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/SpawnRobotResult.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/SpawnRobotFeedback.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/DeleteRobotAction.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/DeleteRobotActionGoal.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/DeleteRobotActionResult.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/DeleteRobotActionFeedback.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/DeleteRobotGoal.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/DeleteRobotResult.msg;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg/DeleteRobotFeedback.msg"
# Semicolon-separated absolute paths of every .srv service definition.
services_str = "/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/srv/LoadMap.srv;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/srv/LoadExternalMap.srv;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/srv/RegisterGui.srv;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/srv/MoveRobot.srv;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/srv/AddRfidTag.srv;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/srv/DeleteRfidTag.srv;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/srv/AddThermalSource.srv;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/srv/DeleteThermalSource.srv;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/srv/AddSoundSource.srv;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/srv/DeleteSoundSource.srv;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/srv/AddCO2Source.srv;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/srv/DeleteCO2Source.srv"
# Name of the catkin package these messages belong to.
pkg_name = "stdr_msgs"
# Message packages this package's definitions depend on.
dependencies_str = "std_msgs;geometry_msgs;nav_msgs;actionlib_msgs"
# Generator back-ends to run (C++, EusLisp, Common Lisp, Node.js, Python).
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
# Alternating pkg-name;msg-include-path pairs used to resolve cross-package
# message references during generation.
dep_include_paths_str = "stdr_msgs;/home/edi/ros_revision/catkin_ws/src/stdr_simulator/stdr_msgs/msg;stdr_msgs;/home/edi/ros_revision/catkin_ws/devel/share/stdr_msgs/msg;std_msgs;/opt/ros/noetic/share/std_msgs/cmake/../msg;geometry_msgs;/opt/ros/noetic/share/geometry_msgs/cmake/../msg;nav_msgs;/opt/ros/noetic/share/nav_msgs/cmake/../msg;actionlib_msgs;/opt/ros/noetic/share/actionlib_msgs/cmake/../msg"
# Interpreter the generators are invoked with.
PYTHON_EXECUTABLE = "/usr/bin/python3"
# True only when the package ships pre-generated (static) sources; '' here
# means everything is generated at build time.
package_has_static_sources = '' == 'TRUE'
# Helper script that verifies all message dependencies are declared.
genmsg_check_deps_script = "/opt/ros/noetic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
| 471
| 3,841
| 0.859873
| 906
| 5,652
| 5.093819
| 0.124724
| 0.107476
| 0.127844
| 0.230119
| 0.707692
| 0.689491
| 0.689491
| 0.689491
| 0.686024
| 0.686024
| 0
| 0.001246
| 0.006016
| 5,652
| 11
| 3,842
| 513.818182
| 0.820221
| 0.00867
| 0
| 0
| 1
| 0.333333
| 0.963756
| 0.958579
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3af4c1e50e436c9bb21991c2fbd00e06ddfb3072
| 271
|
py
|
Python
|
pycreds/__init__.py
|
kumaraditya303/pycreds
|
c3f5fa3c91705da17394778fcdb123720935d279
|
[
"BSD-3-Clause"
] | 3
|
2021-01-26T10:33:01.000Z
|
2021-08-02T03:10:44.000Z
|
pycreds/__init__.py
|
kumaraditya303/pycreds
|
c3f5fa3c91705da17394778fcdb123720935d279
|
[
"BSD-3-Clause"
] | 65
|
2021-01-26T11:43:16.000Z
|
2022-03-31T08:08:13.000Z
|
pycreds/__init__.py
|
kumaraditya303/pycreds
|
c3f5fa3c91705da17394778fcdb123720935d279
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Public package interface for pycreds.

Re-exports the credential-store helpers implemented in the compiled
``_pycreds`` extension module so callers can import them directly from
the package root.
"""
from ._pycreds import (
    delete_password,
    find_credentials,
    find_password,
    get_password,
    set_password,
)

# Explicit public API; mirrors the names re-exported above so that
# `from pycreds import *` and documentation tools stay in sync.
__all__ = [
    "delete_password",
    "find_credentials",
    "find_password",
    "get_password",
    "set_password",
]
| 15.941176
| 23
| 0.630996
| 27
| 271
| 5.777778
| 0.481481
| 0.179487
| 0.230769
| 0.371795
| 0.807692
| 0.807692
| 0.807692
| 0.807692
| 0.807692
| 0.807692
| 0
| 0.004854
| 0.239852
| 271
| 16
| 24
| 16.9375
| 0.752427
| 0.077491
| 0
| 0
| 0
| 0
| 0.274194
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.571429
| 0.071429
| 0
| 0.071429
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
c9454f31a8fe86a162031c8ab5c0b8254db4d985
| 26,507
|
py
|
Python
|
beem.py
|
eremmev/generator
|
0df8a91ce712ee8c62f599a2766b13bb18ee9ae3
|
[
"MIT"
] | null | null | null |
beem.py
|
eremmev/generator
|
0df8a91ce712ee8c62f599a2766b13bb18ee9ae3
|
[
"MIT"
] | null | null | null |
beem.py
|
eremmev/generator
|
0df8a91ce712ee8c62f599a2766b13bb18ee9ae3
|
[
"MIT"
] | null | null | null |
import random

# Pool of characters that passwords are drawn from.  This reproduces the
# original hand-written choice list: digits plus keyboard-order letters.
# NOTE: 'p' and 'P' were absent from the original pool; they are kept out
# here for compatibility with the original character distribution.
CHARSET = (
    "0123456789"
    "qwertyuioasdfghjklzxcvbnm"
    "QWERTYUIOASDFGHJKLZXCVBNM"
)


def make_password(length: int) -> str:
    """Return a random password of *length* characters drawn from CHARSET.

    Replaces the original script's 8x-to-16x duplicated chains of
    ``print(random.choice([...]), end="")`` with a single parameterized
    helper.
    """
    return "".join(random.choice(CHARSET) for _ in range(length))


def main() -> None:
    """Interactive driver: select the operation, ask for a length in
    [8, 16], and print one random password of that length.

    Bug fixes relative to the original script:
    * choosing 8 characters now actually produces a password (the
      original ``while op2 != '8'`` loop skipped the 8-character case
      entirely);
    * exactly one password of the requested length is printed (the
      original also printed stray extra characters on every loop pass
      after re-prompting);
    * non-numeric or out-of-range lengths are re-prompted instead of
      being silently ignored.
    """
    print('1. Make Password')
    op1 = input('Enter operation: ')
    while op1 != '1':
        print('Unknown num!')  # fixed typo: original printed 'Unkown num!'
        op1 = input('Enter operation: ')

    while True:
        op2 = input('Enter the number of characters(min 8,max 16): ')
        if op2.isdigit() and 8 <= int(op2) <= 16:
            break
        print('Unknown num!')

    print(make_password(int(op2)))
    input()  # pause before exiting, as the original trailing input() did


if __name__ == "__main__":
    main()
| 220.891667
| 266
| 0.309428
| 6,411
| 26,507
| 1.279364
| 0.010919
| 0.048281
| 0.072421
| 0.096562
| 0.979151
| 0.979151
| 0.979151
| 0.979151
| 0.97208
| 0.97208
| 0
| 0.04095
| 0.052024
| 26,507
| 119
| 267
| 222.747899
| 0.285458
| 0
| 0
| 0.855932
| 0
| 0
| 0.194103
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.008475
| 0.008475
| 0
| 0.008475
| 0.855932
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
c9762741b6fad6de78f229a74dc9fdad02e626c3
| 12,311
|
py
|
Python
|
ReservoirClass.py
|
E-dog91/Reservoir-Reinforcement-LEarning-for-Trading-
|
3e91ee67f899555b2b02affcdf69d510671ad387
|
[
"MIT"
] | null | null | null |
ReservoirClass.py
|
E-dog91/Reservoir-Reinforcement-LEarning-for-Trading-
|
3e91ee67f899555b2b02affcdf69d510671ad387
|
[
"MIT"
] | null | null | null |
ReservoirClass.py
|
E-dog91/Reservoir-Reinforcement-LEarning-for-Trading-
|
3e91ee67f899555b2b02affcdf69d510671ad387
|
[
"MIT"
] | null | null | null |
import abc
import pandas as pd
import numpy as np
from typing import Union, List
import torch
import torch.nn as nn
import timeit
from rc_class.model_esn_ridge import Model_esn_ridge
from rc_class.leaky_echo_cell import Leaky_echo_cell
from matrix_generator.matrix_generator import Matrix_generator
# Fix the global torch and numpy RNG seeds at import time so the randomly
# initialised reservoir weights below are reproducible across runs.
# NOTE(review): this mutates process-global RNG state on import.
torch.manual_seed(0)
np.random.seed(0)
class Reservoir():
    """Base interface for reservoirs that map asset data frames to
    observation vectors.

    NOTE(review): the ``@abc.abstractmethod`` decorators are not enforced
    because this class does not derive from ``abc.ABC``; incomplete
    subclasses can still be instantiated. Confirm before relying on
    abstractness.
    """

    def __init__(self):
        # The base class carries no state of its own.
        pass

    @abc.abstractmethod
    def observation(self, df):
        """Map a data frame (or list of frames) to an observation vector."""
        raise NotImplementedError

    @abc.abstractmethod
    def observation_space_size(self, num_assets:int = 1, window_size: int = 1) -> int:
        """Return the length of the vector produced by ``observation``."""
        raise NotImplementedError
class RNNAdvisor(Reservoir):
    """Reservoir that runs asset frames through a trainable ``nn.RNN`` and
    uses the flattened final hidden state (all layers) as the observation."""

    def __init__(self, num_features: int, num_layers:int, num_assets: int = 16, nonlinearity = 'relu'):
        super(RNNAdvisor, self).__init__()
        self.num_features = num_features
        # NOTE(review): input_size assumes 5 columns per asset here, while
        # the sibling reservoirs below use 6 -- confirm which is intended.
        self.net = nn.RNN(
            input_size = 5*num_assets,
            hidden_size = num_features,
            num_layers = num_layers,
            nonlinearity = nonlinearity,
            batch_first = True,
        )
        self.num_layers = num_layers

    def observation_space_size(self, num_assets: int = 1, window_size: int = 1) -> int:
        """Size of the flattened hidden state: num_features * num_layers."""
        return self.num_features*self.num_layers

    def observation(self, data: Union[pd.DataFrame, List[pd.DataFrame]]):
        """Return the RNN's final hidden state for the given asset data.

        Accepts a single DataFrame or a list of per-asset DataFrames.
        The last row of each frame is dropped so current values are not
        leaked into the observation.
        """
        if isinstance(data, pd.DataFrame):
            data = [data]
        # Bug fix: build a new list of trimmed frames instead of writing
        # back into the caller's list (the original did
        # ``data[i] = data[i][:-1]``, mutating the argument).
        frames = [df.iloc[:-1] for df in data]
        # Concatenate per-asset frames column-wise -> (time, 5*num_assets),
        # then add a batch dimension for batch_first=True.
        seq = torch.cat([torch.tensor(f.to_numpy()) for f in frames], dim=1)
        seq = torch.unsqueeze(seq, 0)
        _, hidden = self.net(seq.float())
        return hidden.flatten().detach().numpy()
class ReservoirRNN(Reservoir):
    """Fixed (untrained) ``nn.RNN`` reservoir; the observation is the
    flattened final hidden state h_n of every layer."""

    def __init__(self, num_features: int, num_layers: int, num_assets: int = 16, nonlinearity='relu'):
        super(ReservoirRNN, self).__init__()
        self.num_features = num_features
        self._reservoir= nn.RNN(
            input_size=6*num_assets,
            hidden_size=num_features,
            num_layers=num_layers,
            nonlinearity=nonlinearity,
            batch_first=True,
        )
        self.num_layers = num_layers
        self.hidden_dim = num_features

    def observation_space_size(self, num_assets: int = 1, window_size: int = 1) -> int:
        """Size of the flattened h_n: num_features * num_layers."""
        return self.num_features * self.num_layers

    def observation(self, data: Union[pd.DataFrame, List[pd.DataFrame]]):
        """Drive the reservoir with the concatenated asset data and return
        the final hidden state of all layers as a 1-D numpy array.

        Accepts a single DataFrame or a list of per-asset DataFrames.
        """
        if isinstance(data, pd.DataFrame):
            data = [data]
        # Column-wise concatenation -> (time, 6*num_assets), then add a
        # batch dimension for batch_first=True.
        seq = torch.cat([torch.tensor(df.to_numpy()) for df in data], dim=1)
        # Bug fix: the original called ``input.reshape(...)`` and discarded
        # the result (a no-op); the dead line is removed.
        seq = torch.unsqueeze(seq, 0)
        h0 = torch.zeros((self.num_layers, 1, self.hidden_dim))
        # nn.RNN returns (output, h_n); the original bound them to the
        # misleading names (hidden, out) and returned flattened h_n --
        # behavior kept, names corrected.
        _, h_n = self._reservoir(seq.float(), h0.float())
        return h_n.flatten().detach().numpy()
class ReservoirLSTM(Reservoir):
    """Fixed ``nn.LSTM`` reservoir; the observation is the flattened final
    hidden state h_n of every layer."""

    def __init__(self, num_features: int, num_layers: int, num_assets: int = 16,dropout=0.3):
        super(ReservoirLSTM, self).__init__()
        self.num_features = num_features
        # NOTE(review): with num_layers == 1 a nonzero dropout triggers a
        # torch warning (dropout applies between layers only).
        self._reservoir= nn.LSTM(
            input_size=6*num_assets,
            hidden_size=num_features,
            num_layers=num_layers,
            batch_first=True,
            dropout=dropout
        )
        self.num_layers = num_layers
        self.hidden_dim = num_features

    def observation_space_size(self, num_assets: int = 1, window_size: int = 1) -> int:
        """Size of the flattened h_n: num_features * num_layers."""
        return self.num_features * self.num_layers

    def observation(self, data: Union[pd.DataFrame, List[pd.DataFrame]]):
        """Drive the LSTM with the concatenated asset data and return the
        final hidden state h_n (all layers) as a 1-D numpy array.

        Accepts a single DataFrame or a list of per-asset DataFrames.
        """
        if isinstance(data, pd.DataFrame):
            data = [data]
        # Column-wise concatenation -> (time, 6*num_assets), then add a
        # batch dimension for batch_first=True.
        seq = torch.cat([torch.tensor(df.to_numpy()) for df in data], dim=1)
        # Bug fix: the original called ``input.reshape(...)`` with a
        # transposed shape and discarded the result (a no-op); removed.
        seq = torch.unsqueeze(seq, 0)
        # nn.LSTM returns (output, (h_n, c_n)); only h_n is used.
        _, (h_n, _c_n) = self._reservoir(seq.float())
        return h_n.flatten().detach().numpy()
class ReservoirLeakyESN(Reservoir):
    """Leaky echo-state-network reservoir built from generated weight
    matrices and a ``Leaky_echo_cell``."""

    def __init__(self, num_features: int, w_generator, win_generator,
                 wbias_generator, h0_Generator, h0_params, hidden_dim = 126,
                 num_assets = 16,
                 learning_algo='inv', ridge_param=0.0, washout=0, dtype=torch.float32,
                 nonlin_fct=torch.relu, leak_rate=0.1):
        super(ReservoirLeakyESN, self).__init__()
        # dim_rec_bis corresponds to num_features, dim_rec to hidden_dim.
        W_in, W_rec, bias, _ = self._generate_matrices(
            w_generator=w_generator,
            win_generator=win_generator,
            wbias_generator=wbias_generator,
            dtype=dtype,
            dim_in=6 * num_assets,
            dim_rec=hidden_dim,
            dim_rec_bis=num_features,
        )
        self._reservoir_ = Leaky_echo_cell(
            input_dim=6 * num_assets,
            hidden_dim=hidden_dim,
            W=W_rec,
            W_in=W_in,
            W_bias=bias,
            nonlin_fct=nonlin_fct,
            leak_rate=leak_rate,
        )
        self.h0_Generator = h0_Generator
        self._h0_params = h0_params
        self._hidden_dim = hidden_dim
        self._num_features = num_features

    def observation_space_size(self, num_assets:int = 1, window_size: int = 1) -> int:
        """Length of the observation vector produced by this reservoir."""
        return self._num_features

    def observation(self, data: Union[pd.DataFrame, List[pd.DataFrame]]):
        """Drive the echo cell with the concatenated asset data and return
        the final hidden state as a 1-D numpy array.

        Accepts a single DataFrame or a list of per-asset DataFrames.
        """
        if isinstance(data, pd.DataFrame):
            data = [data]
        # Column-wise concatenation of all asset frames, plus a leading
        # batch dimension.
        per_asset = [torch.tensor(df.to_numpy()) for df in data]
        seq = torch.unsqueeze(torch.cat(per_asset, dim=1), 0)
        initial_state = self._hidden_state_init()
        _, final_state = self._reservoir_(seq.float(), initial_state)
        return final_state.flatten().detach().numpy()

    def _generate_matrices(self, w_generator, win_generator, wbias_generator, dtype, dim_in, dim_rec, dim_rec_bis):
        """Build (W_ih, W_hh, b_ih, b_hh) from generator objects, plain
        callables, or ready-made matrices."""

        def realise(source, rows, cols):
            # A width of 1 yields a scalar size instead of a tuple,
            # matching the original branching.
            shape = (rows, cols) if cols != 1 else (rows)
            if isinstance(source, Matrix_generator):
                return source.generate(size=shape, dtype=dtype)
            if callable(source):
                return source(size=shape, dtype=dtype)
            return source

        # NOTE(review): W_ih is drawn from ``w_generator`` and W_hh from
        # ``win_generator`` -- the names suggest these two may be swapped;
        # confirm against the generators' intended roles.
        W_ih = realise(w_generator, dim_rec_bis, dim_in)
        W_hh = realise(win_generator, dim_rec_bis, dim_rec)
        b_ih = realise(wbias_generator, dim_rec_bis, 1)
        b_hh = realise(wbias_generator, dim_rec_bis, 1)
        return W_ih, W_hh, b_ih, b_hh

    def _hidden_state_init(self):
        """Draw a fresh initial hidden state of shape (1, hidden_dim)."""
        generator = self.h0_Generator(**self._h0_params)
        return generator.generate(size=(1, self._hidden_dim), dtype=torch.float32)
class StateReservoirLeakyESN(Reservoir):
    """Leaky echo-state reservoir fed with a pre-assembled state vector.

    Unlike ReservoirLeakyESN, whose ``observation`` consumes raw asset
    DataFrames, this variant takes a flat numpy state whose expected
    width is (window_size + 3) * num_assets + 3 (see dim_in below).

    NOTE(review): observation_space_size reports ``num_features`` while
    ``observation`` returns the flattened cell state — confirm they agree.
    """
    def __init__(self, window_size: int, num_features: int, w_generator, win_generator,
                 wbias_generator, h0_Generator, h0_params, hidden_dim = 126,
                 num_assets = 16,
                 learning_algo='inv', ridge_param=0.0, washout=0, dtype=torch.float32,
                 nonlin_fct=torch.relu, leak_rate=0.1):
        super(StateReservoirLeakyESN, self).__init__()
        # dim rec bis = numfeature
        # dim rec = dim hidden
        # Input width mirrors the environment's state layout:
        # (self.window_size-1)*self.number_of_assets + 3 + 4*(self.number_of_assets)
        W_ih, W_hh, b_ih, _ = self._generate_matrices(w_generator=w_generator, win_generator=win_generator,
                                                      wbias_generator= wbias_generator,
                                                      dtype=dtype,
                                                      dim_in=(window_size+3)*num_assets+3,
                                                      dim_rec=hidden_dim,
                                                      dim_rec_bis=num_features)
        self._reservoir_ = Leaky_echo_cell(input_dim=(window_size+3)*num_assets+3, hidden_dim=hidden_dim,
                                           W=W_hh, W_in=W_ih, W_bias=b_ih,
                                           nonlin_fct=nonlin_fct, leak_rate=leak_rate)
        self.h0_Generator = h0_Generator
        self._h0_params = h0_params
        self._hidden_dim = hidden_dim
        self._num_features = num_features

    def observation_space_size(self, num_assets: int = 1, window_size: int = 1) -> int:
        """Declared size of the observation vector (arguments are ignored)."""
        return self._num_features

    def observation(self, state: np.ndarray):
        """Run one reservoir step over *state* and return the flattened
        hidden state as a numpy vector."""
        input = torch.tensor(state)
        # Leading batch axis expected by the echo cell.
        input = torch.unsqueeze(input, 0)
        # Run reservoir and return hidden layer
        h0 = self._hidden_state_init()
        _, h = self._reservoir_(input.float(), h0)
        h = h.flatten()
        return h.detach().numpy()

    def _generate_matrices(self, w_generator, win_generator, wbias_generator, dtype, dim_in, dim_rec, dim_rec_bis):
        """Materialise each weight spec; a spec may be a Matrix_generator,
        a callable, or an already-built matrix (returned unchanged)."""
        def branching_type_input(matrix, size_a, size_b):
            # A width of 1 collapses to a bare integer size (bias vectors).
            if size_b != 1:
                size = (size_a, size_b)
            else:
                size = (size_a)
            if isinstance(matrix, Matrix_generator):
                matrix_out = matrix.generate(size=size, dtype=dtype)
            elif callable(matrix):
                matrix_out = matrix(size=size, dtype=dtype)
            else:
                matrix_out = matrix
            return matrix_out
        W_ih = branching_type_input(w_generator, dim_rec_bis, dim_in)
        W_hh = branching_type_input(win_generator, dim_rec_bis, dim_rec)
        W_bias_ih = branching_type_input(wbias_generator, dim_rec_bis, 1)
        W_bias_hh = branching_type_input(wbias_generator, dim_rec_bis, 1)
        return W_ih, W_hh, W_bias_ih, W_bias_hh

    def _hidden_state_init(self):
        """Draw a fresh initial hidden state from the configured generator."""
        h0 = self.h0_Generator(**self._h0_params).generate(size=(1, self._hidden_dim), dtype=torch.float32)
        return h0
| 39.970779
| 136
| 0.600195
| 1,533
| 12,311
| 4.531637
| 0.104371
| 0.029221
| 0.030229
| 0.020728
| 0.852742
| 0.843242
| 0.823233
| 0.820354
| 0.814884
| 0.814884
| 0
| 0.015074
| 0.304849
| 12,311
| 307
| 137
| 40.100977
| 0.796681
| 0.138088
| 0
| 0.722222
| 0
| 0
| 0.001366
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.121212
| false
| 0.005051
| 0.050505
| 0.025253
| 0.282828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a320dd7e77dd3ca33ad21b26c43021f4d6ddf414
| 123
|
py
|
Python
|
tests/test_generate_cmds_stubs.py
|
tomaszkurgan/dcc_generate_stubs
|
3595d2b37f7a1fd0f23f624943168d9adeeed2eb
|
[
"MIT"
] | 1
|
2018-01-18T09:58:35.000Z
|
2018-01-18T09:58:35.000Z
|
tests/test_generate_cmds_stubs.py
|
tomaszkurgan/maya_generate_stubs
|
3595d2b37f7a1fd0f23f624943168d9adeeed2eb
|
[
"MIT"
] | null | null | null |
tests/test_generate_cmds_stubs.py
|
tomaszkurgan/maya_generate_stubs
|
3595d2b37f7a1fd0f23f624943168d9adeeed2eb
|
[
"MIT"
] | null | null | null |
# Smoke-run of the Maya command-stub generator. Executes at import time
# (no __main__ guard). limit=3 presumably caps how many commands are
# processed to keep the run short — confirm in generate_stubs.
from maya_generate_stubs.impl import generate_cmds_stubs

generate_cmds_stubs.generate_stubs(maya_version='2017', limit=3)
| 30.75
| 64
| 0.869919
| 19
| 123
| 5.210526
| 0.578947
| 0.262626
| 0.343434
| 0.505051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043103
| 0.056911
| 123
| 3
| 65
| 41
| 0.810345
| 0
| 0
| 0
| 1
| 0
| 0.03252
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
6ed00722fa978f1c7cfa5ce6042c07b996eb9645
| 4,269
|
py
|
Python
|
tests/test_aggregate_store.py
|
abrookins/storey
|
ebcb1daba5d72e1a7f6e5cd7ea760248dd4f72e5
|
[
"Apache-2.0"
] | 39
|
2020-11-01T15:27:22.000Z
|
2022-02-25T09:44:20.000Z
|
tests/test_aggregate_store.py
|
abrookins/storey
|
ebcb1daba5d72e1a7f6e5cd7ea760248dd4f72e5
|
[
"Apache-2.0"
] | 30
|
2020-09-10T08:41:16.000Z
|
2022-02-24T10:05:55.000Z
|
tests/test_aggregate_store.py
|
abrookins/storey
|
ebcb1daba5d72e1a7f6e5cd7ea760248dd4f72e5
|
[
"Apache-2.0"
] | 16
|
2020-09-09T14:09:59.000Z
|
2021-09-15T18:28:35.000Z
|
from storey.table import ReadOnlyAggregationBuckets
from storey.dtypes import SlidingWindows
from datetime import datetime
def _assert_buckets(window, base_time, initial_data, expected_data):
    """Build a read-only 'count' aggregation over *initial_data* and assert
    that the stored per-bucket values equal *expected_data*."""
    buckets = ReadOnlyAggregationBuckets(
        "test", "count", window, None, base_time, None,
        initial_data=initial_data)
    observed = [bucket.value for bucket in buckets.buckets]
    assert observed == expected_data
def test_load_aggregation_bucket():
    """Two adjacent stored buckets (previous and current) are both loaded."""
    base = int(datetime.fromisoformat("2020-07-21T21:40:00+00:00").timestamp() * 1000)
    window = SlidingWindows(["1h"], "10m")
    span = window.max_window_millis
    head = base // span * span
    stored = {head - span: [1, 0, 0, 0, 1, 2], head: [1, 0, 2, 1, 1, 0]}
    _assert_buckets(window, base, stored, [2, 1, 0, 2, 1, 1])
def test_load_aggregation_bucket_data_two_stored_buckets_requested_data_newer_than_both():
    """Both stored buckets predate the window entirely: nothing loads."""
    base = int(datetime.fromisoformat("2020-07-21T21:40:00+00:00").timestamp() * 1000)
    window = SlidingWindows(["1h"], "10m")
    span = window.max_window_millis
    head = base // span * span
    stored = {head - 3 * span: [1, 0, 0, 0, 1, 2], head - 2 * span: [1, 0, 2, 1, 1, 1]}
    _assert_buckets(window, base, stored, [0, 0, 0, 0, 0, 0])
def test_load_aggregation_bucket_data_two_stored_buckets_requested_data_newer():
    """Stored buckets partially overlap the window: only the tail loads."""
    base = int(datetime.fromisoformat("2020-07-21T21:25:00+00:00").timestamp() * 1000)
    window = SlidingWindows(["1h"], "10m")
    span = window.max_window_millis
    head = base // span * span
    stored = {head - 2 * span: [1, 0, 0, 0, 1, 2], head - span: [1, 0, 2, 2, 1, 1]}
    _assert_buckets(window, base, stored, [2, 1, 1, 0, 0, 0])
def test_load_aggregation_bucket_data_two_stored_buckets_requested_data_older():
    """Both stored buckets lie in the future: nothing loads."""
    base = int(datetime.fromisoformat("2020-07-21T21:40:00+00:00").timestamp() * 1000)
    window = SlidingWindows(["1h"], "10m")
    span = window.max_window_millis
    head = base // span * span
    stored = {head + span: [1, 0, 0, 0, 1, 2], head + 2 * span: [1, 0, 2, 1, 1, 0]}
    _assert_buckets(window, base, stored, [0, 0, 0, 0, 0, 0])
def test_load_aggregation_bucket_one_stored_bucket():
    """A single current-time bucket loads with the window offset applied."""
    base = int(datetime.fromisoformat("2020-07-21T21:40:00+00:00").timestamp() * 1000)
    window = SlidingWindows(["1h"], "10m")
    span = window.max_window_millis
    head = base // span * span
    _assert_buckets(window, base, {head: [1, 0, 2, 1, 1, 0]}, [0, 1, 0, 2, 1, 1])
def test_load_aggregation_bucket_one_stored_bucket_requested_data_newer():
    """A single previous-period bucket: only its overlap with the window loads."""
    base = int(datetime.fromisoformat("2020-07-21T21:25:00+00:00").timestamp() * 1000)
    window = SlidingWindows(["1h"], "10m")
    span = window.max_window_millis
    head = base // span * span
    _assert_buckets(window, base, {head - span: [1, 0, 2, 1, 1, 3]}, [1, 1, 3, 0, 0, 0])
def test_load_aggregation_bucket_one_stored_bucket_requested_data_older():
    """A single future bucket lies outside the window: nothing loads."""
    base = int(datetime.fromisoformat("2020-07-21T21:40:00+00:00").timestamp() * 1000)
    window = SlidingWindows(["1h"], "10m")
    span = window.max_window_millis
    head = base // span * span
    _assert_buckets(window, base, {head + span: [1, 0, 2, 1, 1, 0]}, [0, 0, 0, 0, 0, 0])
| 54.730769
| 120
| 0.727805
| 643
| 4,269
| 4.461897
| 0.085537
| 0.018822
| 0.120251
| 0.168351
| 0.884977
| 0.884977
| 0.874172
| 0.862321
| 0.862321
| 0.861973
| 0
| 0.079107
| 0.150152
| 4,269
| 77
| 121
| 55.441558
| 0.711687
| 0
| 0
| 0.516667
| 0
| 0
| 0.0513
| 0.040993
| 0
| 0
| 0
| 0
| 0.15
| 1
| 0.133333
| false
| 0
| 0.05
| 0
| 0.183333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
42b897cc64c2abbe1a9662ab78afd7e4a6cc20ec
| 162,327
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_sdr_invmgr_diag_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_sdr_invmgr_diag_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_sdr_invmgr_diag_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" Cisco_IOS_XR_sdr_invmgr_diag_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR sdr\-invmgr\-diag package operational data.
This module contains definitions
for the following management objects\:
diag\: Diag information
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class Diag(object):
"""
Diag information
.. attribute:: racks
Table of racks
**type**\: :py:class:`Racks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
    # Root container: single child "racks" table, back-linked for path building.
    self.racks = Diag.Racks()
    self.racks.parent = self
class Racks(object):
"""
Table of racks
.. attribute:: rack
Rack name
**type**\: list of :py:class:`Rack <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
    # YList models a YANG list; .name must match the YANG node name.
    self.parent = None
    self.rack = YList()
    self.rack.parent = self
    self.rack.name = 'rack'
class Rack(object):
"""
Rack name
.. attribute:: rack_name <key>
Rack name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: chassis
Chassis information
**type**\: :py:class:`Chassis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.Chassis>`
.. attribute:: fan_traies
Table for rack fan trays
**type**\: :py:class:`FanTraies <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.FanTraies>`
.. attribute:: power_shelfs
Table for rack power shelf
**type**\: :py:class:`PowerShelfs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.PowerShelfs>`
.. attribute:: slots
Table of slots
**type**\: :py:class:`Slots <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.Slots>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
    # Key leaf plus four child containers, each back-linked via .parent.
    self.parent = None
    self.rack_name = None
    self.chassis = Diag.Racks.Rack.Chassis()
    self.chassis.parent = self
    self.fan_traies = Diag.Racks.Rack.FanTraies()
    self.fan_traies.parent = self
    self.power_shelfs = Diag.Racks.Rack.PowerShelfs()
    self.power_shelfs.parent = self
    self.slots = Diag.Racks.Rack.Slots()
    self.slots.parent = self
class PowerShelfs(object):
"""
Table for rack power shelf
.. attribute:: power_shelf
Power shelf name
**type**\: list of :py:class:`PowerShelf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.PowerShelfs.PowerShelf>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
    # 'power_shelf' is a YANG list held in a YList.
    self.parent = None
    self.power_shelf = YList()
    self.power_shelf.parent = self
    self.power_shelf.name = 'power_shelf'
class PowerShelf(object):
"""
Power shelf name
.. attribute:: power_shelf_name <key>
Power Shelf name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: power_supplies
Table for rack power supply
**type**\: :py:class:`PowerSupplies <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.PowerShelfs.PowerShelf.PowerSupplies>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
    # power_shelf_name is the list key; it must be set before path derivation.
    self.parent = None
    self.power_shelf_name = None
    self.power_supplies = Diag.Racks.Rack.PowerShelfs.PowerShelf.PowerSupplies()
    self.power_supplies.parent = self
class PowerSupplies(object):
"""
Table for rack power supply
.. attribute:: power_supply
Power Supply name
**type**\: list of :py:class:`PowerSupply <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.PowerShelfs.PowerShelf.PowerSupplies.PowerSupply>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
    # 'power_supply' is a YANG list held in a YList.
    self.parent = None
    self.power_supply = YList()
    self.power_supply.parent = self
    self.power_supply.name = 'power_supply'
class PowerSupply(object):
"""
Power Supply name
.. attribute:: power_supply_name <key>
Power Supply name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: information
Basic information
**type**\: :py:class:`Information <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.PowerShelfs.PowerShelf.PowerSupplies.PowerSupply.Information>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
    # power_supply_name is the list key; 'information' holds the IDPROM leaves.
    self.parent = None
    self.power_supply_name = None
    self.information = Diag.Racks.Rack.PowerShelfs.PowerShelf.PowerSupplies.PowerSupply.Information()
    self.information.parent = self
class Information(object):
"""
Basic information
.. attribute:: asset_alias
Asset Alias
**type**\: str
**length:** 0..255
.. attribute:: asset_id
Asset ID
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address1
Base Mac Address #1
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address2
Base Mac Address #2
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address3
Base Mac Address #3
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address4
Base Mac Address #4
**type**\: str
**length:** 0..255
.. attribute:: block_checksum
Block Checksum
**type**\: str
**length:** 0..255
.. attribute:: block_count
Block Count
**type**\: str
**length:** 0..255
.. attribute:: block_length
Block Length
**type**\: str
**length:** 0..255
.. attribute:: block_signature
Block Signature
**type**\: str
**length:** 0..255
.. attribute:: block_version
Block Version
**type**\: str
**length:** 0..255
.. attribute:: chassis_sid
Chassis serial number
**type**\: str
**length:** 0..255
.. attribute:: clei
Common Language Equipment Identifier (CLEI) code
**type**\: str
**length:** 0..255
.. attribute:: controller_family
Controller family
**type**\: str
**length:** 0..255
.. attribute:: controller_type
Controller type
**type**\: str
**length:** 0..255
.. attribute:: description
A textual description of physical entity
**type**\: str
**length:** 0..255
.. attribute:: dev_num1
Deviation Number # 1
**type**\: str
**length:** 0..255
.. attribute:: dev_num2
Deviation Number # 2
**type**\: str
**length:** 0..255
.. attribute:: dev_num3
Deviation Number # 3
**type**\: str
**length:** 0..255
.. attribute:: dev_num4
Deviation Number # 4
**type**\: str
**length:** 0..255
.. attribute:: dev_num5
Deviation Number # 5
**type**\: str
**length:** 0..255
.. attribute:: dev_num6
Deviation Number # 6
**type**\: str
**length:** 0..255
.. attribute:: dev_num7
Deviation Number # 7
**type**\: str
**length:** 0..255
.. attribute:: eci
Equipment Catalog Item (ECI) number
**type**\: str
**length:** 0..255
.. attribute:: eeprom_size
EEPROM Size
**type**\: str
**length:** 0..255
.. attribute:: engineer_use
Engineer Use
**type**\: str
**length:** 0..255
.. attribute:: fru_major_type
FRU Major Type
**type**\: str
**length:** 0..255
.. attribute:: fru_minor_type
FRU Minor Type
**type**\: str
**length:** 0..255
.. attribute:: hw_version
Hardware Version
**type**\: str
**length:** 0..255
.. attribute:: hwid
Hardware Revision
**type**\: str
**length:** 0..255
.. attribute:: idprom_format_rev
IDPROM Format Revision
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size1
Mac Address Block Size #1
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size2
Mac Address Block Size #2
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size3
Mac Address Block Size #3
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size4
Mac Address Block Size #4
**type**\: str
**length:** 0..255
.. attribute:: manu_test_data
Manufacturing Test Data
**type**\: str
**length:** 0..255
.. attribute:: mfg_bits
MFG Bits
**type**\: str
**length:** 0..255
.. attribute:: mfg_deviation
MFG Deviation
**type**\: str
**length:** 0..255
.. attribute:: oem_string
OEM String
**type**\: str
**length:** 0..255
.. attribute:: part_number
Part Number
**type**\: str
**length:** 0..255
.. attribute:: part_revision
Part Revision
**type**\: str
**length:** 0..255
.. attribute:: pca_num
PCA number
**type**\: str
**length:** 0..255
.. attribute:: pcavid
PCA revision ID
**type**\: str
**length:** 0..255
.. attribute:: pcb_serial_num
PCB Serial Number
**type**\: str
**length:** 0..255
.. attribute:: pid
Product ID
**type**\: str
**length:** 0..255
.. attribute:: power_consumption
Power Consumption
**type**\: str
**length:** 0..255
.. attribute:: power_supply_type
Power Supply Type
**type**\: str
**length:** 0..255
.. attribute:: product_id
Product ID
**type**\: str
**length:** 0..255
.. attribute:: rma
RMA Data
**type**\: :py:class:`Rma <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.PowerShelfs.PowerShelf.PowerSupplies.PowerSupply.Information.Rma>`
.. attribute:: rma_code
RMA Code
**type**\: str
**length:** 0..255
.. attribute:: serial_number
Serial Number
**type**\: str
**length:** 0..255
.. attribute:: snmpoid
SNMP OID
**type**\: str
**length:** 0..255
.. attribute:: top_assem_part_num
Top assembly part number
**type**\: str
**length:** 0..255
.. attribute:: top_assem_vid
Top assembly revision number
**type**\: str
**length:** 0..255
.. attribute:: udi_description
UDI description
**type**\: str
**length:** 0..255
.. attribute:: udi_name
UDI name
**type**\: str
**length:** 0..255
.. attribute:: vid
Version ID
**type**\: str
**length:** 0..255
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialise every IDPROM leaf to None and attach the RMA child node."""
    self.parent = None
    # Leaves that precede the 'rma' child, in the generated order.
    for leaf in ('asset_alias', 'asset_id', 'base_mac_address1', 'base_mac_address2',
                 'base_mac_address3', 'base_mac_address4', 'block_checksum',
                 'block_count', 'block_length', 'block_signature', 'block_version',
                 'chassis_sid', 'clei', 'controller_family', 'controller_type',
                 'description', 'dev_num1', 'dev_num2', 'dev_num3', 'dev_num4',
                 'dev_num5', 'dev_num6', 'dev_num7', 'eci', 'eeprom_size',
                 'engineer_use', 'fru_major_type', 'fru_minor_type', 'hw_version',
                 'hwid', 'idprom_format_rev', 'mac_add_blk_size1', 'mac_add_blk_size2',
                 'mac_add_blk_size3', 'mac_add_blk_size4', 'manu_test_data',
                 'mfg_bits', 'mfg_deviation', 'oem_string', 'part_number',
                 'part_revision', 'pca_num', 'pcavid', 'pcb_serial_num', 'pid',
                 'power_consumption', 'power_supply_type', 'product_id'):
        setattr(self, leaf, None)
    self.rma = Diag.Racks.Rack.PowerShelfs.PowerShelf.PowerSupplies.PowerSupply.Information.Rma()
    self.rma.parent = self
    # Leaves that follow the 'rma' child.
    for leaf in ('rma_code', 'serial_number', 'snmpoid', 'top_assem_part_num',
                 'top_assem_vid', 'udi_description', 'udi_name', 'vid'):
        setattr(self, leaf, None)
class Rma(object):
    """
    RMA Data

    .. attribute:: rma_history
        RMA history
        **type**: str, **length:** 0..255
    .. attribute:: rma_number
        RMA tracking number, format N-N-N
        **type**: str, **length:** 0..255
    .. attribute:: test_history
        Test history
        **type**: str, **length:** 0..255
    """

    _prefix = 'sdr-invmgr-diag-oper'
    _revision = '2015-11-09'
    # YANG leaf names held by this node; consulted by _has_data.
    _LEAF_NAMES = ('rma_history', 'rma_number', 'test_history')

    def __init__(self):
        self.parent = None
        self.rma_history = None
        self.rma_number = None
        self.test_history = None

    @property
    def _common_path(self):
        # Paths are derived recursively from the parent chain.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:rma'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        # Operational nodes report is_config() False, so this returns False
        # before any leaf is inspected — behaviour preserved from the
        # generated code; the leaf scan replaces a repetitive if-chain.
        if not self.is_config():
            return False
        return any(getattr(self, leaf) is not None for leaf in self._LEAF_NAMES)

    @staticmethod
    def _meta_info():
        # Deferred import — presumably avoids an import cycle with _meta; confirm.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
        return meta._meta_table['Diag.Racks.Rack.PowerShelfs.PowerShelf.PowerSupplies.PowerSupply.Information.Rma']['meta_info']
@property
def _common_path(self):
    # Paths are derived recursively from the parent chain; a detached node
    # cannot produce a path.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:information'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Operational (read-only) model: always False.
    return False
def _has_data(self):
if not self.is_config():
return False
if self.asset_alias is not None:
return True
if self.asset_id is not None:
return True
if self.base_mac_address1 is not None:
return True
if self.base_mac_address2 is not None:
return True
if self.base_mac_address3 is not None:
return True
if self.base_mac_address4 is not None:
return True
if self.block_checksum is not None:
return True
if self.block_count is not None:
return True
if self.block_length is not None:
return True
if self.block_signature is not None:
return True
if self.block_version is not None:
return True
if self.chassis_sid is not None:
return True
if self.clei is not None:
return True
if self.controller_family is not None:
return True
if self.controller_type is not None:
return True
if self.description is not None:
return True
if self.dev_num1 is not None:
return True
if self.dev_num2 is not None:
return True
if self.dev_num3 is not None:
return True
if self.dev_num4 is not None:
return True
if self.dev_num5 is not None:
return True
if self.dev_num6 is not None:
return True
if self.dev_num7 is not None:
return True
if self.eci is not None:
return True
if self.eeprom_size is not None:
return True
if self.engineer_use is not None:
return True
if self.fru_major_type is not None:
return True
if self.fru_minor_type is not None:
return True
if self.hw_version is not None:
return True
if self.hwid is not None:
return True
if self.idprom_format_rev is not None:
return True
if self.mac_add_blk_size1 is not None:
return True
if self.mac_add_blk_size2 is not None:
return True
if self.mac_add_blk_size3 is not None:
return True
if self.mac_add_blk_size4 is not None:
return True
if self.manu_test_data is not None:
return True
if self.mfg_bits is not None:
return True
if self.mfg_deviation is not None:
return True
if self.oem_string is not None:
return True
if self.part_number is not None:
return True
if self.part_revision is not None:
return True
if self.pca_num is not None:
return True
if self.pcavid is not None:
return True
if self.pcb_serial_num is not None:
return True
if self.pid is not None:
return True
if self.power_consumption is not None:
return True
if self.power_supply_type is not None:
return True
if self.product_id is not None:
return True
if self.rma is not None and self.rma._has_data():
return True
if self.rma_code is not None:
return True
if self.serial_number is not None:
return True
if self.snmpoid is not None:
return True
if self.top_assem_part_num is not None:
return True
if self.top_assem_vid is not None:
return True
if self.udi_description is not None:
return True
if self.udi_name is not None:
return True
if self.vid is not None:
return True
return False
@staticmethod
def _meta_info():
    # Deferred import — presumably avoids an import cycle with _meta; confirm.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
    return meta._meta_table['Diag.Racks.Rack.PowerShelfs.PowerShelf.PowerSupplies.PowerSupply.Information']['meta_info']
@property
def _common_path(self):
    # Keyed list entry: both the parent chain and the key leaf must be set.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.power_supply_name is None:
        raise YPYModelError('Key property power_supply_name is None')
    return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:power-supply[Cisco-IOS-XR-sdr-invmgr-diag-oper:power-supply-name = ' + str(self.power_supply_name) + ']'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Operational (read-only) model: always False.
    return False
def _has_data(self):
    # NOTE: is_config() is always False for this operational model, so in
    # practice this returns False before the leaf checks run.
    if not self.is_config():
        return False
    if self.power_supply_name is not None:
        return True
    if self.information is not None and self.information._has_data():
        return True
    return False
@staticmethod
def _meta_info():
    # Deferred import — presumably avoids an import cycle with _meta; confirm.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
    return meta._meta_table['Diag.Racks.Rack.PowerShelfs.PowerShelf.PowerSupplies.PowerSupply']['meta_info']
@property
def _common_path(self):
    # Paths are derived recursively from the parent chain.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:power-supplies'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Operational (read-only) model: always False.
    return False
def _has_data(self):
    # NOTE: is_config() is always False here, so the list scan is
    # effectively unreachable — preserved from the generated code.
    if not self.is_config():
        return False
    # True when any child list entry carries data.
    if self.power_supply is not None:
        for child_ref in self.power_supply:
            if child_ref._has_data():
                return True
    return False
@staticmethod
def _meta_info():
    # Deferred import — presumably avoids an import cycle with _meta; confirm.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
    return meta._meta_table['Diag.Racks.Rack.PowerShelfs.PowerShelf.PowerSupplies']['meta_info']
@property
def _common_path(self):
    # Keyed list entry: both the parent chain and the key leaf must be set.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.power_shelf_name is None:
        raise YPYModelError('Key property power_shelf_name is None')
    return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:power-shelf[Cisco-IOS-XR-sdr-invmgr-diag-oper:power-shelf-name = ' + str(self.power_shelf_name) + ']'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Operational (read-only) model: always False.
    return False
def _has_data(self):
    # NOTE: is_config() is always False for this operational model, so in
    # practice this returns False before the leaf checks run.
    if not self.is_config():
        return False
    if self.power_shelf_name is not None:
        return True
    if self.power_supplies is not None and self.power_supplies._has_data():
        return True
    return False
@staticmethod
def _meta_info():
    # Deferred import — presumably avoids an import cycle with _meta; confirm.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
    return meta._meta_table['Diag.Racks.Rack.PowerShelfs.PowerShelf']['meta_info']
@property
def _common_path(self):
    # Paths are derived recursively from the parent chain.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:power-shelfs'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Operational (read-only) model: always False.
    return False
def _has_data(self):
    # NOTE: is_config() is always False here, so the list scan is
    # effectively unreachable — preserved from the generated code.
    if not self.is_config():
        return False
    # True when any child list entry carries data.
    if self.power_shelf is not None:
        for child_ref in self.power_shelf:
            if child_ref._has_data():
                return True
    return False
@staticmethod
def _meta_info():
    # Deferred import — presumably avoids an import cycle with _meta; confirm.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
    return meta._meta_table['Diag.Racks.Rack.PowerShelfs']['meta_info']
class FanTraies(object):
"""
Table for rack fan trays
.. attribute:: fan_tray
Fan tray name
**type**\: list of :py:class:`FanTray <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.FanTraies.FanTray>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
    # 'fan_tray' is a YANG list held in a YList.
    self.parent = None
    self.fan_tray = YList()
    self.fan_tray.parent = self
    self.fan_tray.name = 'fan_tray'
class FanTray(object):
"""
Fan tray name
.. attribute:: fan_tray_name <key>
Fan tray name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: fanses
Table for rack fans
**type**\: :py:class:`Fanses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.FanTraies.FanTray.Fanses>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
    # fan_tray_name is the list key; 'fanses' holds the per-fan table.
    self.parent = None
    self.fan_tray_name = None
    self.fanses = Diag.Racks.Rack.FanTraies.FanTray.Fanses()
    self.fanses.parent = self
class Fanses(object):
"""
Table for rack fans
.. attribute:: fans
Fan name
**type**\: list of :py:class:`Fans <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.FanTraies.FanTray.Fanses.Fans>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
    # 'fans' is a YANG list held in a YList.
    self.parent = None
    self.fans = YList()
    self.fans.parent = self
    self.fans.name = 'fans'
class Fans(object):
"""
Fan name
.. attribute:: fans_name <key>
Fans name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: information
Basic information
**type**\: :py:class:`Information <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.FanTraies.FanTray.Fanses.Fans.Information>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
    # fans_name is the list key; 'information' holds the IDPROM leaves.
    self.parent = None
    self.fans_name = None
    self.information = Diag.Racks.Rack.FanTraies.FanTray.Fanses.Fans.Information()
    self.information.parent = self
class Information(object):
"""
Basic information
.. attribute:: asset_alias
Asset Alias
**type**\: str
**length:** 0..255
.. attribute:: asset_id
Asset ID
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address1
Base Mac Address #1
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address2
Base Mac Address #2
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address3
Base Mac Address #3
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address4
Base Mac Address #4
**type**\: str
**length:** 0..255
.. attribute:: block_checksum
Block Checksum
**type**\: str
**length:** 0..255
.. attribute:: block_count
Block Count
**type**\: str
**length:** 0..255
.. attribute:: block_length
Block Length
**type**\: str
**length:** 0..255
.. attribute:: block_signature
Block Signature
**type**\: str
**length:** 0..255
.. attribute:: block_version
Block Version
**type**\: str
**length:** 0..255
.. attribute:: chassis_sid
Chassis serial number
**type**\: str
**length:** 0..255
.. attribute:: clei
Common Language Equipment Identifier (CLEI) code
**type**\: str
**length:** 0..255
.. attribute:: controller_family
Controller family
**type**\: str
**length:** 0..255
.. attribute:: controller_type
Controller type
**type**\: str
**length:** 0..255
.. attribute:: description
A textual description of physical entity
**type**\: str
**length:** 0..255
.. attribute:: dev_num1
Deviation Number # 1
**type**\: str
**length:** 0..255
.. attribute:: dev_num2
Deviation Number # 2
**type**\: str
**length:** 0..255
.. attribute:: dev_num3
Deviation Number # 3
**type**\: str
**length:** 0..255
.. attribute:: dev_num4
Deviation Number # 4
**type**\: str
**length:** 0..255
.. attribute:: dev_num5
Deviation Number # 5
**type**\: str
**length:** 0..255
.. attribute:: dev_num6
Deviation Number # 6
**type**\: str
**length:** 0..255
.. attribute:: dev_num7
Deviation Number # 7
**type**\: str
**length:** 0..255
.. attribute:: eci
Equipment Catalog Item (ECI) number
**type**\: str
**length:** 0..255
.. attribute:: eeprom_size
EEPROM Size
**type**\: str
**length:** 0..255
.. attribute:: engineer_use
Engineer Use
**type**\: str
**length:** 0..255
.. attribute:: fru_major_type
FRU Major Type
**type**\: str
**length:** 0..255
.. attribute:: fru_minor_type
FRU Minor Type
**type**\: str
**length:** 0..255
.. attribute:: hw_version
Hardware Version
**type**\: str
**length:** 0..255
.. attribute:: hwid
Hardware Revision
**type**\: str
**length:** 0..255
.. attribute:: idprom_format_rev
IDPROM Format Revision
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size1
Mac Address Block Size #1
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size2
Mac Address Block Size #2
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size3
Mac Address Block Size #3
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size4
Mac Address Block Size #4
**type**\: str
**length:** 0..255
.. attribute:: manu_test_data
Manufacturing Test Data
**type**\: str
**length:** 0..255
.. attribute:: mfg_bits
MFG Bits
**type**\: str
**length:** 0..255
.. attribute:: mfg_deviation
MFG Deviation
**type**\: str
**length:** 0..255
.. attribute:: oem_string
OEM String
**type**\: str
**length:** 0..255
.. attribute:: part_number
Part Number
**type**\: str
**length:** 0..255
.. attribute:: part_revision
Part Revision
**type**\: str
**length:** 0..255
.. attribute:: pca_num
PCA number
**type**\: str
**length:** 0..255
.. attribute:: pcavid
PCA revision ID
**type**\: str
**length:** 0..255
.. attribute:: pcb_serial_num
PCB Serial Number
**type**\: str
**length:** 0..255
.. attribute:: pid
Product ID
**type**\: str
**length:** 0..255
.. attribute:: power_consumption
Power Consumption
**type**\: str
**length:** 0..255
.. attribute:: power_supply_type
Power Supply Type
**type**\: str
**length:** 0..255
.. attribute:: product_id
Product ID
**type**\: str
**length:** 0..255
.. attribute:: rma
RMA Data
**type**\: :py:class:`Rma <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.FanTraies.FanTray.Fanses.Fans.Information.Rma>`
.. attribute:: rma_code
RMA Code
**type**\: str
**length:** 0..255
.. attribute:: serial_number
Serial Number
**type**\: str
**length:** 0..255
.. attribute:: snmpoid
SNMP OID
**type**\: str
**length:** 0..255
.. attribute:: top_assem_part_num
Top assembly part number
**type**\: str
**length:** 0..255
.. attribute:: top_assem_vid
Top assembly revision number
**type**\: str
**length:** 0..255
.. attribute:: udi_description
UDI description
**type**\: str
**length:** 0..255
.. attribute:: udi_name
UDI name
**type**\: str
**length:** 0..255
.. attribute:: vid
Version ID
**type**\: str
**length:** 0..255
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.asset_alias = None
self.asset_id = None
self.base_mac_address1 = None
self.base_mac_address2 = None
self.base_mac_address3 = None
self.base_mac_address4 = None
self.block_checksum = None
self.block_count = None
self.block_length = None
self.block_signature = None
self.block_version = None
self.chassis_sid = None
self.clei = None
self.controller_family = None
self.controller_type = None
self.description = None
self.dev_num1 = None
self.dev_num2 = None
self.dev_num3 = None
self.dev_num4 = None
self.dev_num5 = None
self.dev_num6 = None
self.dev_num7 = None
self.eci = None
self.eeprom_size = None
self.engineer_use = None
self.fru_major_type = None
self.fru_minor_type = None
self.hw_version = None
self.hwid = None
self.idprom_format_rev = None
self.mac_add_blk_size1 = None
self.mac_add_blk_size2 = None
self.mac_add_blk_size3 = None
self.mac_add_blk_size4 = None
self.manu_test_data = None
self.mfg_bits = None
self.mfg_deviation = None
self.oem_string = None
self.part_number = None
self.part_revision = None
self.pca_num = None
self.pcavid = None
self.pcb_serial_num = None
self.pid = None
self.power_consumption = None
self.power_supply_type = None
self.product_id = None
self.rma = Diag.Racks.Rack.FanTraies.FanTray.Fanses.Fans.Information.Rma()
self.rma.parent = self
self.rma_code = None
self.serial_number = None
self.snmpoid = None
self.top_assem_part_num = None
self.top_assem_vid = None
self.udi_description = None
self.udi_name = None
self.vid = None
class Rma(object):
"""
RMA Data
.. attribute:: rma_history
RMA history
**type**\: str
**length:** 0..255
.. attribute:: rma_number
RMA tracking number format is N\-N\-N
**type**\: str
**length:** 0..255
.. attribute:: test_history
Test history
**type**\: str
**length:** 0..255
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.rma_history = None
self.rma_number = None
self.test_history = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:rma'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.rma_history is not None:
return True
if self.rma_number is not None:
return True
if self.test_history is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.FanTraies.FanTray.Fanses.Fans.Information.Rma']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:information'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.asset_alias is not None:
return True
if self.asset_id is not None:
return True
if self.base_mac_address1 is not None:
return True
if self.base_mac_address2 is not None:
return True
if self.base_mac_address3 is not None:
return True
if self.base_mac_address4 is not None:
return True
if self.block_checksum is not None:
return True
if self.block_count is not None:
return True
if self.block_length is not None:
return True
if self.block_signature is not None:
return True
if self.block_version is not None:
return True
if self.chassis_sid is not None:
return True
if self.clei is not None:
return True
if self.controller_family is not None:
return True
if self.controller_type is not None:
return True
if self.description is not None:
return True
if self.dev_num1 is not None:
return True
if self.dev_num2 is not None:
return True
if self.dev_num3 is not None:
return True
if self.dev_num4 is not None:
return True
if self.dev_num5 is not None:
return True
if self.dev_num6 is not None:
return True
if self.dev_num7 is not None:
return True
if self.eci is not None:
return True
if self.eeprom_size is not None:
return True
if self.engineer_use is not None:
return True
if self.fru_major_type is not None:
return True
if self.fru_minor_type is not None:
return True
if self.hw_version is not None:
return True
if self.hwid is not None:
return True
if self.idprom_format_rev is not None:
return True
if self.mac_add_blk_size1 is not None:
return True
if self.mac_add_blk_size2 is not None:
return True
if self.mac_add_blk_size3 is not None:
return True
if self.mac_add_blk_size4 is not None:
return True
if self.manu_test_data is not None:
return True
if self.mfg_bits is not None:
return True
if self.mfg_deviation is not None:
return True
if self.oem_string is not None:
return True
if self.part_number is not None:
return True
if self.part_revision is not None:
return True
if self.pca_num is not None:
return True
if self.pcavid is not None:
return True
if self.pcb_serial_num is not None:
return True
if self.pid is not None:
return True
if self.power_consumption is not None:
return True
if self.power_supply_type is not None:
return True
if self.product_id is not None:
return True
if self.rma is not None and self.rma._has_data():
return True
if self.rma_code is not None:
return True
if self.serial_number is not None:
return True
if self.snmpoid is not None:
return True
if self.top_assem_part_num is not None:
return True
if self.top_assem_vid is not None:
return True
if self.udi_description is not None:
return True
if self.udi_name is not None:
return True
if self.vid is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.FanTraies.FanTray.Fanses.Fans.Information']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.fans_name is None:
raise YPYModelError('Key property fans_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:fans[Cisco-IOS-XR-sdr-invmgr-diag-oper:fans-name = ' + str(self.fans_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.fans_name is not None:
return True
if self.information is not None and self.information._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.FanTraies.FanTray.Fanses.Fans']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:fanses'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.fans is not None:
for child_ref in self.fans:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.FanTraies.FanTray.Fanses']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.fan_tray_name is None:
raise YPYModelError('Key property fan_tray_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:fan-tray[Cisco-IOS-XR-sdr-invmgr-diag-oper:fan-tray-name = ' + str(self.fan_tray_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.fan_tray_name is not None:
return True
if self.fanses is not None and self.fanses._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.FanTraies.FanTray']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:fan-traies'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.fan_tray is not None:
for child_ref in self.fan_tray:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.FanTraies']['meta_info']
# NOTE(review): auto-generated YDK-Py binding class (same pattern as the
# sibling containers in this file) -- presumably emitted by ydk-gen from the
# Cisco-IOS-XR-sdr-invmgr-diag-oper YANG model; prefer regenerating over
# hand-editing.
class Slots(object):
"""
Table of slots
.. attribute:: slot
Slot name
**type**\: list of :py:class:`Slot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.Slots.Slot>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
# Wire up the slot YList and set the parent back-reference so
# _common_path can walk up the containment tree.
def __init__(self):
self.parent = None
self.slot = YList()
self.slot.parent = self
self.slot.name = 'slot'
class Slot(object):
"""
Slot name
.. attribute:: slot_name <key>
Slot name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: instances
Table of instances
**type**\: :py:class:`Instances <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.Slots.Slot.Instances>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
# slot_name is the list key; it must be set before a path can be derived.
def __init__(self):
self.parent = None
self.slot_name = None
self.instances = Diag.Racks.Rack.Slots.Slot.Instances()
self.instances.parent = self
class Instances(object):
"""
Table of instances
.. attribute:: instance
instance number
**type**\: list of :py:class:`Instance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.Slots.Slot.Instances.Instance>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.instance = YList()
self.instance.parent = self
self.instance.name = 'instance'
class Instance(object):
"""
instance number
.. attribute:: name <key>
Instance name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: detail
Detail information
**type**\: :py:class:`Detail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.Slots.Slot.Instances.Instance.Detail>`
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
# name is the list key; it must be set before a path can be derived.
def __init__(self):
self.parent = None
self.name = None
self.detail = Diag.Racks.Rack.Slots.Slot.Instances.Instance.Detail()
self.detail.parent = self
class Detail(object):
"""
Detail information
.. attribute:: card_instance
Card instance
**type**\: :py:class:`CardInstance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.Slots.Slot.Instances.Instance.Detail.CardInstance>`
.. attribute:: node_operational_state
Node operational state
**type**\: str
**length:** 0..255
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.card_instance = Diag.Racks.Rack.Slots.Slot.Instances.Instance.Detail.CardInstance()
self.card_instance.parent = self
self.node_operational_state = None
class CardInstance(object):
"""
Card instance
.. attribute:: asset_alias
Asset Alias
**type**\: str
**length:** 0..255
.. attribute:: asset_id
Asset ID
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address1
Base Mac Address #1
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address2
Base Mac Address #2
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address3
Base Mac Address #3
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address4
Base Mac Address #4
**type**\: str
**length:** 0..255
.. attribute:: block_checksum
Block Checksum
**type**\: str
**length:** 0..255
.. attribute:: block_count
Block Count
**type**\: str
**length:** 0..255
.. attribute:: block_length
Block Length
**type**\: str
**length:** 0..255
.. attribute:: block_signature
Block Signature
**type**\: str
**length:** 0..255
.. attribute:: block_version
Block Version
**type**\: str
**length:** 0..255
.. attribute:: chassis_sid
Chassis serial number
**type**\: str
**length:** 0..255
.. attribute:: clei
Common Language Equipment Identifier (CLEI) code
**type**\: str
**length:** 0..255
.. attribute:: controller_family
Controller family
**type**\: str
**length:** 0..255
.. attribute:: controller_type
Controller type
**type**\: str
**length:** 0..255
.. attribute:: description
A textual description of physical entity
**type**\: str
**length:** 0..255
.. attribute:: dev_num1
Deviation Number # 1
**type**\: str
**length:** 0..255
.. attribute:: dev_num2
Deviation Number # 2
**type**\: str
**length:** 0..255
.. attribute:: dev_num3
Deviation Number # 3
**type**\: str
**length:** 0..255
.. attribute:: dev_num4
Deviation Number # 4
**type**\: str
**length:** 0..255
.. attribute:: dev_num5
Deviation Number # 5
**type**\: str
**length:** 0..255
.. attribute:: dev_num6
Deviation Number # 6
**type**\: str
**length:** 0..255
.. attribute:: dev_num7
Deviation Number # 7
**type**\: str
**length:** 0..255
.. attribute:: eci
Equipment Catalog Item (ECI) number
**type**\: str
**length:** 0..255
.. attribute:: eeprom_size
EEPROM Size
**type**\: str
**length:** 0..255
.. attribute:: engineer_use
Engineer Use
**type**\: str
**length:** 0..255
.. attribute:: fru_major_type
FRU Major Type
**type**\: str
**length:** 0..255
.. attribute:: fru_minor_type
FRU Minor Type
**type**\: str
**length:** 0..255
.. attribute:: hw_version
Hardware Version
**type**\: str
**length:** 0..255
.. attribute:: hwid
Hardware Revision
**type**\: str
**length:** 0..255
.. attribute:: idprom_format_rev
IDPROM Format Revision
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size1
Mac Address Block Size #1
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size2
Mac Address Block Size #2
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size3
Mac Address Block Size #3
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size4
Mac Address Block Size #4
**type**\: str
**length:** 0..255
.. attribute:: manu_test_data
Manufacturing Test Data
**type**\: str
**length:** 0..255
.. attribute:: mfg_bits
MFG Bits
**type**\: str
**length:** 0..255
.. attribute:: mfg_deviation
MFG Deviation
**type**\: str
**length:** 0..255
.. attribute:: oem_string
OEM String
**type**\: str
**length:** 0..255
.. attribute:: part_number
Part Number
**type**\: str
**length:** 0..255
.. attribute:: part_revision
Part Revision
**type**\: str
**length:** 0..255
.. attribute:: pca_num
PCA number
**type**\: str
**length:** 0..255
.. attribute:: pcavid
PCA revision ID
**type**\: str
**length:** 0..255
.. attribute:: pcb_serial_num
PCB Serial Number
**type**\: str
**length:** 0..255
.. attribute:: pid
Product ID
**type**\: str
**length:** 0..255
.. attribute:: power_consumption
Power Consumption
**type**\: str
**length:** 0..255
.. attribute:: power_supply_type
Power Supply Type
**type**\: str
**length:** 0..255
.. attribute:: product_id
Product ID
**type**\: str
**length:** 0..255
.. attribute:: rma
RMA Data
**type**\: :py:class:`Rma <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.Slots.Slot.Instances.Instance.Detail.CardInstance.Rma>`
.. attribute:: rma_code
RMA Code
**type**\: str
**length:** 0..255
.. attribute:: serial_number
Serial Number
**type**\: str
**length:** 0..255
.. attribute:: snmpoid
SNMP OID
**type**\: str
**length:** 0..255
.. attribute:: top_assem_part_num
Top assembly part number
**type**\: str
**length:** 0..255
.. attribute:: top_assem_vid
Top assembly revision number
**type**\: str
**length:** 0..255
.. attribute:: udi_description
UDI description
**type**\: str
**length:** 0..255
.. attribute:: udi_name
UDI name
**type**\: str
**length:** 0..255
.. attribute:: vid
Version ID
**type**\: str
**length:** 0..255
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
# All leaves default to None (unset); only rma is a child container.
def __init__(self):
self.parent = None
self.asset_alias = None
self.asset_id = None
self.base_mac_address1 = None
self.base_mac_address2 = None
self.base_mac_address3 = None
self.base_mac_address4 = None
self.block_checksum = None
self.block_count = None
self.block_length = None
self.block_signature = None
self.block_version = None
self.chassis_sid = None
self.clei = None
self.controller_family = None
self.controller_type = None
self.description = None
self.dev_num1 = None
self.dev_num2 = None
self.dev_num3 = None
self.dev_num4 = None
self.dev_num5 = None
self.dev_num6 = None
self.dev_num7 = None
self.eci = None
self.eeprom_size = None
self.engineer_use = None
self.fru_major_type = None
self.fru_minor_type = None
self.hw_version = None
self.hwid = None
self.idprom_format_rev = None
self.mac_add_blk_size1 = None
self.mac_add_blk_size2 = None
self.mac_add_blk_size3 = None
self.mac_add_blk_size4 = None
self.manu_test_data = None
self.mfg_bits = None
self.mfg_deviation = None
self.oem_string = None
self.part_number = None
self.part_revision = None
self.pca_num = None
self.pcavid = None
self.pcb_serial_num = None
self.pid = None
self.power_consumption = None
self.power_supply_type = None
self.product_id = None
self.rma = Diag.Racks.Rack.Slots.Slot.Instances.Instance.Detail.CardInstance.Rma()
self.rma.parent = self
self.rma_code = None
self.serial_number = None
self.snmpoid = None
self.top_assem_part_num = None
self.top_assem_vid = None
self.udi_description = None
self.udi_name = None
self.vid = None
class Rma(object):
"""
RMA Data
.. attribute:: rma_history
RMA history
**type**\: str
**length:** 0..255
.. attribute:: rma_number
RMA tracking number format is N\-N\-N
**type**\: str
**length:** 0..255
.. attribute:: test_history
Test history
**type**\: str
**length:** 0..255
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.rma_history = None
self.rma_number = None
self.test_history = None
# (Rma methods -- see the ':rma' path segment and the '...CardInstance.Rma' meta key)
# Absolute schema path; requires this node to be attached to a parent.
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:rma'
# Operational (read-only) model: always False.
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
# True when any leaf on this node is set.
def _has_data(self):
if not self.is_config():
return False
if self.rma_history is not None:
return True
if self.rma_number is not None:
return True
if self.test_history is not None:
return True
return False
# Deferred import avoids a circular dependency with the generated _meta package.
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.Slots.Slot.Instances.Instance.Detail.CardInstance.Rma']['meta_info']
# (CardInstance methods -- see the ':card-instance' path segment and meta key)
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:card-instance'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
# True when any leaf is set or the rma child container holds data.
def _has_data(self):
if not self.is_config():
return False
if self.asset_alias is not None:
return True
if self.asset_id is not None:
return True
if self.base_mac_address1 is not None:
return True
if self.base_mac_address2 is not None:
return True
if self.base_mac_address3 is not None:
return True
if self.base_mac_address4 is not None:
return True
if self.block_checksum is not None:
return True
if self.block_count is not None:
return True
if self.block_length is not None:
return True
if self.block_signature is not None:
return True
if self.block_version is not None:
return True
if self.chassis_sid is not None:
return True
if self.clei is not None:
return True
if self.controller_family is not None:
return True
if self.controller_type is not None:
return True
if self.description is not None:
return True
if self.dev_num1 is not None:
return True
if self.dev_num2 is not None:
return True
if self.dev_num3 is not None:
return True
if self.dev_num4 is not None:
return True
if self.dev_num5 is not None:
return True
if self.dev_num6 is not None:
return True
if self.dev_num7 is not None:
return True
if self.eci is not None:
return True
if self.eeprom_size is not None:
return True
if self.engineer_use is not None:
return True
if self.fru_major_type is not None:
return True
if self.fru_minor_type is not None:
return True
if self.hw_version is not None:
return True
if self.hwid is not None:
return True
if self.idprom_format_rev is not None:
return True
if self.mac_add_blk_size1 is not None:
return True
if self.mac_add_blk_size2 is not None:
return True
if self.mac_add_blk_size3 is not None:
return True
if self.mac_add_blk_size4 is not None:
return True
if self.manu_test_data is not None:
return True
if self.mfg_bits is not None:
return True
if self.mfg_deviation is not None:
return True
if self.oem_string is not None:
return True
if self.part_number is not None:
return True
if self.part_revision is not None:
return True
if self.pca_num is not None:
return True
if self.pcavid is not None:
return True
if self.pcb_serial_num is not None:
return True
if self.pid is not None:
return True
if self.power_consumption is not None:
return True
if self.power_supply_type is not None:
return True
if self.product_id is not None:
return True
if self.rma is not None and self.rma._has_data():
return True
if self.rma_code is not None:
return True
if self.serial_number is not None:
return True
if self.snmpoid is not None:
return True
if self.top_assem_part_num is not None:
return True
if self.top_assem_vid is not None:
return True
if self.udi_description is not None:
return True
if self.udi_name is not None:
return True
if self.vid is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.Slots.Slot.Instances.Instance.Detail.CardInstance']['meta_info']
# (Detail methods -- container holding card_instance and the node state leaf)
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:detail'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.card_instance is not None and self.card_instance._has_data():
return True
if self.node_operational_state is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.Slots.Slot.Instances.Instance.Detail']['meta_info']
# (Instance methods -- keyed list entry; path embeds the name key)
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.name is None:
raise YPYModelError('Key property name is None')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:instance[Cisco-IOS-XR-sdr-invmgr-diag-oper:name = ' + str(self.name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.name is not None:
return True
if self.detail is not None and self.detail._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.Slots.Slot.Instances.Instance']['meta_info']
# (Instances methods -- container of the instance list)
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:instances'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
# True when any list entry holds data.
def _has_data(self):
if not self.is_config():
return False
if self.instance is not None:
for child_ref in self.instance:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.Slots.Slot.Instances']['meta_info']
# (Slot methods -- keyed list entry; path embeds the slot-name key)
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.slot_name is None:
raise YPYModelError('Key property slot_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:slot[Cisco-IOS-XR-sdr-invmgr-diag-oper:slot-name = ' + str(self.slot_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.slot_name is not None:
return True
if self.instances is not None and self.instances._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.Slots.Slot']['meta_info']
# (Slots methods -- container of the slot list)
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:slots'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.slot is not None:
for child_ref in self.slot:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.Slots']['meta_info']
class Chassis(object):
"""
Chassis information
.. attribute:: asset_alias
Asset Alias
**type**\: str
**length:** 0..255
.. attribute:: asset_id
Asset ID
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address1
Base Mac Address #1
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address2
Base Mac Address #2
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address3
Base Mac Address #3
**type**\: str
**length:** 0..255
.. attribute:: base_mac_address4
Base Mac Address #4
**type**\: str
**length:** 0..255
.. attribute:: block_checksum
Block Checksum
**type**\: str
**length:** 0..255
.. attribute:: block_count
Block Count
**type**\: str
**length:** 0..255
.. attribute:: block_length
Block Length
**type**\: str
**length:** 0..255
.. attribute:: block_signature
Block Signature
**type**\: str
**length:** 0..255
.. attribute:: block_version
Block Version
**type**\: str
**length:** 0..255
.. attribute:: chassis_sid
Chassis serial number
**type**\: str
**length:** 0..255
.. attribute:: clei
Common Language Equipment Identifier (CLEI) code
**type**\: str
**length:** 0..255
.. attribute:: controller_family
Controller family
**type**\: str
**length:** 0..255
.. attribute:: controller_type
Controller type
**type**\: str
**length:** 0..255
.. attribute:: description
A textual description of physical entity
**type**\: str
**length:** 0..255
.. attribute:: dev_num1
Deviation Number # 1
**type**\: str
**length:** 0..255
.. attribute:: dev_num2
Deviation Number # 2
**type**\: str
**length:** 0..255
.. attribute:: dev_num3
Deviation Number # 3
**type**\: str
**length:** 0..255
.. attribute:: dev_num4
Deviation Number # 4
**type**\: str
**length:** 0..255
.. attribute:: dev_num5
Deviation Number # 5
**type**\: str
**length:** 0..255
.. attribute:: dev_num6
Deviation Number # 6
**type**\: str
**length:** 0..255
.. attribute:: dev_num7
Deviation Number # 7
**type**\: str
**length:** 0..255
.. attribute:: eci
Equipment Catalog Item (ECI) number
**type**\: str
**length:** 0..255
.. attribute:: eeprom_size
EEPROM Size
**type**\: str
**length:** 0..255
.. attribute:: engineer_use
Engineer Use
**type**\: str
**length:** 0..255
.. attribute:: fru_major_type
FRU Major Type
**type**\: str
**length:** 0..255
.. attribute:: fru_minor_type
FRU Minor Type
**type**\: str
**length:** 0..255
.. attribute:: hw_version
Hardware Version
**type**\: str
**length:** 0..255
.. attribute:: hwid
Hardware Revision
**type**\: str
**length:** 0..255
.. attribute:: idprom_format_rev
IDPROM Format Revision
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size1
Mac Address Block Size #1
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size2
Mac Address Block Size #2
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size3
Mac Address Block Size #3
**type**\: str
**length:** 0..255
.. attribute:: mac_add_blk_size4
Mac Address Block Size #4
**type**\: str
**length:** 0..255
.. attribute:: manu_test_data
Manufacturing Test Data
**type**\: str
**length:** 0..255
.. attribute:: mfg_bits
MFG Bits
**type**\: str
**length:** 0..255
.. attribute:: mfg_deviation
MFG Deviation
**type**\: str
**length:** 0..255
.. attribute:: oem_string
OEM String
**type**\: str
**length:** 0..255
.. attribute:: part_number
Part Number
**type**\: str
**length:** 0..255
.. attribute:: part_revision
Part Revision
**type**\: str
**length:** 0..255
.. attribute:: pca_num
PCA number
**type**\: str
**length:** 0..255
.. attribute:: pcavid
PCA revision ID
**type**\: str
**length:** 0..255
.. attribute:: pcb_serial_num
PCB Serial Number
**type**\: str
**length:** 0..255
.. attribute:: pid
Product ID
**type**\: str
**length:** 0..255
.. attribute:: power_consumption
Power Consumption
**type**\: str
**length:** 0..255
.. attribute:: power_supply_type
Power Supply Type
**type**\: str
**length:** 0..255
.. attribute:: product_id
Product ID
**type**\: str
**length:** 0..255
.. attribute:: rma
RMA Data
**type**\: :py:class:`Rma <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sdr_invmgr_diag_oper.Diag.Racks.Rack.Chassis.Rma>`
.. attribute:: rma_code
RMA Code
**type**\: str
**length:** 0..255
.. attribute:: serial_number
Serial Number
**type**\: str
**length:** 0..255
.. attribute:: snmpoid
SNMP OID
**type**\: str
**length:** 0..255
.. attribute:: top_assem_part_num
Top assembly part number
**type**\: str
**length:** 0..255
.. attribute:: top_assem_vid
Top assembly revision number
**type**\: str
**length:** 0..255
.. attribute:: udi_description
UDI description
**type**\: str
**length:** 0..255
.. attribute:: udi_name
UDI name
**type**\: str
**length:** 0..255
.. attribute:: vid
Version ID
**type**\: str
**length:** 0..255
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.asset_alias = None
self.asset_id = None
self.base_mac_address1 = None
self.base_mac_address2 = None
self.base_mac_address3 = None
self.base_mac_address4 = None
self.block_checksum = None
self.block_count = None
self.block_length = None
self.block_signature = None
self.block_version = None
self.chassis_sid = None
self.clei = None
self.controller_family = None
self.controller_type = None
self.description = None
self.dev_num1 = None
self.dev_num2 = None
self.dev_num3 = None
self.dev_num4 = None
self.dev_num5 = None
self.dev_num6 = None
self.dev_num7 = None
self.eci = None
self.eeprom_size = None
self.engineer_use = None
self.fru_major_type = None
self.fru_minor_type = None
self.hw_version = None
self.hwid = None
self.idprom_format_rev = None
self.mac_add_blk_size1 = None
self.mac_add_blk_size2 = None
self.mac_add_blk_size3 = None
self.mac_add_blk_size4 = None
self.manu_test_data = None
self.mfg_bits = None
self.mfg_deviation = None
self.oem_string = None
self.part_number = None
self.part_revision = None
self.pca_num = None
self.pcavid = None
self.pcb_serial_num = None
self.pid = None
self.power_consumption = None
self.power_supply_type = None
self.product_id = None
self.rma = Diag.Racks.Rack.Chassis.Rma()
self.rma.parent = self
self.rma_code = None
self.serial_number = None
self.snmpoid = None
self.top_assem_part_num = None
self.top_assem_vid = None
self.udi_description = None
self.udi_name = None
self.vid = None
class Rma(object):
"""
RMA Data
.. attribute:: rma_history
RMA history
**type**\: str
**length:** 0..255
.. attribute:: rma_number
RMA tracking number format is N\-N\-N
**type**\: str
**length:** 0..255
.. attribute:: test_history
Test history
**type**\: str
**length:** 0..255
"""
_prefix = 'sdr-invmgr-diag-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.rma_history = None
self.rma_number = None
self.test_history = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:rma'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.rma_history is not None:
return True
if self.rma_number is not None:
return True
if self.test_history is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.Chassis.Rma']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-sdr-invmgr-diag-oper:chassis'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.asset_alias is not None:
return True
if self.asset_id is not None:
return True
if self.base_mac_address1 is not None:
return True
if self.base_mac_address2 is not None:
return True
if self.base_mac_address3 is not None:
return True
if self.base_mac_address4 is not None:
return True
if self.block_checksum is not None:
return True
if self.block_count is not None:
return True
if self.block_length is not None:
return True
if self.block_signature is not None:
return True
if self.block_version is not None:
return True
if self.chassis_sid is not None:
return True
if self.clei is not None:
return True
if self.controller_family is not None:
return True
if self.controller_type is not None:
return True
if self.description is not None:
return True
if self.dev_num1 is not None:
return True
if self.dev_num2 is not None:
return True
if self.dev_num3 is not None:
return True
if self.dev_num4 is not None:
return True
if self.dev_num5 is not None:
return True
if self.dev_num6 is not None:
return True
if self.dev_num7 is not None:
return True
if self.eci is not None:
return True
if self.eeprom_size is not None:
return True
if self.engineer_use is not None:
return True
if self.fru_major_type is not None:
return True
if self.fru_minor_type is not None:
return True
if self.hw_version is not None:
return True
if self.hwid is not None:
return True
if self.idprom_format_rev is not None:
return True
if self.mac_add_blk_size1 is not None:
return True
if self.mac_add_blk_size2 is not None:
return True
if self.mac_add_blk_size3 is not None:
return True
if self.mac_add_blk_size4 is not None:
return True
if self.manu_test_data is not None:
return True
if self.mfg_bits is not None:
return True
if self.mfg_deviation is not None:
return True
if self.oem_string is not None:
return True
if self.part_number is not None:
return True
if self.part_revision is not None:
return True
if self.pca_num is not None:
return True
if self.pcavid is not None:
return True
if self.pcb_serial_num is not None:
return True
if self.pid is not None:
return True
if self.power_consumption is not None:
return True
if self.power_supply_type is not None:
return True
if self.product_id is not None:
return True
if self.rma is not None and self.rma._has_data():
return True
if self.rma_code is not None:
return True
if self.serial_number is not None:
return True
if self.snmpoid is not None:
return True
if self.top_assem_part_num is not None:
return True
if self.top_assem_vid is not None:
return True
if self.udi_description is not None:
return True
if self.udi_name is not None:
return True
if self.vid is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack.Chassis']['meta_info']
@property
def _common_path(self):
if self.rack_name is None:
raise YPYModelError('Key property rack_name is None')
return '/Cisco-IOS-XR-sdr-invmgr-diag-oper:diag/Cisco-IOS-XR-sdr-invmgr-diag-oper:racks/Cisco-IOS-XR-sdr-invmgr-diag-oper:rack[Cisco-IOS-XR-sdr-invmgr-diag-oper:rack-name = ' + str(self.rack_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.rack_name is not None:
return True
if self.chassis is not None and self.chassis._has_data():
return True
if self.fan_traies is not None and self.fan_traies._has_data():
return True
if self.power_shelfs is not None and self.power_shelfs._has_data():
return True
if self.slots is not None and self.slots._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks.Rack']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-sdr-invmgr-diag-oper:diag/Cisco-IOS-XR-sdr-invmgr-diag-oper:racks'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.rack is not None:
for child_ref in self.rack:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag.Racks']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-sdr-invmgr-diag-oper:diag'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.racks is not None and self.racks._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_sdr_invmgr_diag_oper as meta
return meta._meta_table['Diag']['meta_info']
| 42.338811
| 218
| 0.292675
| 10,312
| 162,327
| 4.421742
| 0.021916
| 0.038818
| 0.052701
| 0.080268
| 0.933548
| 0.926333
| 0.914183
| 0.907713
| 0.89348
| 0.885738
| 0
| 0.024712
| 0.653496
| 162,327
| 3,833
| 219
| 42.349857
| 0.785944
| 0.200589
| 0
| 0.875841
| 0
| 0.005984
| 0.056574
| 0.030365
| 0
| 0
| 0
| 0
| 0
| 1
| 0.089753
| false
| 0
| 0.02169
| 0.001496
| 0.420344
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6e29da76891779b5b846bb4e44a9fc01c03fde90
| 135
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/panther/phys/Phys_Datasheet.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/panther/phys/Phys_Datasheet.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/panther/phys/Phys_Datasheet.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from pyradioconfig.parts.nixi.phys.Phys_Datasheet import PHYS_Datasheet
class PHYS_Datasheet_Panther():
# inherit none
pass
| 16.875
| 71
| 0.785185
| 17
| 135
| 6
| 0.705882
| 0.382353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 135
| 7
| 72
| 19.285714
| 0.894737
| 0.088889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
28645235ce31332689585c61d12cc6e92e6762d1
| 3,505
|
py
|
Python
|
1.py
|
Swizec/advent-of-code-2017
|
efcdac9ddb81dd1db4a7bc0945ff4c13f6f15513
|
[
"MIT"
] | 5
|
2017-12-02T08:55:59.000Z
|
2018-01-11T05:52:18.000Z
|
1.py
|
Swizec/advent-of-code-2017
|
efcdac9ddb81dd1db4a7bc0945ff4c13f6f15513
|
[
"MIT"
] | null | null | null |
1.py
|
Swizec/advent-of-code-2017
|
efcdac9ddb81dd1db4a7bc0945ff4c13f6f15513
|
[
"MIT"
] | 1
|
2020-06-18T19:27:02.000Z
|
2020-06-18T19:27:02.000Z
|
input = "73857646862514444739979151237829725363437326575178346717594627954612137824283429318961816955789962743213174192423595347839573729329537743363381184889671727276518624988381953176542897975586834585111269962179533228172293723734558621778444784433918354845915252356518634648911779272449549258277867994365365925613742692994747383212935753858994384465585692412362787797799835879124313954752447965388883732871869216474268662377567373427319767639594991499963155915847161221991832952774398729113713139245944867664794385444174165297434951148198259845244373672252341847726179425259549611369768753251827257547683726845319726144551345235963383553744442735221153622387343831647781293766286214976629654567616317961783535996298876659395218924473612194796464839787983927161197932827177395248973859582737267763181549776755462877898742653396887539771851293349297154863818752862785282476964641622976916981547127755895419452635748972665759964555476255379479279724979793339321151651514627422163273211162913723965856186644757153212981223357892629422845713284145693754643864468248825519188431851958295473739154826875344329427783125427527983134346284982952166926467131372441981232195316935598489158346238259191915326587354221769654517418696667148741584925564459548522991618686514481238258217753632192462445159463926862755455619893555739469247674422534653427539957647919279511587712319441776924695314945596979111766139433962581418222445784574983613523815181665875833422338169893295444156211273979967239973972196764869666847296537635257686553244439911298621291812153399475552572795929212582466462157647366985832116258874361761492513564523582114583434393746883411165297269724346973247345251141922296414642279865828454777417477876735888484397136193268896243269445533867828216335387753719159738999592952329279967422189265143741689475824418927314629934818772777144368875972238718811496932289284424276116646557723334718937359324199378329379534959295148376638839384166443873428258366737337781194815144275124533576283966667915
47531814844176342696362416842993761919369994779897357348334197721735231299249116477"
# 1st star
""" The captcha requires you to review a sequence of digits (your puzzle input) and find the sum of all digits that match the next digit in the list. The list is circular, so the digit after the last digit is the first digit in the list.
For example:
1122 produces a sum of 3 (1 + 2) because the first digit (1) matches the second digit and the third digit (2) matches the fourth digit.
1111 produces 4 because each digit (all 1) matches the next.
1234 produces 0 because no digit matches the next.
91212129 produces 9 because the only digit that matches the next one is the last digit, 9."""
print sum([int(input[i]) for i in xrange(len(input)) if input[(i+1)%len(input)] == input[i]])
# 2nd star
"""Now, instead of considering the next digit, it wants you to consider the digit halfway around the circular list. That is, if your list contains 10 items, only include a digit in your sum if the digit 10/2 = 5 steps forward matches it. Fortunately, your list has an even number of elements.
For example:
1212 produces 6: the list contains 4 items, and all four digits match the digit 2 items ahead.
1221 produces 0, because every comparison is between a 1 and a 2.
123425 produces 4, because both 2s match each other, but no other digit has a match.
123123 produces 12.
12131415 produces 4."""
print sum([int(input[i]) for i in xrange(len(input)) if input[(i+len(input)/2)%len(input)] == input[i]])
| 125.178571
| 2,085
| 0.894722
| 265
| 3,505
| 11.833962
| 0.369811
| 0.01148
| 0.013393
| 0.008929
| 0.028699
| 0.028699
| 0.028699
| 0.028699
| 0.028699
| 0.028699
| 0
| 0.663482
| 0.07418
| 3,505
| 27
| 2,086
| 129.814815
| 0.302928
| 0.00485
| 0
| 0
| 0
| 0
| 0.904492
| 0.904492
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.666667
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
286d4ab5cd455c38b610278ac1817db0ca31093c
| 35
|
py
|
Python
|
src/project_name/__init__.py
|
hlop3z/dexter
|
5ee7306597f02088c249e37338d8ddc0b1c21fda
|
[
"MIT"
] | null | null | null |
src/project_name/__init__.py
|
hlop3z/dexter
|
5ee7306597f02088c249e37338d8ddc0b1c21fda
|
[
"MIT"
] | null | null | null |
src/project_name/__init__.py
|
hlop3z/dexter
|
5ee7306597f02088c249e37338d8ddc0b1c21fda
|
[
"MIT"
] | null | null | null |
from . import pk1
from . import pk2
| 17.5
| 17
| 0.742857
| 6
| 35
| 4.333333
| 0.666667
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0.2
| 35
| 2
| 18
| 17.5
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
28979b03e5f34ad70e5128fc60499933a2c5179a
| 167
|
py
|
Python
|
Python Boostcamp/ep1/Hello.py
|
dunkdink/PytonBootcamp2021
|
a5a75cbba085e539b43927c6ce4191a1b904ce75
|
[
"MIT"
] | null | null | null |
Python Boostcamp/ep1/Hello.py
|
dunkdink/PytonBootcamp2021
|
a5a75cbba085e539b43927c6ce4191a1b904ce75
|
[
"MIT"
] | null | null | null |
Python Boostcamp/ep1/Hello.py
|
dunkdink/PytonBootcamp2021
|
a5a75cbba085e539b43927c6ce4191a1b904ce75
|
[
"MIT"
] | null | null | null |
print('Hello Dunk') #ฟังก์ชั่นprint
#print('Hello Wolrd')
#print('Hello Wolrd')
#print('Hello Wolrd')
#print('Hello Wolrd')
'''
คอมเมนต์ หลายบรรทัด
'''
| 16.7
| 37
| 0.598802
| 27
| 167
| 3.925926
| 0.518519
| 0.471698
| 0.566038
| 0.566038
| 0.566038
| 0.566038
| 0.566038
| 0.566038
| 0.566038
| 0
| 0
| 0
| 0.179641
| 167
| 9
| 38
| 18.555556
| 0.729927
| 0.568862
| 0
| 0
| 0
| 0
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
95cf47203e18cdcf9016837b12268aaf871a54d5
| 45,628
|
py
|
Python
|
keras/layers/dropoutrnn.py
|
volkancirik/keras
|
a32a4c2ecdb2b0e528fb45e0942d5262ffcd735b
|
[
"MIT"
] | 2
|
2018-06-08T13:17:06.000Z
|
2020-02-13T09:34:43.000Z
|
keras/layers/dropoutrnn.py
|
volkancirik/keras
|
a32a4c2ecdb2b0e528fb45e0942d5262ffcd735b
|
[
"MIT"
] | null | null | null |
keras/layers/dropoutrnn.py
|
volkancirik/keras
|
a32a4c2ecdb2b0e528fb45e0942d5262ffcd735b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
from .. import activations, initializations, regularizers
from ..utils.theano_utils import shared_scalar, shared_zeros, alloc_zeros_matrix
from ..layers.core import Layer, MaskedLayer
from ..layers.recurrent import Recurrent
from six.moves import range
from ..regularizers import l2
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
class DropoutbiGRU(Recurrent):
def __init__(self, input_dim, output_dim=128, internal_dim = 64,
init='glorot_uniform', inner_init='orthogonal',
activation='sigmoid', inner_activation='hard_sigmoid',
weights=None, truncate_gradient=-1, return_sequences=False,
W_regularizer = l2(0.0001), U_regularizer = l2(0.0001), b_regularizer = l2(0.0001),
dropout = 0.5):
super(DropoutbiGRU, self).__init__()
self.srng = RandomStreams(seed=np.random.randint(10e6))
self.input_dim = input_dim
self.internal_dim = output_dim / 2
self.output_dim = output_dim
self.truncate_gradient = truncate_gradient
self.return_sequences = return_sequences
self.dropout = dropout
self.init = initializations.get(init)
self.inner_init = initializations.get(inner_init)
self.activation = activations.get(activation)
self.inner_activation = activations.get(inner_activation)
self.input = T.tensor3()
self.W_z = self.init((self.input_dim, self.internal_dim))
self.U_z = self.inner_init((self.internal_dim, self.internal_dim))
self.b_z = shared_zeros((self.internal_dim))
self.W_r = self.init((self.input_dim, self.internal_dim))
self.U_r = self.inner_init((self.internal_dim, self.internal_dim))
self.b_r = shared_zeros((self.internal_dim))
self.W_h = self.init((self.input_dim, self.internal_dim))
self.U_h = self.inner_init((self.internal_dim, self.internal_dim))
self.b_h = shared_zeros((self.internal_dim))
self.params = [
self.W_z, self.U_z, self.b_z,
self.W_r, self.U_r, self.b_r,
self.W_h, self.U_h, self.b_h,
]
self.regularizers = []
def appendRegulariser(input_regulariser, param, regularizers_list):
regulariser = regularizers.get(input_regulariser)
if regulariser:
regulariser.set_param(param)
regularizers_list.append(regulariser)
self.W_regularizer = W_regularizer
appendRegulariser(self.W_regularizer, self.W_z, self.regularizers)
appendRegulariser(self.W_regularizer, self.W_r, self.regularizers)
appendRegulariser(self.W_regularizer, self.W_h, self.regularizers)
self.U_regularizer = U_regularizer
appendRegulariser(self.U_regularizer, self.U_z, self.regularizers)
appendRegulariser(self.U_regularizer, self.U_r, self.regularizers)
appendRegulariser(self.U_regularizer, self.U_h, self.regularizers)
self.b_regularizer = b_regularizer
appendRegulariser(self.b_regularizer, self.b_z, self.regularizers)
appendRegulariser(self.b_regularizer, self.b_r, self.regularizers)
appendRegulariser(self.b_regularizer, self.b_h, self.regularizers)
if weights is not None:
self.set_weights(weights)
def _step(self,
xz_t, xr_t, xh_t, mask_tm1,
h_tm1, z_tm1, r_tm1,
u_z, u_r, u_h, B_U):
h_mask_tm1 = mask_tm1 * h_tm1
z = self.inner_activation(xz_t + T.dot(h_mask_tm1 * B_U[0], u_z))
r = self.inner_activation(xr_t + T.dot(h_mask_tm1 * B_U[1], u_r))
hh_t = self.activation(xh_t + T.dot(r * h_mask_tm1 * B_U[2], u_h))
h_t = z * h_mask_tm1 + (1 - z) * hh_t
z_t = z
r_t = r
return h_t, z_t, r_t
def get_output(self, train=False):
X = self.get_input(train)
padded_mask = self.get_padded_shuffled_mask(train, X, pad=1)
X = X.dimshuffle((1, 0, 2))
retain_prob_W = 1. - self.dropout
retain_prob_U = 1. - self.dropout
if train:
B_W = self.srng.binomial((3, X.shape[1], self.input_dim),
p=retain_prob_W, dtype=theano.config.floatX)
B_U = self.srng.binomial((3, X.shape[1], self.internal_dim),
p=retain_prob_U, dtype=theano.config.floatX)
else:
B_W = np.ones(3, dtype=theano.config.floatX) * retain_prob_W
B_U = np.ones(3, dtype=theano.config.floatX) * retain_prob_U
x_z = T.dot(X * B_W[0], self.W_z) + self.b_z
x_r = T.dot(X * B_W[1], self.W_r) + self.b_r
x_h = T.dot(X * B_W[2], self.W_h) + self.b_h
[fw_outputs, fw_z, fw_r], updates = theano.scan(
self._step,
sequences=[x_z, x_r, x_h, padded_mask],
outputs_info=[T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1)],
non_sequences=[self.U_z, self.U_r, self.U_h, B_U],
truncate_gradient=self.truncate_gradient)
[bw_outputs, bw_z, bw_r], updates = theano.scan(
self._step,
sequences=[x_z, x_r, x_h, padded_mask],
outputs_info=[T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1)],
non_sequences=[self.U_z, self.U_r, self.U_h, B_U],
truncate_gradient=self.truncate_gradient,go_backwards = True)
outputs = T.concatenate([fw_outputs, bw_outputs], axis = 2)
if self.return_sequences:
return outputs.dimshuffle((1, 0, 2))
return outputs[-1]
def get_details(self, train=False):
X = self.get_input(train)
padded_mask = self.get_padded_shuffled_mask(train, X, pad=1)
X = X.dimshuffle((1, 0, 2))
retain_prob_W = 1. - self.dropout
retain_prob_U = 1. - self.dropout
if train:
B_W = self.srng.binomial((3, X.shape[1], self.input_dim),
p=retain_prob_W, dtype=theano.config.floatX)
B_U = self.srng.binomial((3, X.shape[1], self.internal_dim),
p=retain_prob_U, dtype=theano.config.floatX)
else:
B_W = np.ones(3, dtype=theano.config.floatX) * retain_prob_W
B_U = np.ones(3, dtype=theano.config.floatX) * retain_prob_U
x_z = T.dot(X * B_W[0], self.W_z) + self.b_z
x_r = T.dot(X * B_W[1], self.W_r) + self.b_r
x_h = T.dot(X * B_W[2], self.W_h) + self.b_h
[fw_outputs, fw_z, fw_r], updates = theano.scan(
self._step,
sequences=[x_z, x_r, x_h, padded_mask],
outputs_info=[T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1)],
non_sequences=[self.U_z, self.U_r, self.U_h, B_U],
truncate_gradient=self.truncate_gradient)
[bw_outputs, bw_z, bw_r], updates = theano.scan(
self._step,
sequences=[x_z, x_r, x_h, padded_mask],
outputs_info=[T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1)],
non_sequences=[self.U_z, self.U_r, self.U_h, B_U],
truncate_gradient=self.truncate_gradient,go_backwards = True)
outputs = T.concatenate([fw_outputs, bw_outputs], axis = 2)
z = T.concatenate([fw_z, bw_z], axis = 2)
r = T.concatenate([fw_r, bw_r], axis = 2)
return outputs.dimshuffle((1, 0, 2)), z.dimshuffle((1, 0, 2)), r.dimshuffle((1, 0, 2))
def get_config(self):
return {"name": self.__class__.__name__,
"input_dim": self.input_dim,
"internal_dim": self.internal_dim,
"output_dim": self.output_dim,
"init": self.init.__name__,
"inner_init": self.inner_init.__name__,
"activation": self.activation.__name__,
"inner_activation": self.inner_activation.__name__,
"truncate_gradient": self.truncate_gradient,
"return_sequences": self.return_sequences,
"W_regularizer": self.W_regularizer.get_config() if self.W_regularizer else None,
"U_regularizer": self.U_regularizer.get_config() if self.b_regularizer else None,
"b_regularizer": self.b_regularizer.get_config() if self.b_regularizer else None,
"dropout": self.dropout}
class DropoutbiLSTM(Recurrent):
def __init__(self, input_dim, output_dim = 128, internal_dim = 64,
init='glorot_uniform', inner_init='orthogonal', forget_bias_init='one',
activation='tanh', inner_activation='hard_sigmoid',
weights=None, truncate_gradient=-1, return_sequences=False,
W_regularizer = l2(0.0001), U_regularizer = l2(0.0001), b_regularizer = l2(0.0001),
dropout = 0.5):
super(DropoutbiLSTM, self).__init__()
self.srng = RandomStreams(seed=np.random.randint(10e6))
self.input_dim = input_dim
self.internal_dim = output_dim / 2
self.output_dim = output_dim
self.truncate_gradient = truncate_gradient
self.return_sequences = return_sequences
self.dropout = dropout
self.init = initializations.get(init)
self.inner_init = initializations.get(inner_init)
self.forget_bias_init = initializations.get(forget_bias_init)
self.activation = activations.get(activation)
self.inner_activation = activations.get(inner_activation)
self.input = T.tensor3()
self.W_i = self.init((self.input_dim, self.internal_dim))
self.U_i = self.inner_init((self.internal_dim, self.internal_dim))
self.b_i = shared_zeros((self.internal_dim))
self.W_f = self.init((self.input_dim, self.internal_dim))
self.U_f = self.inner_init((self.internal_dim, self.internal_dim))
self.b_f = self.forget_bias_init((self.internal_dim))
self.W_c = self.init((self.input_dim, self.internal_dim))
self.U_c = self.inner_init((self.internal_dim, self.internal_dim))
self.b_c = shared_zeros((self.internal_dim))
self.W_o = self.init((self.input_dim, self.internal_dim))
self.U_o = self.inner_init((self.internal_dim, self.internal_dim))
self.b_o = shared_zeros((self.internal_dim))
self.params = [
self.W_i, self.U_i, self.b_i,
self.W_c, self.U_c, self.b_c,
self.W_f, self.U_f, self.b_f,
self.W_o, self.U_o, self.b_o,
]
self.regularizers = []
def appendRegulariser(input_regulariser, param, regularizers_list):
regulariser = regularizers.get(input_regulariser)
if regulariser:
regulariser.set_param(param)
regularizers_list.append(regulariser)
self.W_regularizer = W_regularizer
appendRegulariser(self.W_regularizer, self.W_i, self.regularizers)
appendRegulariser(self.W_regularizer, self.W_f, self.regularizers)
appendRegulariser(self.W_regularizer, self.W_c, self.regularizers)
appendRegulariser(self.W_regularizer, self.W_o, self.regularizers)
self.U_regularizer = U_regularizer
appendRegulariser(self.U_regularizer, self.U_i, self.regularizers)
appendRegulariser(self.U_regularizer, self.U_f, self.regularizers)
appendRegulariser(self.U_regularizer, self.U_c, self.regularizers)
appendRegulariser(self.U_regularizer, self.U_o, self.regularizers)
self.b_regularizer = b_regularizer
appendRegulariser(self.b_regularizer, self.b_i, self.regularizers)
appendRegulariser(self.b_regularizer, self.b_f, self.regularizers)
appendRegulariser(self.b_regularizer, self.b_c, self.regularizers)
appendRegulariser(self.b_regularizer, self.b_o, self.regularizers)
if weights is not None:
self.set_weights(weights)
    def _step(self,
              xi_t, xf_t, xo_t, xc_t, mask_tm1,
              h_tm1, c_tm1, i_tm1, f_tm1, o_tm1,
              u_i, u_f, u_o, u_c, B_U):
        """Single LSTM timestep for theano.scan.

        xi_t/xf_t/xo_t/xc_t are the precomputed input-to-gate projections;
        mask_tm1 zeroes out padded positions; h_tm1/c_tm1 are the previous
        hidden and cell states. i_tm1/f_tm1/o_tm1 are the previous gate
        activations — they are carried only so scan records them as
        outputs, and are not read here. u_* are the recurrent weight
        matrices and B_U holds one recurrent dropout mask per gate.

        Returns (h_t, c_t, i_t, f_t, o_t) for the current timestep.
        """
        # Zero out state at padded positions before applying the recurrence.
        h_mask_tm1 = mask_tm1 * h_tm1
        c_mask_tm1 = mask_tm1 * c_tm1
        # Standard LSTM gate equations; B_U[k] applies dropout to the
        # recurrent input of gate k (same mask reused at every timestep).
        i_t = self.inner_activation(xi_t + T.dot(h_mask_tm1 * B_U[0], u_i))
        f_t = self.inner_activation(xf_t + T.dot(h_mask_tm1 * B_U[1], u_f))
        c_t = f_t * c_mask_tm1 + i_t * self.activation(xc_t + T.dot(h_mask_tm1 * B_U[2], u_c))
        o_t = self.inner_activation(xo_t + T.dot(h_mask_tm1 * B_U[3], u_o))
        h_t = o_t * self.activation(c_t)
        return h_t, c_t, i_t, f_t, o_t
    def get_output(self, train=False):
        """Build the bidirectional LSTM output expression.

        Runs two theano.scan passes over the (dropout-masked) input
        projections — one forward and one backward — and concatenates
        their hidden-state sequences along the feature axis.

        Parameters: train — when True, sample fresh binomial dropout
        masks (B_W for input weights, B_U for recurrent weights); at
        test time the expected value (the retain probability) is used
        as a constant scale instead.

        Returns a tensor of shape (batch, time, 2 * internal_dim) when
        return_sequences is True, else the last timestep's output.
        """
        X = self.get_input(train)
        padded_mask = self.get_padded_shuffled_mask(train, X, pad=1)
        # scan iterates over the leading axis: move time to the front.
        X = X.dimshuffle((1, 0, 2))
        retain_prob_W = 1. - self.dropout
        retain_prob_U = 1. - self.dropout
        if train:
            # One mask per gate (4), varying over the batch axis only —
            # the same mask is reused at every timestep of a sequence.
            B_W = self.srng.binomial((4, X.shape[1], self.input_dim),
                p=retain_prob_W, dtype=theano.config.floatX)
            B_U = self.srng.binomial((4, X.shape[1], self.internal_dim),
                p=retain_prob_U, dtype=theano.config.floatX)
        else:
            # Inference: scale by the retain probability (inverted dropout
            # is not used here, so the scaling happens at test time).
            B_W = np.ones(4, dtype=theano.config.floatX) * retain_prob_W
            B_U = np.ones(4, dtype=theano.config.floatX) * retain_prob_U
        # Precompute the input contribution of each gate for all timesteps.
        xi = T.dot(X * B_W[0], self.W_i) + self.b_i
        xf = T.dot(X * B_W[1], self.W_f) + self.b_f
        xc = T.dot(X * B_W[2], self.W_c) + self.b_c
        xo = T.dot(X * B_W[3], self.W_o) + self.b_o
        # Forward pass.
        [fw_outputs, fw_memories, fw_gate_i, fw_gate_f, fw_gate_o], updates = theano.scan(
            self._step,
            sequences=[xi, xf, xo, xc, padded_mask],
            outputs_info=[
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1)
            ],
            non_sequences=[self.U_i, self.U_f, self.U_o, self.U_c, B_U],
            truncate_gradient=self.truncate_gradient)
        # Backward pass over the same inputs.
        # NOTE(review): bw_outputs are in reversed time order but are
        # concatenated below without being re-reversed, so at position t
        # the backward half corresponds to timestep (T-1-t) — confirm
        # this is intentional.
        [bw_outputs, bw_memories, bw_gate_i, bw_gate_f, bw_gate_o], updates = theano.scan(
            self._step,
            sequences=[xi, xf, xo, xc, padded_mask],
            outputs_info=[
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1)
            ],
            non_sequences=[self.U_i, self.U_f, self.U_o, self.U_c, B_U],
            truncate_gradient=self.truncate_gradient,
            go_backwards = True)
        # Concatenate forward and backward states along the feature axis.
        outputs = T.concatenate([fw_outputs, bw_outputs], axis = 2)
        if self.return_sequences:
            # Back to (batch, time, feature).
            return outputs.dimshuffle((1, 0, 2))
        return outputs[-1]
    def get_details(self, train=False):
        """Like get_output, but also return the internal signals.

        Builds the same forward+backward scan graph as get_output and
        returns (outputs, memories, gate_i, gate_f, gate_o), each
        concatenated over both directions and shuffled back to
        (batch, time, 2 * internal_dim). Useful for inspecting gate
        activations and cell states.
        """
        X = self.get_input(train)
        padded_mask = self.get_padded_shuffled_mask(train, X, pad=1)
        # scan iterates over the leading axis: move time to the front.
        X = X.dimshuffle((1, 0, 2))
        retain_prob_W = 1. - self.dropout
        retain_prob_U = 1. - self.dropout
        if train:
            # One dropout mask per gate, reused across all timesteps.
            B_W = self.srng.binomial((4, X.shape[1], self.input_dim),
                p=retain_prob_W, dtype=theano.config.floatX)
            B_U = self.srng.binomial((4, X.shape[1], self.internal_dim),
                p=retain_prob_U, dtype=theano.config.floatX)
        else:
            # Inference: scale by the retain probability instead of masking.
            B_W = np.ones(4, dtype=theano.config.floatX) * retain_prob_W
            B_U = np.ones(4, dtype=theano.config.floatX) * retain_prob_U
        # Precompute input-to-gate projections for all timesteps.
        xi = T.dot(X * B_W[0], self.W_i) + self.b_i
        xf = T.dot(X * B_W[1], self.W_f) + self.b_f
        xc = T.dot(X * B_W[2], self.W_c) + self.b_c
        xo = T.dot(X * B_W[3], self.W_o) + self.b_o
        # Forward pass.
        [fw_outputs, fw_memories, fw_gate_i, fw_gate_f, fw_gate_o], updates = theano.scan(
            self._step,
            sequences=[xi, xf, xo, xc, padded_mask],
            outputs_info=[
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1)
            ],
            non_sequences=[self.U_i, self.U_f, self.U_o, self.U_c, B_U],
            truncate_gradient=self.truncate_gradient)
        # Backward pass.
        # NOTE(review): as in get_output, the backward sequences are not
        # re-reversed before concatenation — confirm intended alignment.
        [bw_outputs, bw_memories, bw_gate_i, bw_gate_f, bw_gate_o], updates = theano.scan(
            self._step,
            sequences=[xi, xf, xo, xc, padded_mask],
            outputs_info=[
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.internal_dim), 1)
            ],
            non_sequences=[self.U_i, self.U_f, self.U_o, self.U_c, B_U],
            truncate_gradient=self.truncate_gradient,
            go_backwards = True)
        # Concatenate each signal over both directions (feature axis).
        outputs = T.concatenate([fw_outputs, bw_outputs], axis = 2)
        memories = T.concatenate([fw_memories,bw_memories], axis = 2)
        gate_i = T.concatenate([fw_gate_i,bw_gate_i], axis = 2)
        gate_f = T.concatenate([fw_gate_f,bw_gate_f], axis = 2)
        gate_o = T.concatenate([fw_gate_o,bw_gate_o], axis = 2)
        return outputs.dimshuffle((1, 0, 2)), memories.dimshuffle((1, 0, 2)), gate_i.dimshuffle((1, 0, 2)), gate_f.dimshuffle((1, 0, 2)), gate_o.dimshuffle((1, 0, 2))
def get_config(self):
return {"name": self.__class__.__name__,
"input_dim": self.input_dim,
"output_dim": self.output_dim,
"internal_dim": self.internal_dim,
"init": self.init.__name__,
"inner_init": self.inner_init.__name__,
"forget_bias_init": self.forget_bias_init.__name__,
"activation": self.activation.__name__,
"inner_activation": self.inner_activation.__name__,
"truncate_gradient": self.truncate_gradient,
"return_sequences": self.return_sequences,
"W_regularizer": self.W_regularizer.get_config() if self.W_regularizer else None,
"U_regularizer": self.U_regularizer.get_config() if self.b_regularizer else None,
"b_regularizer": self.b_regularizer.get_config() if self.b_regularizer else None,
"dropout": self.dropout
}
class DropoutGRU(Recurrent):
def __init__(self, input_dim, output_dim=128,
init='glorot_uniform', inner_init='orthogonal',
activation='sigmoid', inner_activation='hard_sigmoid',
weights=None, truncate_gradient=-1, return_sequences=False,
W_regularizer = l2(0.0001), U_regularizer = l2(0.0001), b_regularizer = l2(0.0001),
dropout = 0.5):
super(DropoutGRU, self).__init__()
self.srng = RandomStreams(seed=np.random.randint(10e6))
self.input_dim = input_dim
self.output_dim = output_dim
self.truncate_gradient = truncate_gradient
self.return_sequences = return_sequences
self.dropout = dropout
self.init = initializations.get(init)
self.inner_init = initializations.get(inner_init)
self.activation = activations.get(activation)
self.inner_activation = activations.get(inner_activation)
self.input = T.tensor3()
self.W_z = self.init((self.input_dim, self.output_dim))
self.U_z = self.inner_init((self.output_dim, self.output_dim))
self.b_z = shared_zeros((self.output_dim))
self.W_r = self.init((self.input_dim, self.output_dim))
self.U_r = self.inner_init((self.output_dim, self.output_dim))
self.b_r = shared_zeros((self.output_dim))
self.W_h = self.init((self.input_dim, self.output_dim))
self.U_h = self.inner_init((self.output_dim, self.output_dim))
self.b_h = shared_zeros((self.output_dim))
self.params = [
self.W_z, self.U_z, self.b_z,
self.W_r, self.U_r, self.b_r,
self.W_h, self.U_h, self.b_h,
]
self.regularizers = []
def appendRegulariser(input_regulariser, param, regularizers_list):
regulariser = regularizers.get(input_regulariser)
if regulariser:
regulariser.set_param(param)
regularizers_list.append(regulariser)
self.W_regularizer = W_regularizer
appendRegulariser(self.W_regularizer, self.W_z, self.regularizers)
appendRegulariser(self.W_regularizer, self.W_r, self.regularizers)
appendRegulariser(self.W_regularizer, self.W_h, self.regularizers)
self.U_regularizer = U_regularizer
appendRegulariser(self.U_regularizer, self.U_z, self.regularizers)
appendRegulariser(self.U_regularizer, self.U_r, self.regularizers)
appendRegulariser(self.U_regularizer, self.U_h, self.regularizers)
self.b_regularizer = b_regularizer
appendRegulariser(self.b_regularizer, self.b_z, self.regularizers)
appendRegulariser(self.b_regularizer, self.b_r, self.regularizers)
appendRegulariser(self.b_regularizer, self.b_h, self.regularizers)
if weights is not None:
self.set_weights(weights)
def _step(self,
xz_t, xr_t, xh_t, mask_tm1,
h_tm1, z_tm1, r_tm1,
u_z, u_r, u_h, B_U):
h_mask_tm1 = mask_tm1 * h_tm1
z = self.inner_activation(xz_t + T.dot(h_mask_tm1 * B_U[0], u_z))
r = self.inner_activation(xr_t + T.dot(h_mask_tm1 * B_U[1], u_r))
hh_t = self.activation(xh_t + T.dot(r * h_mask_tm1 * B_U[2], u_h))
h_t = z * h_mask_tm1 + (1 - z) * hh_t
z_t = z
r_t = r
return h_t, z_t, r_t
def get_output(self, train=False):
X = self.get_input(train)
padded_mask = self.get_padded_shuffled_mask(train, X, pad=1)
X = X.dimshuffle((1, 0, 2))
retain_prob_W = 1. - self.dropout
retain_prob_U = 1. - self.dropout
if train:
B_W = self.srng.binomial((3, X.shape[1], self.input_dim),
p=retain_prob_W, dtype=theano.config.floatX)
B_U = self.srng.binomial((3, X.shape[1], self.output_dim),
p=retain_prob_U, dtype=theano.config.floatX)
else:
B_W = np.ones(3, dtype=theano.config.floatX) * retain_prob_W
B_U = np.ones(3, dtype=theano.config.floatX) * retain_prob_U
x_z = T.dot(X * B_W[0], self.W_z) + self.b_z
x_r = T.dot(X * B_W[1], self.W_r) + self.b_r
x_h = T.dot(X * B_W[2], self.W_h) + self.b_h
[outputs, z, r], updates = theano.scan(
self._step,
sequences=[x_z, x_r, x_h, padded_mask],
outputs_info=[T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1)],
non_sequences=[self.U_z, self.U_r, self.U_h, B_U],
truncate_gradient=self.truncate_gradient)
if self.return_sequences:
return outputs.dimshuffle((1, 0, 2))
return outputs[-1]
def get_details(self, train=False):
X = self.get_input(train)
padded_mask = self.get_padded_shuffled_mask(train, X, pad=1)
X = X.dimshuffle((1, 0, 2))
retain_prob_W = 1. - self.dropout
retain_prob_U = 1. - self.dropout
if train:
B_W = self.srng.binomial((3, X.shape[1], self.input_dim),
p=retain_prob_W, dtype=theano.config.floatX)
B_U = self.srng.binomial((3, X.shape[1], self.output_dim),
p=retain_prob_U, dtype=theano.config.floatX)
else:
B_W = np.ones(3, dtype=theano.config.floatX) * retain_prob_W
B_U = np.ones(3, dtype=theano.config.floatX) * retain_prob_U
x_z = T.dot(X * B_W[0], self.W_z) + self.b_z
x_r = T.dot(X * B_W[1], self.W_r) + self.b_r
x_h = T.dot(X * B_W[2], self.W_h) + self.b_h
[outputs, z, r], updates = theano.scan(
self._step,
sequences=[x_z, x_r, x_h, padded_mask],
outputs_info=[T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1)],
non_sequences=[self.U_z, self.U_r, self.U_h, B_U],
truncate_gradient=self.truncate_gradient)
return outputs.dimshuffle((1, 0, 2)), z.dimshuffle((1, 0, 2)), r.dimshuffle((1, 0, 2))
def get_config(self):
return {"name": self.__class__.__name__,
"input_dim": self.input_dim,
"output_dim": self.output_dim,
"init": self.init.__name__,
"inner_init": self.inner_init.__name__,
"activation": self.activation.__name__,
"inner_activation": self.inner_activation.__name__,
"truncate_gradient": self.truncate_gradient,
"return_sequences": self.return_sequences,
"W_regularizer": self.W_regularizer.get_config() if self.W_regularizer else None,
"U_regularizer": self.U_regularizer.get_config() if self.b_regularizer else None,
"b_regularizer": self.b_regularizer.get_config() if self.b_regularizer else None,
# "W_regularizer": self.W_regularizer,
# "U_regularizer": self.U_regularizer,
# "b_regularizer": self.b_regularizer,
"dropout": self.dropout}
class NaiveDropoutGRU(Recurrent):
    """GRU with "naive" dropout: a *fresh* dropout mask per timestep.

    Unlike DropoutGRU (which shares one mask across all timesteps of a
    sequence), the masks here carry a leading time axis and B_U is fed
    to scan as a sequence, so every timestep sees new noise.
    """
    def __init__(self, input_dim, output_dim=128,
                 init='glorot_uniform', inner_init='orthogonal',
                 activation='sigmoid', inner_activation='hard_sigmoid',
                 weights=None, truncate_gradient=-1, return_sequences=False,
                 W_regularizer=None, U_regularizer=None, b_regularizer=None,
                 p_W=0.5, p_U=0.5):
        super(NaiveDropoutGRU, self).__init__()
        self.srng = RandomStreams(seed=np.random.randint(10e6))
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.truncate_gradient = truncate_gradient
        self.return_sequences = return_sequences
        # Dropout probabilities are stored in shared variables so they
        # can be changed after the graph is compiled.
        self.p_W, self.p_U = shared_zeros((1)), shared_zeros((1))
        self.p_W.set_value(np.array([p_W], dtype=theano.config.floatX))
        self.p_U.set_value(np.array([p_U], dtype=theano.config.floatX))
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)
        self.input = T.tensor3()
        # One (W, U, b) triple per gate: update (z), reset (r), candidate (h).
        self.W_z = self.init((self.input_dim, self.output_dim))
        self.U_z = self.inner_init((self.output_dim, self.output_dim))
        self.b_z = shared_zeros((self.output_dim))
        self.W_r = self.init((self.input_dim, self.output_dim))
        self.U_r = self.inner_init((self.output_dim, self.output_dim))
        self.b_r = shared_zeros((self.output_dim))
        self.W_h = self.init((self.input_dim, self.output_dim))
        self.U_h = self.inner_init((self.output_dim, self.output_dim))
        self.b_h = shared_zeros((self.output_dim))
        self.params = [
            self.W_z, self.U_z, self.b_z,
            self.W_r, self.U_r, self.b_r,
            self.W_h, self.U_h, self.b_h,
        ]
        self.regularizers = []
        def appendRegulariser(input_regulariser, param, regularizers_list):
            # Resolve and attach a regulariser to `param`, if one is set.
            regulariser = regularizers.get(input_regulariser)
            if regulariser:
                regulariser.set_param(param)
                regularizers_list.append(regulariser)
        self.W_regularizer = W_regularizer
        appendRegulariser(self.W_regularizer, self.W_z, self.regularizers)
        appendRegulariser(self.W_regularizer, self.W_r, self.regularizers)
        appendRegulariser(self.W_regularizer, self.W_h, self.regularizers)
        self.U_regularizer = U_regularizer
        appendRegulariser(self.U_regularizer, self.U_z, self.regularizers)
        appendRegulariser(self.U_regularizer, self.U_r, self.regularizers)
        appendRegulariser(self.U_regularizer, self.U_h, self.regularizers)
        self.b_regularizer = b_regularizer
        appendRegulariser(self.b_regularizer, self.b_z, self.regularizers)
        appendRegulariser(self.b_regularizer, self.b_r, self.regularizers)
        appendRegulariser(self.b_regularizer, self.b_h, self.regularizers)
        if weights is not None:
            self.set_weights(weights)
    def _step(self,
              xz_t, xr_t, xh_t, mask_tm1, B_U,
              h_tm1,
              u_z, u_r, u_h):
        """Single GRU timestep; B_U is a per-timestep recurrent dropout
        mask (passed as a scan *sequence*, unlike DropoutGRU where it is
        a non-sequence). Returns the new hidden state h_t."""
        # Zero out hidden state at padded positions.
        h_mask_tm1 = mask_tm1 * h_tm1
        z = self.inner_activation(xz_t + T.dot(h_mask_tm1 * B_U[0], u_z))
        r = self.inner_activation(xr_t + T.dot(h_mask_tm1 * B_U[1], u_r))
        hh_t = self.activation(xh_t + T.dot(r * h_mask_tm1 * B_U[2], u_h))
        h_t = z * h_mask_tm1 + (1 - z) * hh_t
        return h_t
    def get_output(self, train=False):
        """Build the GRU output expression with per-timestep dropout.

        B_W has shape (3, time, batch, input_dim) and B_U
        (time, 3, batch, output_dim) so each timestep gets its own mask;
        at test time the retain probability is used as a constant scale.
        """
        X = self.get_input(train)
        padded_mask = self.get_padded_shuffled_mask(train, X, pad=1)
        # scan iterates over the leading axis: move time to the front.
        X = X.dimshuffle((1, 0, 2))
        retain_prob_W = 1. - self.p_W[0]
        retain_prob_U = 1. - self.p_U[0]
        if train:
            # Fresh masks per gate *and* per timestep ("naive" dropout).
            B_W = self.srng.binomial((3, X.shape[0], X.shape[1], X.shape[2]),
                p=retain_prob_W, dtype=theano.config.floatX)
            B_U = self.srng.binomial((X.shape[0], 3, X.shape[1], self.output_dim),
                p=retain_prob_U, dtype=theano.config.floatX)
        else:
            B_W = np.ones(3, dtype=theano.config.floatX) * retain_prob_W
            # Time-leading shape so B_U can still be consumed as a
            # scan sequence at test time.
            B_U = T.ones((X.shape[0], 3), dtype=theano.config.floatX) * retain_prob_U
        # Precompute input-to-gate projections for all timesteps.
        x_z = T.dot(X * B_W[0], self.W_z) + self.b_z
        x_r = T.dot(X * B_W[1], self.W_r) + self.b_r
        x_h = T.dot(X * B_W[2], self.W_h) + self.b_h
        outputs, updates = theano.scan(
            self._step,
            sequences=[x_z, x_r, x_h, padded_mask, B_U],
            outputs_info=T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
            non_sequences=[self.U_z, self.U_r, self.U_h],
            truncate_gradient=self.truncate_gradient)
        if self.return_sequences:
            return outputs.dimshuffle((1, 0, 2))
        return outputs[-1]
    def get_config(self):
        """Return a configuration dict for this layer.

        NOTE(review): p_W/p_U are returned as the theano shared variables
        themselves, not plain floats — likely not JSON-serializable;
        confirm downstream consumers expect this.
        """
        return {"name": self.__class__.__name__,
                "input_dim": self.input_dim,
                "output_dim": self.output_dim,
                "init": self.init.__name__,
                "inner_init": self.inner_init.__name__,
                "activation": self.activation.__name__,
                "inner_activation": self.inner_activation.__name__,
                "truncate_gradient": self.truncate_gradient,
                "return_sequences": self.return_sequences,
                "W_regularizer": self.W_regularizer,
                "U_regularizer": self.U_regularizer,
                "b_regularizer": self.b_regularizer,
                "p_W": self.p_W,
                "p_U": self.p_U}
class DropoutLSTM(Recurrent):
    """LSTM layer with dropout on both input and recurrent connections.

    A separate binomial dropout mask is sampled for each of the four
    gates (input i, forget f, candidate c, output o). Masks vary over
    the batch axis but are shared across all timesteps of a sequence;
    at test time activations are scaled by the retain probability.
    """
    def __init__(self, input_dim, output_dim=128,
                 init='glorot_uniform', inner_init='orthogonal', forget_bias_init='one',
                 activation='tanh', inner_activation='hard_sigmoid',
                 weights=None, truncate_gradient=-1, return_sequences=False,
                 W_regularizer = l2(0.0001), U_regularizer = l2(0.0001), b_regularizer = l2(0.0001),
                 dropout = 0.5):
        super(DropoutLSTM, self).__init__()
        self.srng = RandomStreams(seed=np.random.randint(10e6))
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.truncate_gradient = truncate_gradient
        self.return_sequences = return_sequences
        self.dropout = dropout
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.forget_bias_init = initializations.get(forget_bias_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)
        self.input = T.tensor3()
        # One (W, U, b) triple per gate: input (i), forget (f),
        # candidate (c), output (o). Forget biases start at one.
        self.W_i = self.init((self.input_dim, self.output_dim))
        self.U_i = self.inner_init((self.output_dim, self.output_dim))
        self.b_i = shared_zeros((self.output_dim))
        self.W_f = self.init((self.input_dim, self.output_dim))
        self.U_f = self.inner_init((self.output_dim, self.output_dim))
        self.b_f = self.forget_bias_init((self.output_dim))
        self.W_c = self.init((self.input_dim, self.output_dim))
        self.U_c = self.inner_init((self.output_dim, self.output_dim))
        self.b_c = shared_zeros((self.output_dim))
        self.W_o = self.init((self.input_dim, self.output_dim))
        self.U_o = self.inner_init((self.output_dim, self.output_dim))
        self.b_o = shared_zeros((self.output_dim))
        self.params = [
            self.W_i, self.U_i, self.b_i,
            self.W_c, self.U_c, self.b_c,
            self.W_f, self.U_f, self.b_f,
            self.W_o, self.U_o, self.b_o,
        ]
        self.regularizers = []
        def appendRegulariser(input_regulariser, param, regularizers_list):
            # Resolve and attach a regulariser to `param`, if one is set.
            regulariser = regularizers.get(input_regulariser)
            if regulariser:
                regulariser.set_param(param)
                regularizers_list.append(regulariser)
        # NOTE(review): when a single regulariser *instance* is passed (as
        # the l2(...) defaults are), repeated set_param calls may rebind the
        # same object — verify regularizers.get returns a fresh copy per call.
        self.W_regularizer = W_regularizer
        appendRegulariser(self.W_regularizer, self.W_i, self.regularizers)
        appendRegulariser(self.W_regularizer, self.W_f, self.regularizers)
        appendRegulariser(self.W_regularizer, self.W_c, self.regularizers)
        appendRegulariser(self.W_regularizer, self.W_o, self.regularizers)
        self.U_regularizer = U_regularizer
        appendRegulariser(self.U_regularizer, self.U_i, self.regularizers)
        appendRegulariser(self.U_regularizer, self.U_f, self.regularizers)
        appendRegulariser(self.U_regularizer, self.U_c, self.regularizers)
        appendRegulariser(self.U_regularizer, self.U_o, self.regularizers)
        self.b_regularizer = b_regularizer
        appendRegulariser(self.b_regularizer, self.b_i, self.regularizers)
        appendRegulariser(self.b_regularizer, self.b_f, self.regularizers)
        appendRegulariser(self.b_regularizer, self.b_c, self.regularizers)
        appendRegulariser(self.b_regularizer, self.b_o, self.regularizers)
        if weights is not None:
            self.set_weights(weights)

    def _step(self,
              xi_t, xf_t, xo_t, xc_t, mask_tm1,
              h_tm1, c_tm1, i_tm1, f_tm1, o_tm1,
              u_i, u_f, u_o, u_c, B_U):
        """Single LSTM timestep for theano.scan.

        i_tm1/f_tm1/o_tm1 are carried only so scan records the gate
        activations as outputs; they are not read here. Returns
        (h_t, c_t, i_t, f_t, o_t).
        """
        # Zero out state at padded positions before the recurrence.
        h_mask_tm1 = mask_tm1 * h_tm1
        c_mask_tm1 = mask_tm1 * c_tm1
        # Standard LSTM gate equations with recurrent dropout B_U[k].
        i_t = self.inner_activation(xi_t + T.dot(h_mask_tm1 * B_U[0], u_i))
        f_t = self.inner_activation(xf_t + T.dot(h_mask_tm1 * B_U[1], u_f))
        c_t = f_t * c_mask_tm1 + i_t * self.activation(xc_t + T.dot(h_mask_tm1 * B_U[2], u_c))
        o_t = self.inner_activation(xo_t + T.dot(h_mask_tm1 * B_U[3], u_o))
        h_t = o_t * self.activation(c_t)
        return h_t, c_t, i_t, f_t, o_t

    def get_output(self, train=False):
        """Build the LSTM output expression.

        When train is True, fresh binomial dropout masks are sampled for
        the input (B_W) and recurrent (B_U) connections; at test time the
        retain probability is applied as a constant scale.
        """
        X = self.get_input(train)
        padded_mask = self.get_padded_shuffled_mask(train, X, pad=1)
        # scan iterates over the leading axis: move time to the front.
        X = X.dimshuffle((1, 0, 2))
        retain_prob_W = 1. - self.dropout
        retain_prob_U = 1. - self.dropout
        if train:
            # One mask per gate (4), shared across all timesteps.
            B_W = self.srng.binomial((4, X.shape[1], self.input_dim),
                p=retain_prob_W, dtype=theano.config.floatX)
            B_U = self.srng.binomial((4, X.shape[1], self.output_dim),
                p=retain_prob_U, dtype=theano.config.floatX)
        else:
            B_W = np.ones(4, dtype=theano.config.floatX) * retain_prob_W
            B_U = np.ones(4, dtype=theano.config.floatX) * retain_prob_U
        # Precompute input-to-gate projections for all timesteps.
        xi = T.dot(X * B_W[0], self.W_i) + self.b_i
        xf = T.dot(X * B_W[1], self.W_f) + self.b_f
        xc = T.dot(X * B_W[2], self.W_c) + self.b_c
        xo = T.dot(X * B_W[3], self.W_o) + self.b_o
        [outputs, memories, gate_i, gate_f, gate_o], updates = theano.scan(
            self._step,
            sequences=[xi, xf, xo, xc, padded_mask],
            outputs_info=[
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1)
            ],
            non_sequences=[self.U_i, self.U_f, self.U_o, self.U_c, B_U],
            truncate_gradient=self.truncate_gradient)
        if self.return_sequences:
            return outputs.dimshuffle((1, 0, 2))
        return outputs[-1]

    def get_details(self, train=False):
        """Like get_output, but return the internal signals:
        (outputs, memories, gate_i, gate_f, gate_o), each shuffled back
        to (batch, time, output_dim)."""
        X = self.get_input(train)
        padded_mask = self.get_padded_shuffled_mask(train, X, pad=1)
        X = X.dimshuffle((1, 0, 2))
        retain_prob_W = 1. - self.dropout
        retain_prob_U = 1. - self.dropout
        if train:
            B_W = self.srng.binomial((4, X.shape[1], self.input_dim),
                p=retain_prob_W, dtype=theano.config.floatX)
            B_U = self.srng.binomial((4, X.shape[1], self.output_dim),
                p=retain_prob_U, dtype=theano.config.floatX)
        else:
            B_W = np.ones(4, dtype=theano.config.floatX) * retain_prob_W
            B_U = np.ones(4, dtype=theano.config.floatX) * retain_prob_U
        xi = T.dot(X * B_W[0], self.W_i) + self.b_i
        xf = T.dot(X * B_W[1], self.W_f) + self.b_f
        xc = T.dot(X * B_W[2], self.W_c) + self.b_c
        xo = T.dot(X * B_W[3], self.W_o) + self.b_o
        [outputs, memories, gate_i, gate_f, gate_o], updates = theano.scan(
            self._step,
            sequences=[xi, xf, xo, xc, padded_mask],
            outputs_info=[
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1)
            ],
            non_sequences=[self.U_i, self.U_f, self.U_o, self.U_c, B_U],
            truncate_gradient=self.truncate_gradient)
        return outputs.dimshuffle((1, 0, 2)), memories.dimshuffle((1, 0, 2)), gate_i.dimshuffle((1, 0, 2)), gate_f.dimshuffle((1, 0, 2)), gate_o.dimshuffle((1, 0, 2))

    def get_config(self):
        """Return a serializable configuration dict for this layer."""
        return {"name": self.__class__.__name__,
                "input_dim": self.input_dim,
                "output_dim": self.output_dim,
                "init": self.init.__name__,
                "inner_init": self.inner_init.__name__,
                "forget_bias_init": self.forget_bias_init.__name__,
                "activation": self.activation.__name__,
                "inner_activation": self.inner_activation.__name__,
                "truncate_gradient": self.truncate_gradient,
                "return_sequences": self.return_sequences,
                "W_regularizer": self.W_regularizer.get_config() if self.W_regularizer else None,
                # Bug fix: this entry was previously guarded on
                # self.b_regularizer instead of self.U_regularizer.
                "U_regularizer": self.U_regularizer.get_config() if self.U_regularizer else None,
                "b_regularizer": self.b_regularizer.get_config() if self.b_regularizer else None,
                "dropout": self.dropout
                }
class MultiplicativeGaussianNoiseLSTM(Recurrent):
    """LSTM regularized with multiplicative Gaussian noise.

    Instead of binomial dropout masks, inputs and recurrent states are
    multiplied by Gaussian noise with mean 1 and standard deviation
    sqrt(p / (1 - p)) — the Gaussian analogue of dropout with rate p.
    Because the noise has mean 1, no rescaling is needed at test time.
    """
    def __init__(self, input_dim, output_dim=128,
                 init='glorot_uniform', inner_init='orthogonal', forget_bias_init='one',
                 activation='tanh', inner_activation='hard_sigmoid',
                 weights=None, truncate_gradient=-1, return_sequences=False,
                 p_W=0.5, p_U=0.5):
        super(MultiplicativeGaussianNoiseLSTM, self).__init__()
        self.srng = RandomStreams(seed=np.random.randint(10e6))
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.truncate_gradient = truncate_gradient
        self.return_sequences = return_sequences
        # Noise rates live in shared variables so they can be changed
        # after the graph is compiled.
        self.p_W, self.p_U = shared_zeros((1)), shared_zeros((1))
        self.p_W.set_value(np.array([p_W], dtype=theano.config.floatX))
        self.p_U.set_value(np.array([p_U], dtype=theano.config.floatX))
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.forget_bias_init = initializations.get(forget_bias_init)
        self.activation = activations.get(activation)
        self.inner_activation = activations.get(inner_activation)
        self.input = T.tensor3()
        # One (W, U, b) triple per gate: input (i), forget (f),
        # candidate (c), output (o). Forget biases start at one.
        self.W_i = self.init((self.input_dim, self.output_dim))
        self.U_i = self.inner_init((self.output_dim, self.output_dim))
        self.b_i = shared_zeros((self.output_dim))
        self.W_f = self.init((self.input_dim, self.output_dim))
        self.U_f = self.inner_init((self.output_dim, self.output_dim))
        self.b_f = self.forget_bias_init((self.output_dim))
        self.W_c = self.init((self.input_dim, self.output_dim))
        self.U_c = self.inner_init((self.output_dim, self.output_dim))
        self.b_c = shared_zeros((self.output_dim))
        self.W_o = self.init((self.input_dim, self.output_dim))
        self.U_o = self.inner_init((self.output_dim, self.output_dim))
        self.b_o = shared_zeros((self.output_dim))
        self.params = [
            self.W_i, self.U_i, self.b_i,
            self.W_c, self.U_c, self.b_c,
            self.W_f, self.U_f, self.b_f,
            self.W_o, self.U_o, self.b_o,
        ]
        if weights is not None:
            self.set_weights(weights)
    def _step(self,
              xi_t, xf_t, xo_t, xc_t, mask_tm1,
              h_tm1, c_tm1,
              u_i, u_f, u_o, u_c, N_U):
        """Single LSTM timestep; N_U holds one multiplicative Gaussian
        noise tensor per gate. Returns (h_t, c_t)."""
        # Zero out state at padded positions before the recurrence.
        h_mask_tm1 = mask_tm1 * h_tm1
        c_mask_tm1 = mask_tm1 * c_tm1
        # Standard LSTM gate equations with noise N_U[k] on the
        # recurrent input of each gate (same noise at every timestep).
        i_t = self.inner_activation(xi_t + T.dot(h_mask_tm1 * N_U[0], u_i))
        f_t = self.inner_activation(xf_t + T.dot(h_mask_tm1 * N_U[1], u_f))
        c_t = f_t * c_mask_tm1 + i_t * self.activation(xc_t + T.dot(h_mask_tm1 * N_U[2], u_c))
        o_t = self.inner_activation(xo_t + T.dot(h_mask_tm1 * N_U[3], u_o))
        h_t = o_t * self.activation(c_t)
        return h_t, c_t
    def get_output(self, train=False):
        """Build the LSTM output expression.

        Training: multiply inputs/recurrent states by Gaussian noise
        with mean 1 and std sqrt(p/(1-p)). Inference: noise tensors are
        all-ones (mean of the training noise), so no rescaling occurs.
        """
        X = self.get_input(train)
        padded_mask = self.get_padded_shuffled_mask(train, X, pad=1)
        # scan iterates over the leading axis: move time to the front.
        X = X.dimshuffle((1, 0, 2))
        # Noise std matching a dropout rate p: sqrt(p / (1 - p)).
        std_W = T.sqrt(self.p_W[0] / (1.0 - self.p_W[0]))
        std_U = T.sqrt(self.p_U[0] / (1.0 - self.p_U[0]))
        if train:
            # One noise tensor per gate, shared across timesteps.
            N_W = self.srng.normal((4, X.shape[1], self.input_dim),
                avg=1.0, std=std_W, dtype=theano.config.floatX)
            N_U = self.srng.normal((4, X.shape[1], self.output_dim),
                avg=1.0, std=std_U, dtype=theano.config.floatX)
        else:
            N_W = np.ones(4, dtype=theano.config.floatX)
            N_U = np.ones(4, dtype=theano.config.floatX)
        # Precompute input-to-gate projections for all timesteps.
        xi = T.dot(X * N_W[0], self.W_i) + self.b_i
        xf = T.dot(X * N_W[1], self.W_f) + self.b_f
        xc = T.dot(X * N_W[2], self.W_c) + self.b_c
        xo = T.dot(X * N_W[3], self.W_o) + self.b_o
        [outputs, memories], updates = theano.scan(
            self._step,
            sequences=[xi, xf, xo, xc, padded_mask],
            outputs_info=[
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
                T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1)
            ],
            non_sequences=[self.U_i, self.U_f, self.U_o, self.U_c, N_U],
            truncate_gradient=self.truncate_gradient)
        if self.return_sequences:
            return outputs.dimshuffle((1, 0, 2))
        return outputs[-1]
    def get_config(self):
        """Return a configuration dict for this layer.

        NOTE(review): p_W/p_U are returned as the theano shared variables
        themselves, not plain floats — likely not JSON-serializable;
        confirm downstream consumers expect this.
        """
        return {"name": self.__class__.__name__,
                "input_dim": self.input_dim,
                "output_dim": self.output_dim,
                "init": self.init.__name__,
                "inner_init": self.inner_init.__name__,
                "forget_bias_init": self.forget_bias_init.__name__,
                "activation": self.activation.__name__,
                "inner_activation": self.inner_activation.__name__,
                "truncate_gradient": self.truncate_gradient,
                "return_sequences": self.return_sequences,
                "p_W": self.p_W,
                "p_U": self.p_U}
class NaiveDropoutLSTM(Recurrent):
def __init__(self, input_dim, output_dim=128,
init='glorot_uniform', inner_init='orthogonal', forget_bias_init='one',
activation='tanh', inner_activation='hard_sigmoid',
weights=None, truncate_gradient=-1, return_sequences=False,
W_regularizer=None, U_regularizer=None, b_regularizer=None,
p_W=0.5, p_U=0.5):
super(NaiveDropoutLSTM, self).__init__()
self.srng = RandomStreams(seed=np.random.randint(10e6))
self.input_dim = input_dim
self.output_dim = output_dim
self.truncate_gradient = truncate_gradient
self.return_sequences = return_sequences
self.p_W, self.p_U = shared_zeros((1)), shared_zeros((1))
self.p_W.set_value(np.array([p_W], dtype=theano.config.floatX))
self.p_U.set_value(np.array([p_U], dtype=theano.config.floatX))
self.init = initializations.get(init)
self.inner_init = initializations.get(inner_init)
self.forget_bias_init = initializations.get(forget_bias_init)
self.activation = activations.get(activation)
self.inner_activation = activations.get(inner_activation)
self.input = T.tensor3()
self.W_i = self.init((self.input_dim, self.output_dim))
self.U_i = self.inner_init((self.output_dim, self.output_dim))
self.b_i = shared_zeros((self.output_dim))
self.W_f = self.init((self.input_dim, self.output_dim))
self.U_f = self.inner_init((self.output_dim, self.output_dim))
self.b_f = self.forget_bias_init((self.output_dim))
self.W_c = self.init((self.input_dim, self.output_dim))
self.U_c = self.inner_init((self.output_dim, self.output_dim))
self.b_c = shared_zeros((self.output_dim))
self.W_o = self.init((self.input_dim, self.output_dim))
self.U_o = self.inner_init((self.output_dim, self.output_dim))
self.b_o = shared_zeros((self.output_dim))
self.params = [
self.W_i, self.U_i, self.b_i,
self.W_c, self.U_c, self.b_c,
self.W_f, self.U_f, self.b_f,
self.W_o, self.U_o, self.b_o,
]
self.regularizers = []
def appendRegulariser(input_regulariser, param, regularizers_list):
regulariser = regularizers.get(input_regulariser)
if regulariser:
regulariser.set_param(param)
regularizers_list.append(regulariser)
self.W_regularizer = W_regularizer
appendRegulariser(self.W_regularizer, self.W_i, self.regularizers)
appendRegulariser(self.W_regularizer, self.W_f, self.regularizers)
appendRegulariser(self.W_regularizer, self.W_i, self.regularizers)
appendRegulariser(self.W_regularizer, self.W_o, self.regularizers)
self.U_regularizer = U_regularizer
appendRegulariser(self.U_regularizer, self.U_i, self.regularizers)
appendRegulariser(self.U_regularizer, self.U_f, self.regularizers)
appendRegulariser(self.U_regularizer, self.U_i, self.regularizers)
appendRegulariser(self.U_regularizer, self.U_o, self.regularizers)
self.b_regularizer = b_regularizer
appendRegulariser(self.b_regularizer, self.b_i, self.regularizers)
appendRegulariser(self.b_regularizer, self.b_f, self.regularizers)
appendRegulariser(self.b_regularizer, self.b_i, self.regularizers)
appendRegulariser(self.b_regularizer, self.b_o, self.regularizers)
if weights is not None:
self.set_weights(weights)
def _step(self,
xi_t, xf_t, xo_t, xc_t, mask_tm1, B_U,
h_tm1, c_tm1,
u_i, u_f, u_o, u_c):
h_mask_tm1 = mask_tm1 * h_tm1
c_mask_tm1 = mask_tm1 * c_tm1
i_t = self.inner_activation(xi_t + T.dot(h_mask_tm1 * B_U[0], u_i))
f_t = self.inner_activation(xf_t + T.dot(h_mask_tm1 * B_U[1], u_f))
c_t = f_t * c_mask_tm1 + i_t * self.activation(xc_t + T.dot(h_mask_tm1 * B_U[2], u_c))
o_t = self.inner_activation(xo_t + T.dot(h_mask_tm1 * B_U[3], u_o))
h_t = o_t * self.activation(c_t)
return h_t, c_t
def get_output(self, train=False):
X = self.get_input(train)
padded_mask = self.get_padded_shuffled_mask(train, X, pad=1)
X = X.dimshuffle((1, 0, 2))
self.train = train
retain_prob_W = 1. - self.p_W[0]
retain_prob_U = 1. - self.p_U[0]
if train:
B_W = self.srng.binomial((4, X.shape[0], X.shape[1], X.shape[2]),
p=retain_prob_W, dtype=theano.config.floatX)
B_U = self.srng.binomial((X.shape[0], 4, X.shape[1], self.output_dim),
p=retain_prob_U, dtype=theano.config.floatX)
else:
B_W = T.ones((4), dtype=theano.config.floatX) * retain_prob_W
B_U = T.ones((X.shape[0], 4), dtype=theano.config.floatX) * retain_prob_U
xi = T.dot(X * B_W[0], self.W_i) + self.b_i
xf = T.dot(X * B_W[1], self.W_f) + self.b_f
xc = T.dot(X * B_W[2], self.W_c) + self.b_c
xo = T.dot(X * B_W[3], self.W_o) + self.b_o
[outputs, memories], updates = theano.scan(
self._step,
sequences=[xi, xf, xo, xc, padded_mask, B_U],
outputs_info=[
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1)
],
non_sequences=[self.U_i, self.U_f, self.U_o, self.U_c],
truncate_gradient=self.truncate_gradient)
if self.return_sequences:
return outputs.dimshuffle((1, 0, 2))
return outputs[-1]
def get_config(self):
    """Return a serialisable dict describing this layer's configuration.

    Callables (initializers/activations) are stored by their function
    name so the layer can be reconstructed from the config.
    """
    config = {"name": self.__class__.__name__}
    config["input_dim"] = self.input_dim
    config["output_dim"] = self.output_dim
    config["init"] = self.init.__name__
    config["inner_init"] = self.inner_init.__name__
    config["forget_bias_init"] = self.forget_bias_init.__name__
    config["activation"] = self.activation.__name__
    config["inner_activation"] = self.inner_activation.__name__
    config["truncate_gradient"] = self.truncate_gradient
    config["return_sequences"] = self.return_sequences
    config["p_W"] = self.p_W
    config["p_U"] = self.p_U
    return config
| 39.989483
| 160
| 0.72175
| 7,661
| 45,628
| 3.987077
| 0.020754
| 0.025863
| 0.048519
| 0.026289
| 0.971681
| 0.970732
| 0.966279
| 0.962056
| 0.957636
| 0.95338
| 0
| 0.016971
| 0.137328
| 45,628
| 1,140
| 161
| 40.024561
| 0.759032
| 0.007254
| 0
| 0.89375
| 0
| 0
| 0.028749
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039583
| false
| 0
| 0.011458
| 0.007292
| 0.091667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95d0dc4ca88d377a725a07e8dce6a3d5317639af
| 4,122
|
py
|
Python
|
api/databaseutils.py
|
Guergeiro/bd2-group2
|
34520e40ef915dc7bb89486ac2322f8994ac9862
|
[
"MIT"
] | null | null | null |
api/databaseutils.py
|
Guergeiro/bd2-group2
|
34520e40ef915dc7bb89486ac2322f8994ac9862
|
[
"MIT"
] | 12
|
2019-11-17T21:22:49.000Z
|
2020-01-19T18:10:00.000Z
|
api/databaseutils.py
|
Guergeiro/bd2-group2
|
34520e40ef915dc7bb89486ac2322f8994ac9862
|
[
"MIT"
] | 2
|
2019-11-15T16:47:21.000Z
|
2019-11-17T13:59:40.000Z
|
from flask import request, jsonify
import psycopg2
import sys
def getAll(columns, query):
    """Execute *query*, fetch every row and return a Flask JSON response.

    Args:
        columns: Column names used as dict keys for each returned row.
        query: SQL string to run. NOTE(review): callers build this string
            themselves -- prefer parameterised queries to avoid SQL injection.

    Returns:
        (response, status): list of row dicts with 200 on success, or the
        error message with 400 on failure.
    """
    # Pre-bind so the finally block never hits an unbound name when
    # psycopg2.connect() (or cursor()) itself raises.
    connection = None
    cursor = None
    try:
        connection = psycopg2.connect(host=sys.argv[1],
                                      port=sys.argv[2],
                                      database=sys.argv[3],
                                      user=sys.argv[4],
                                      password=sys.argv[5])
        cursor = connection.cursor()
        cursor.execute(query)
        query_result = cursor.fetchall()
        connection.commit()
    except (Exception, psycopg2.Error) as error:
        # str(error): exception instances are not JSON serialisable.
        return jsonify({"message": str(error)}), 400
    finally:
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
            print("PostgreSQL connection is closed")
    return jsonify({"message": beautifyFetchAll(columns, query_result)}), 200
def getOne(columns, query):
    """Execute *query*, fetch a single row and return a Flask JSON response.

    Args:
        columns: Column names used as dict keys for the returned row.
        query: SQL string to run. NOTE(review): callers build this string
            themselves -- prefer parameterised queries to avoid SQL injection.

    Returns:
        (response, status): the row as a dict with 200 on success,
        "Not Found" with 400 when the query matches nothing, or the
        error message with 400 on failure.
    """
    # Pre-bind so the finally block never hits an unbound name when
    # psycopg2.connect() (or cursor()) itself raises.
    connection = None
    cursor = None
    try:
        connection = psycopg2.connect(host=sys.argv[1],
                                      port=sys.argv[2],
                                      database=sys.argv[3],
                                      user=sys.argv[4],
                                      password=sys.argv[5])
        cursor = connection.cursor()
        cursor.execute(query)
        query_result = cursor.fetchone()
        connection.commit()
    except (Exception, psycopg2.Error) as error:
        # str(error): exception instances are not JSON serialisable.
        return jsonify({"message": str(error)}), 400
    finally:
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
            print("PostgreSQL connection is closed")
    if query_result is None:
        return jsonify({"message": "Not Found"}), 400
    return jsonify({"message": beautifyFetchOne(columns, query_result)}), 200
def postOne(columns, query):
    """Execute an INSERT-style *query* and return the created row as JSON.

    Args:
        columns: Column names used as dict keys for the returned row.
        query: SQL string to run. NOTE(review): callers build this string
            themselves -- prefer parameterised queries to avoid SQL injection.
            Presumably the INSERT uses RETURNING so fetchone() yields a
            row -- confirm against callers.

    Returns:
        (response, status): the inserted row as a dict with 200 on
        success, or the error message with 400 on failure.
    """
    # Pre-bind so the finally block never hits an unbound name when
    # psycopg2.connect() (or cursor()) itself raises.
    connection = None
    cursor = None
    try:
        connection = psycopg2.connect(host=sys.argv[1],
                                      port=sys.argv[2],
                                      database=sys.argv[3],
                                      user=sys.argv[4],
                                      password=sys.argv[5])
        cursor = connection.cursor()
        cursor.execute(query)
        query_result = cursor.fetchone()
        connection.commit()
    except (Exception, psycopg2.Error) as error:
        # str(error): exception instances are not JSON serialisable.
        return jsonify({"message": str(error)}), 400
    finally:
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
            print("PostgreSQL connection is closed")
    return jsonify({"message": beautifyFetchOne(columns, query_result)}), 200
def putOne(query):
    """Execute an UPDATE-style *query* and return a success/error response.

    Args:
        query: SQL string to run. NOTE(review): callers build this string
            themselves -- prefer parameterised queries to avoid SQL injection.

    Returns:
        (response, status): {"message": "Success"} with 200 on success,
        or the error message with 400 on failure.
    """
    # Pre-bind so the finally block never hits an unbound name when
    # psycopg2.connect() (or cursor()) itself raises.
    connection = None
    cursor = None
    try:
        connection = psycopg2.connect(host=sys.argv[1],
                                      port=sys.argv[2],
                                      database=sys.argv[3],
                                      user=sys.argv[4],
                                      password=sys.argv[5])
        cursor = connection.cursor()
        cursor.execute(query)
        connection.commit()
    except (Exception, psycopg2.Error) as error:
        # str(error): exception instances are not JSON serialisable.
        return jsonify({"message": str(error)}), 400
    finally:
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
            print("PostgreSQL connection is closed")
    return jsonify({"message": "Success"}), 200
def deleteOne(query):
    """Execute a DELETE-style *query* and return a success/error response.

    Args:
        query: SQL string to run. NOTE(review): callers build this string
            themselves -- prefer parameterised queries to avoid SQL injection.

    Returns:
        (response, status): {"message": "Success"} with 200 on success,
        or the error message with 400 on failure.
    """
    # Pre-bind so the finally block never hits an unbound name when
    # psycopg2.connect() (or cursor()) itself raises.
    connection = None
    cursor = None
    try:
        connection = psycopg2.connect(host=sys.argv[1],
                                      port=sys.argv[2],
                                      database=sys.argv[3],
                                      user=sys.argv[4],
                                      password=sys.argv[5])
        cursor = connection.cursor()
        cursor.execute(query)
        connection.commit()
    except (Exception, psycopg2.Error) as error:
        # str(error): exception instances are not JSON serialisable.
        return jsonify({"message": str(error)}), 400
    finally:
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
            print("PostgreSQL connection is closed")
    return jsonify({"message": "Success"}), 200
def beautifyFetchAll(array1, array2):
    """Convert fetched rows into a list of column-name -> value dicts.

    Args:
        array1: Column names (dict keys).
        array2: Iterable of row tuples from cursor.fetchall().

    Returns:
        One dict per row, pairing each column name with its value.
    """
    rows = []
    for record in array2:
        rows.append(dict(zip(array1, record)))
    return rows
def beautifyFetchOne(array1, array2):
    """Pair column names with the values of one fetched row.

    Args:
        array1: Column names (dict keys).
        array2: A single row tuple from cursor.fetchone().

    Returns:
        A dict mapping each column name to the matching row value.
    """
    return {key: value for key, value in zip(array1, array2)}
| 30.308824
| 77
| 0.510189
| 381
| 4,122
| 5.501312
| 0.165354
| 0.083492
| 0.104962
| 0.062023
| 0.890744
| 0.879294
| 0.849714
| 0.849714
| 0.849714
| 0.802004
| 0
| 0.030184
| 0.381126
| 4,122
| 135
| 78
| 30.533333
| 0.791454
| 0
| 0
| 0.843137
| 0
| 0
| 0.061863
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068627
| false
| 0.04902
| 0.029412
| 0.019608
| 0.22549
| 0.04902
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95e26e4cba32c71a753a95113c5e51558d4c7da3
| 34,225
|
py
|
Python
|
test/test_acl_report.py
|
broth-itk/firewall_policy_report
|
4487797abfac13e71024813425b45fb48a8c3e1f
|
[
"MIT"
] | 1
|
2022-03-05T08:58:57.000Z
|
2022-03-05T08:58:57.000Z
|
test/test_acl_report.py
|
broth-itk/firewall_policy_report
|
4487797abfac13e71024813425b45fb48a8c3e1f
|
[
"MIT"
] | null | null | null |
test/test_acl_report.py
|
broth-itk/firewall_policy_report
|
4487797abfac13e71024813425b45fb48a8c3e1f
|
[
"MIT"
] | 1
|
2022-01-21T07:47:16.000Z
|
2022-01-21T07:47:16.000Z
|
import re
import os
# From the named script import the functions to be tested
from main import create_fw_dict
import asa
import ckp
from .example_acls import ckp_acl
# INPUT_DATA: Tests that the input file of FWs is converted into corretc data-model format
def test_data_model():
    """INPUT_DATA: the input file of FWs is converted into the correct
    data-model format, with per-FW credentials falling back to the
    per-type and then the global credentials."""
    input_vars = {
        'user': 'glob_user',
        'pword': 'glob_pword',
        'asa': {'user': 'asa_user', 'pword': 'asa_pword',
                'fw': [{'ip_name': '10.10.10.1', 'user': 'fw_user', 'pword': 'fw_pword'},
                       {'ip_name': '10.10.10.2'}]},
        'ckp': {'fw': [{'ip_name': '10.10.20.1', 'user': 'fw_user', 'pword': 'fw_pword'},
                       {'ip_name': '10.10.20.2'}]},
    }
    fw_type = []  # Required to stop errors
    expected_asa = {'asa': [{'10.10.10.1': ('fw_user', 'fw_pword')},
                            {'10.10.10.2': ('asa_user', 'asa_pword')}]}
    expected_ckp = {'ckp': [{'10.10.20.1': ('fw_user', 'fw_pword')},
                            {'10.10.20.2': ('glob_user', 'glob_pword')}]}
    assert dict(create_fw_dict(input_vars, 'asa')) == expected_asa
    assert dict(create_fw_dict(input_vars, 'ckp')) == expected_ckp
# ASA_FORMAT: Loads test ACLs and ensures that the ACL and Expanded ACL are output in the correct formated
def test_asa_format_data():
    """ASA_FORMAT: Loads test ACLs and ensures that the ACL and Expanded
    ACL are output in the correct format (per-ACE list-of-lists)."""
    # Load the files and remove blanks from acl_brief (cant do in script as in function that requires device connectivity)
    with open(os.path.join(os.path.dirname(__file__), 'example_acls', 'asa_acl_brief.txt')) as file_content:
        acl_brief_temp = file_content.read()
    with open(os.path.join(os.path.dirname(__file__), 'example_acls', 'asa_acl_expanded.txt')) as file_content:
        acl_expanded = file_content.read()
    acl_brief = []
    # NOTE(review): acl_brief_temp is a str, so this iterates single
    # characters; splitlines() on one char yields a 1-char string and the
    # \S{8}\s\S{8} regex can never match, leaving acl_brief empty --
    # confirm whether acl_brief_temp was meant to be a list of blobs.
    for item in acl_brief_temp:
        for line in item.splitlines():
            if re.match(r"^\S{8}\s\S{8}\s", line):
                acl_brief.append(line)
    acl = asa.format_acl('1.1.1.1', acl_brief, acl_expanded)
    # Plain ACL: one entry per configured ACE.
    assert acl['1.1.1.1_acl'] == [['stecap', '1', 'permit', 'ip', 'any', 'any_port', 'any', 'any_port', '0', '', '', ''] ,
        ['stecap', '2', 'permit', 'tcp', '10.10.10.0/32', 'any_port', 'any', '443', '0', '', '', ''] ,
        ['mgmt', '2', 'permit', 'icmp', 'any', 'any_port', 'any', 'echo', '13759', '', '', ''] ,
        ['mgmt', '3', 'permit', 'icmp', '1.1.1.1/32', 'any_port', 'any', 'echo-reply', '0', '', '', ''] ,
        ['mgmt', '4', 'permit', 'icmp', 'any', 'any_port', '2.2.2.2/32', 'unreachable', '3028', '', '', ''] ,
        ['mgmt', '5', 'permit', 'icmp', '10.10.10.0/24', 'any_port', 'any', 'time-exceeded', '0', '', '', ''] ,
        ['mgmt', '6', 'deny', 'icmp', 'any', 'any_port', 'any', 'any_port', '0', '', '', ''] ,
        ['mgmt', '9', 'permit', 'tcp', '10.10.10.1/32', 'any_port', 'obj_67-68', 'any_port', '0', '', '', 'inactive'] ,
        ['mgmt', '10', 'permit', 'tcp', 'any', '22', '20.20.20.0/24', '67-68', '9222', '', '', ''] ,
        ['mgmt', '11', 'permit', 'tcp', '10.10.10.1/32', 'any_port', 'obj_67-68', 'any_port', '0', '', '', ''] ,
        ['mgmt', '12', 'permit', 'tcp', '20.20.20.0/24', '22', 'any', '22', '1227', '', '', ''] ,
        ['Outside_mpc', '1', 'permit', 'icmp', 'any', 'any_port', 'any', 'echo-reply', '30', '', '', ''] ,
        ['Outside_mpc', '2', 'permit', 'ip', 'any', 'any_port', '185.4.167.128/28', 'any_port', '0', '', '', 'inactive'] ,
        ['Outside_mpc', '3', 'permit', 'tcp', 'any', 'any_port', 'obj_dc1dmznpgp03', 'any_port', '114382', '', '', ''] ,
        ['Outside_mpc', '4', 'permit', 'udp', 'obj_dc1dmznpgp03', 'any_port', 'obj_dc2dmzdns03', '53', '114382', '', '', ''] ,
        ['Outside_mpc', '5', 'permit', 'svc-grp_TCPUDP', 'intf_Outside_mpc', 'any_port', 'grp_UMB_DNS', 'domain', '0', '', '', ''] ,
        ['Outside_mpc', '6', 'deny', 'icmp', 'any', 'any_port', 'any', 'any_port', '0', '', '', ''] ,
        ['outside', '2', 'deny', 'tcp', 'any', 'any_port', 'grp_HTTP_HTTPS', 'any_port', '0', '', '', ''] ,
        ['outside', '3', 'deny', 'ip', 'any', 'any_port', 'grp_LOCAL_NETWORKS', 'any_port', '24876', '', '', '']]
    # Expanded ACL: object/group references resolved into concrete entries.
    assert acl['1.1.1.1_exp_acl'] == [['stecap', '1', 'permit', 'ip', 'any', 'any_port', 'any', 'any_port', '0', '', '', ''] ,
        ['stecap', '2', 'permit', 'tcp', '10.10.10.0/32', 'any_port', 'any', '443', '0', '', '', ''] ,
        ['stecap', '2', 'permit', 'tcp', 'any', 'any_port', '10.10.10.0/24', '443', '0', '', '', ''] ,
        ['mgmt', '2', 'permit', 'icmp', 'any', 'any_port', 'any', 'echo', '13759', '', '', ''] ,
        ['mgmt', '3', 'permit', 'icmp', '1.1.1.1/32', 'any_port', 'any', 'echo-reply', '0', '', '', ''] ,
        ['mgmt', '4', 'permit', 'icmp', 'any', 'any_port', '2.2.2.2/32', 'unreachable', '3028', '', '', ''] ,
        ['mgmt', '5', 'permit', 'icmp', '10.10.10.0/24', 'any_port', 'any', 'time-exceeded', '0', '', '', ''] ,
        ['mgmt', '5', 'permit', 'icmp', 'any', 'any_port', '10.10.10.0/24', 'time-exceeded', '0', '', '', ''] ,
        ['mgmt', '6', 'deny', 'icmp', 'any', 'any_port', 'any', 'any_port', '0', '', '', ''] ,
        ['mgmt', '10', 'permit', 'tcp', 'any', '22', '20.20.20.0/24', '67-68', '9222', '', '', ''] ,
        ['mgmt', '12', 'permit', 'tcp', '20.20.20.0/24', '22', 'any', '22', '1227', '', '', ''] ,
        ['Outside_mpc', '1', 'permit', 'icmp', 'any', 'any_port', 'any', 'echo-reply', '30', '', '', ''] ,
        ['Outside_mpc', '2', 'permit', 'ip', 'any', 'any_port', '185.4.167.128/28', 'any_port', '0', '', '', 'inactive'] ,
        ['Outside_mpc', '3', 'permit', 'tcp', 'any', 'any_port', '10.255.111.85/32', 'https', '96119', '', '', ''] ,
        ['Outside_mpc', '3', 'permit', 'tcp', 'any', 'any_port', '10.255.111.85/32', 'ldaps', '15681', '', '', ''] ,
        ['Outside_mpc', '3', 'permit', 'tcp', 'any', 'any_port', '10.255.111.85/32', 'ldap', '2582', '', '', ''] ,
        ['Outside_mpc', '4', 'permit', 'udp', '10.255.111.85/32', 'any_port', '10.255.211.211/32', '53', '114382', '', '', ''] ,
        ['Outside_mpc', '5', 'permit', 'udp', 'intf_Outside_mpc', 'any_port', '10.255.120.14/32', 'domain', '0', '', '', ''] ,
        ['Outside_mpc', '5', 'permit', 'tcp', 'intf_Outside_mpc', 'any_port', '10.255.120.14/32', 'domain', '0', '', '', ''] ,
        ['Outside_mpc', '6', 'deny', 'icmp', 'any', 'any_port', 'any', 'any_port', '0', '', '', ''] ,
        ['outside', '2', 'deny', 'tcp', 'any', 'www', 'any', 'any_port', '0', '', '', ''] ,
        ['outside', '2', 'deny', 'tcp', 'any', 'https', 'any', 'any_port', '0', '', '', ''] ,
        ['outside', '3', 'deny', 'ip', 'any', 'any_port', '10.10.10.0/24', 'any_port', '24876', '', '', ''] ,
        ['outside', '3', 'deny', 'ip', 'any', 'any_port', '10.10.20.0/24', 'any_port', '0', '', '', '']]
# CKP_FORMAT: Loads test ACLs and ensures that the ACL and Expanded ACL are output in the correct formated
def test_ckp_format_data():
    """CKP_FORMAT: Loads test ACLs and ensures that the ACL and Expanded
    ACL are output in the correct format (per-rule list-of-lists)."""
    acl_brief = ckp_acl.acl_brief
    acl_expanded = ckp_acl.acl_expanded
    acl = ckp.format_acl('1.1.1.1', acl_brief, acl_expanded)
    # Plain ACL: rules with named objects/groups kept symbolic.
    assert acl['1.1.1.1_acl'] == [['appctrl', 1, 'Accept', 'app', 'net_10.0.0.0', 'any_port', 'Internet', 'Office365', 16729509098, '2021-05-20', '19:11:33', ''],
        ['appctrl', 2, 'Accept', 'app', 'net_172.16.0.0', 'any_port', 'Internet', 'Facebook', 0, '', '', ''],
        ['appctrl', 2, 'Accept', 'app', 'net_172.16.0.0', 'any_port', 'Internet', 'Facebook Apps', 0, '', '', ''],
        ['appctrl', 2, 'Accept', 'app', 'net_192.168.0.0', 'any_port', 'Internet', 'Facebook', 0, '', '', ''],
        ['appctrl', 2, 'Accept', 'app', 'net_192.168.0.0', 'any_port', 'Internet', 'Facebook Apps', 0, '', '', ''],
        ['appctrl', 3, 'Accept', 'app-grp', 'grp_RFC1918', 'any_port', 'Internet', 'sites', 0, '', '', ''],
        ['appctrl', 4, 'Drop', 'tcp', 'net_172.16.0.0', 'any_port', 'hst_google', '443', 0, '', '', ''],
        ['appctrl', 4, 'Drop', 'tcp', 'net_172.16.0.0', 'any_port', 'hst_google', '80', 0, '', '', ''],
        ['appctrl', 5, 'Drop', 'any', 'Any', 'any_port', 'Any', 'any', 0, '', '', ''],
        ['inline_app_ctrl', 1, 'Drop', 'app', 'net_10.0.0.0', 'any_port', 'grp_amb_dmz_svrs', 'stesworld', 0, '', '', ''],
        ['inline_app_ctrl', 2, 'Accept', 'app', 'Any', 'any_port', 'Internet', 'Office365-Outlook', 0, '', '', ''],
        ['inline_app_ctrl', 2, 'Accept', 'app', 'Any', 'any_port', 'Internet', 'Outlook Web Access', 0, '', '', ''],
        ['Network', 1, 'Accept', 'any', 'rpt_hme-ckp-gw01', 'any_port', 'mgr_hme-ckp-mgmt01', 'any', 0, '', '', ''],
        ['Network', 1, 'Accept', 'any', 'rpt_hme-ckp-gw01', 'any_port', 'rpt_hme-ckp-gw01', 'any', 0, '', '', ''],
        ['Network', 1, 'Accept', 'any', 'mgr_hme-ckp-mgmt01', 'any_port', 'mgr_hme-ckp-mgmt01', 'any', 0, '', '', ''],
        ['Network', 1, 'Accept', 'any', 'mgr_hme-ckp-mgmt01', 'any_port', 'rpt_hme-ckp-gw01', 'any', 0, '', '', ''],
        ['Network', 2, 'Accept', 'svc-grp', 'NOT_grp_amb_dmz_svrs', 'any_port', 'unknown_vpn_range', 'grp_dns_dhcp', 0, '', '', ''],
        ['Network', 3, 'Accept', 'svc-grp', 'grp_blu_servers', 'any_port', 'NOT_grp_RFC1918', 'grp_http_https', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', 'net_INET_10.99.99.0', 'any_port', 'net_network_10.10.10.0', 'NOT_22', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', 'net_INET_10.99.99.0', 'any_port', 'net_network_10.10.10.0', 'NOT_179', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', 'net_INET_10.99.99.0', 'any_port', 'net_network_10.20.8.0', 'NOT_22', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', 'net_INET_10.99.99.0', 'any_port', 'net_network_10.20.8.0', 'NOT_179', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', 'net_network_10.99.10.0', 'any_port', 'net_network_10.10.10.0', 'NOT_22', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', 'net_network_10.99.10.0', 'any_port', 'net_network_10.10.10.0', 'NOT_179', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', 'net_network_10.99.10.0', 'any_port', 'net_network_10.20.8.0', 'NOT_22', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', 'net_network_10.99.10.0', 'any_port', 'net_network_10.20.8.0', 'NOT_179', 0, '', '', ''],
        ['Network', 5, 'Drop', 'svc-grp', 'hst_dmz_svr2', 'any_port', 'net_192.168.0.0', 'NOT_grp_dns_dhcp', 0, '', '', ''],
        ['Network', 5, 'Drop', 'svc-grp', 'hst_dmz_svr2', 'any_port', 'net_192.168.0.0', 'NOT_grp_http_https', 0, '', '', ''],
        ['Network', 5, 'Drop', 'svc-grp', 'hst_dmz_svr2', 'any_port', 'net_172.16.0.0', 'NOT_grp_dns_dhcp', 0, '', '', ''],
        ['Network', 5, 'Drop', 'svc-grp', 'hst_dmz_svr2', 'any_port', 'net_172.16.0.0', 'NOT_grp_http_https', 0, '', '', ''],
        ['Network', 6, 'Accept', 'svc-grp', 'grp_blu_servers', 'any_port', 'grp_amb_dmz_svrs', 'icmp-requests', 0, '', '', ''],
        ['Network', 7, 'Accept', 'dce-rpc', 'hst_dmz_svr1', 'any_port', 'grp_RFC1918', 'ALL_DCE_RPC', 0, '', '', ''],
        ['Network', 7, 'Accept', 'svc-grp', 'hst_dmz_svr1', 'any_port', 'grp_RFC1918', 'kerberos', 0, '', '', ''],
        ['Network', 7, 'Accept', 'svc-grp', 'hst_dmz_svr1', 'any_port', 'grp_RFC1918', 'dns', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', 'hst_dmz_svr1', 'any_port', 'grp_RFC1918', '514', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', 'hst_dmz_svr1', 'any_port', 'grp_RFC1918', '49', 0, '', '', ''],
        ['Network', 7, 'Accept', 'dce-rpc', 'hst_server1', 'any_port', 'grp_RFC1918', 'ALL_DCE_RPC', 0, '', '', ''],
        ['Network', 7, 'Accept', 'svc-grp', 'hst_server1', 'any_port', 'grp_RFC1918', 'kerberos', 0, '', '', ''],
        ['Network', 7, 'Accept', 'svc-grp', 'hst_server1', 'any_port', 'grp_RFC1918', 'dns', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', 'hst_server1', 'any_port', 'grp_RFC1918', '514', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', 'hst_server1', 'any_port', 'grp_RFC1918', '49', 0, '', '', ''],
        ['Network', 8, 'Accept', 'any', 'Any', 'any_port', 'grp_amb_dmz_svrs', 'any', 0, '', '', ''],
        ['Network', 9, 'POLICY_inline_app_ctrl', 'svc-grp', 'grp_RFC1918', 'any_port', 'Any', 'grp_http_https', 3495974, '2021-05-20', '20:28:09', ''],
        ['Network', 9, 'POLICY_inline_app_ctrl', 'tcp', 'grp_RFC1918', 'any_port', 'Any', '8080', 3495974, '2021-05-20', '20:28:09', '']]
    # Expanded ACL: objects/groups resolved to concrete networks/ports.
    assert acl['1.1.1.1_exp_acl'] == [['appctrl', 1, 'Accept', 'app', '10.0.0.0/8', 'any_port', 'Internet', 'Office365', 16729509098, '2021-05-20', '19:11:33', ''],
        ['appctrl', 2, 'Accept', 'app', '172.16.0.0/12', 'any_port', 'Internet', 'Facebook Apps', 0, '', '', ''],
        ['appctrl', 2, 'Accept', 'app', '172.16.0.0/12', 'any_port', 'Internet', 'Facebook', 0, '', '', ''],
        ['appctrl', 2, 'Accept', 'app', '192.168.0.0/16', 'any_port', 'Internet', 'Facebook Apps', 0, '', '', ''],
        ['appctrl', 2, 'Accept', 'app', '192.168.0.0/16', 'any_port', 'Internet', 'Facebook', 0, '', '', ''],
        ['appctrl', 3, 'Accept', 'app-grp', '192.168.0.0/16', 'any_port', 'Internet', 'sites', 0, '', '', ''],
        ['appctrl', 3, 'Accept', 'app-grp', '172.16.0.0/12', 'any_port', 'Internet', 'sites', 0, '', '', ''],
        ['appctrl', 3, 'Accept', 'app-grp', '10.0.0.0/8', 'any_port', 'Internet', 'sites', 0, '', '', ''],
        ['appctrl', 4, 'Drop', 'tcp', '172.16.0.0/12', 'any_port', '8.8.8.8/32', '443', 0, '', '', ''],
        ['appctrl', 4, 'Drop', 'tcp', '172.16.0.0/12', 'any_port', '8.8.8.8/32', '80', 0, '', '', ''],
        ['appctrl', 5, 'Drop', 'any', 'any', 'any_port', 'any', 'any', 0, '', '', ''],
        ['inline_app_ctrl', 1, 'Drop', 'app', '10.0.0.0/8', 'any_port', '10.80.2.100-10.80.2.167', 'stesworld', 0, '', '', ''],
        ['inline_app_ctrl', 1, 'Drop', 'app', '10.0.0.0/8', 'any_port', '10.99.10.20/32', 'stesworld', 0, '', '', ''],
        ['inline_app_ctrl', 1, 'Drop', 'app', '10.0.0.0/8', 'any_port', '10.99.10.10/32', 'stesworld', 0, '', '', ''],
        ['inline_app_ctrl', 2, 'Accept', 'app', 'any', 'any_port', 'Internet', 'Outlook Web Access', 0, '', '', ''],
        ['inline_app_ctrl', 2, 'Accept', 'app', 'any', 'any_port', 'Internet', 'Office365-Outlook', 0, '', '', ''],
        ['Network', 1, 'Accept', 'any', '10.10.10.31/32', 'any_port', '10.10.10.32/32', 'any', 0, '', '', ''],
        ['Network', 1, 'Accept', 'any', '10.10.10.31/32', 'any_port', '10.10.10.31/32', 'any', 0, '', '', ''],
        ['Network', 1, 'Accept', 'any', '10.10.10.32/32', 'any_port', '10.10.10.32/32', 'any', 0, '', '', ''],
        ['Network', 1, 'Accept', 'any', '10.10.10.32/32', 'any_port', '10.10.10.31/32', 'any', 0, '', '', ''],
        ['Network', 2, 'Accept', 'udp', 'NOT_grp_amb_dmz_svrs', 'any_port', '10.80.2.100-10.80.2.167', '67', 0, '', '', ''],
        ['Network', 2, 'Accept', 'udp', 'NOT_grp_amb_dmz_svrs', 'any_port', '10.80.2.100-10.80.2.167', '53', 0, '', '', ''],
        ['Network', 3, 'Accept', 'tcp', '10.10.10.20/32', 'any_port', 'NOT_grp_RFC1918', '443', 0, '', '', ''],
        ['Network', 3, 'Accept', 'tcp', '10.10.10.20/32', 'any_port', 'NOT_grp_RFC1918', '80', 0, '', '', ''],
        ['Network', 3, 'Accept', 'tcp', '10.10.20.20/32', 'any_port', 'NOT_grp_RFC1918', '443', 0, '', '', ''],
        ['Network', 3, 'Accept', 'tcp', '10.10.20.20/32', 'any_port', 'NOT_grp_RFC1918', '80', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', '10.99.10.0/24', 'any_port', '10.20.8.0/21', 'NOT_179', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', '10.99.10.0/24', 'any_port', '10.20.8.0/21', 'NOT_22', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', '10.99.10.0/24', 'any_port', '10.10.10.0/24', 'NOT_179', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', '10.99.10.0/24', 'any_port', '10.10.10.0/24', 'NOT_22', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', '10.99.99.0/29', 'any_port', '10.20.8.0/21', 'NOT_179', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', '10.99.99.0/29', 'any_port', '10.20.8.0/21', 'NOT_22', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', '10.99.99.0/29', 'any_port', '10.10.10.0/24', 'NOT_179', 0, '', '', ''],
        ['Network', 4, 'Drop', 'tcp', '10.99.99.0/29', 'any_port', '10.10.10.0/24', 'NOT_22', 0, '', '', ''],
        ['Network', 5, 'Drop', 'udp', '10.99.10.20/32', 'any_port', '192.168.0.0/16', 'NOT_67', 0, '', '', ''],
        ['Network', 5, 'Drop', 'tcp', '10.99.10.20/32', 'any_port', '192.168.0.0/16', 'NOT_443', 0, '', '', ''],
        ['Network', 5, 'Drop', 'tcp', '10.99.10.20/32', 'any_port', '192.168.0.0/16', 'NOT_80', 0, '', '', ''],
        ['Network', 5, 'Drop', 'udp', '10.99.10.20/32', 'any_port', '192.168.0.0/16', 'NOT_53', 0, '', '', ''],
        ['Network', 5, 'Drop', 'udp', '10.99.10.20/32', 'any_port', '172.16.0.0/12', 'NOT_67', 0, '', '', ''],
        ['Network', 5, 'Drop', 'tcp', '10.99.10.20/32', 'any_port', '172.16.0.0/12', 'NOT_443', 0, '', '', ''],
        ['Network', 5, 'Drop', 'tcp', '10.99.10.20/32', 'any_port', '172.16.0.0/12', 'NOT_80', 0, '', '', ''],
        ['Network', 5, 'Drop', 'udp', '10.99.10.20/32', 'any_port', '172.16.0.0/12', 'NOT_53', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.50.4-10.10.50.124', 'any_port', '10.80.2.100-10.80.2.167', 'echo-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.50.4-10.10.50.124', 'any_port', '10.80.2.100-10.80.2.167', 'mask-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.50.4-10.10.50.124', 'any_port', '10.80.2.100-10.80.2.167', 'info-req', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.50.4-10.10.50.124', 'any_port', '10.80.2.100-10.80.2.167', 'timestamp', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.50.4-10.10.50.124', 'any_port', '10.99.10.20/32', 'echo-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.50.4-10.10.50.124', 'any_port', '10.99.10.20/32', 'mask-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.50.4-10.10.50.124', 'any_port', '10.99.10.20/32', 'info-req', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.50.4-10.10.50.124', 'any_port', '10.99.10.20/32', 'timestamp', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.50.4-10.10.50.124', 'any_port', '10.99.10.10/32', 'echo-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.50.4-10.10.50.124', 'any_port', '10.99.10.10/32', 'mask-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.50.4-10.10.50.124', 'any_port', '10.99.10.10/32', 'info-req', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.50.4-10.10.50.124', 'any_port', '10.99.10.10/32', 'timestamp', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.20.20/32', 'any_port', '10.80.2.100-10.80.2.167', 'echo-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.20.20/32', 'any_port', '10.80.2.100-10.80.2.167', 'mask-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.20.20/32', 'any_port', '10.80.2.100-10.80.2.167', 'info-req', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.20.20/32', 'any_port', '10.80.2.100-10.80.2.167', 'timestamp', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.20.20/32', 'any_port', '10.99.10.20/32', 'echo-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.20.20/32', 'any_port', '10.99.10.20/32', 'mask-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.20.20/32', 'any_port', '10.99.10.20/32', 'info-req', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.20.20/32', 'any_port', '10.99.10.20/32', 'timestamp', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.20.20/32', 'any_port', '10.99.10.10/32', 'echo-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.20.20/32', 'any_port', '10.99.10.10/32', 'mask-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.20.20/32', 'any_port', '10.99.10.10/32', 'info-req', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.20.20/32', 'any_port', '10.99.10.10/32', 'timestamp', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.10.20/32', 'any_port', '10.80.2.100-10.80.2.167', 'echo-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.10.20/32', 'any_port', '10.80.2.100-10.80.2.167', 'mask-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.10.20/32', 'any_port', '10.80.2.100-10.80.2.167', 'info-req', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.10.20/32', 'any_port', '10.80.2.100-10.80.2.167', 'timestamp', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.10.20/32', 'any_port', '10.99.10.20/32', 'echo-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.10.20/32', 'any_port', '10.99.10.20/32', 'mask-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.10.20/32', 'any_port', '10.99.10.20/32', 'info-req', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.10.20/32', 'any_port', '10.99.10.20/32', 'timestamp', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.10.20/32', 'any_port', '10.99.10.10/32', 'echo-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.10.20/32', 'any_port', '10.99.10.10/32', 'mask-request', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.10.20/32', 'any_port', '10.99.10.10/32', 'info-req', 0, '', '', ''],
        ['Network', 6, 'Accept', 'icmp', '10.10.10.20/32', 'any_port', '10.99.10.10/32', 'timestamp', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.10.10.20/32', 'any_port', '10.0.0.0/8', '53', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.10.10.20/32', 'any_port', '10.0.0.0/8', '49', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.10.10.20/32', 'any_port', '10.0.0.0/8', '514', 0, '', '', ''],
        ['Network', 7, 'Accept', 'tcp', '10.10.10.20/32', 'any_port', '10.0.0.0/8', '53', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.10.10.20/32', 'any_port', '10.0.0.0/8', '750', 0, '', '', ''],
        ['Network', 7, 'Accept', 'dce-rpc', '10.10.10.20/32', 'any_port', '10.0.0.0/8', 'ALL_DCE_RPC', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.10.10.20/32', 'any_port', '192.168.0.0/16', '53', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.10.10.20/32', 'any_port', '192.168.0.0/16', '49', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.10.10.20/32', 'any_port', '192.168.0.0/16', '514', 0, '', '', ''],
        ['Network', 7, 'Accept', 'tcp', '10.10.10.20/32', 'any_port', '192.168.0.0/16', '53', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.10.10.20/32', 'any_port', '192.168.0.0/16', '750', 0, '', '', ''],
        ['Network', 7, 'Accept', 'dce-rpc', '10.10.10.20/32', 'any_port', '192.168.0.0/16', 'ALL_DCE_RPC', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.10.10.20/32', 'any_port', '172.16.0.0/12', '53', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.10.10.20/32', 'any_port', '172.16.0.0/12', '49', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.10.10.20/32', 'any_port', '172.16.0.0/12', '514', 0, '', '', ''],
        ['Network', 7, 'Accept', 'tcp', '10.10.10.20/32', 'any_port', '172.16.0.0/12', '53', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.10.10.20/32', 'any_port', '172.16.0.0/12', '750', 0, '', '', ''],
        ['Network', 7, 'Accept', 'dce-rpc', '10.10.10.20/32', 'any_port', '172.16.0.0/12', 'ALL_DCE_RPC', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.99.10.10/32', 'any_port', '10.0.0.0/8', '53', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.99.10.10/32', 'any_port', '10.0.0.0/8', '49', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.99.10.10/32', 'any_port', '10.0.0.0/8', '514', 0, '', '', ''],
        ['Network', 7, 'Accept', 'tcp', '10.99.10.10/32', 'any_port', '10.0.0.0/8', '53', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.99.10.10/32', 'any_port', '10.0.0.0/8', '750', 0, '', '', ''],
        ['Network', 7, 'Accept', 'dce-rpc', '10.99.10.10/32', 'any_port', '10.0.0.0/8', 'ALL_DCE_RPC', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.99.10.10/32', 'any_port', '192.168.0.0/16', '53', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.99.10.10/32', 'any_port', '192.168.0.0/16', '49', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.99.10.10/32', 'any_port', '192.168.0.0/16', '514', 0, '', '', ''],
        ['Network', 7, 'Accept', 'tcp', '10.99.10.10/32', 'any_port', '192.168.0.0/16', '53', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.99.10.10/32', 'any_port', '192.168.0.0/16', '750', 0, '', '', ''],
        ['Network', 7, 'Accept', 'dce-rpc', '10.99.10.10/32', 'any_port', '192.168.0.0/16', 'ALL_DCE_RPC', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.99.10.10/32', 'any_port', '172.16.0.0/12', '53', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.99.10.10/32', 'any_port', '172.16.0.0/12', '49', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.99.10.10/32', 'any_port', '172.16.0.0/12', '514', 0, '', '', ''],
        ['Network', 7, 'Accept', 'tcp', '10.99.10.10/32', 'any_port', '172.16.0.0/12', '53', 0, '', '', ''],
        ['Network', 7, 'Accept', 'udp', '10.99.10.10/32', 'any_port', '172.16.0.0/12', '750', 0, '', '', ''],
        ['Network', 7, 'Accept', 'dce-rpc', '10.99.10.10/32', 'any_port', '172.16.0.0/12', 'ALL_DCE_RPC', 0, '', '', ''],
        ['Network', 8, 'Accept', 'any', 'any', 'any_port', '10.99.10.10/32', 'any', 0, '', '', ''],
        ['Network', 8, 'Accept', 'any', 'any', 'any_port', '10.99.10.20/32', 'any', 0, '', '', ''],
        ['Network', 8, 'Accept', 'any', 'any', 'any_port', '10.80.2.100-10.80.2.167', 'any', 0, '', '', ''],
        ['Network', 9, 'POLICY_inline_app_ctrl', 'tcp', '10.0.0.0/8', 'any_port', 'any', '8080', 3495974, '2021-05-20', '20:28:09', ''],
        ['Network', 9, 'POLICY_inline_app_ctrl', 'tcp', '10.0.0.0/8', 'any_port', 'any', '443', 3495974, '2021-05-20', '20:28:09', ''],
        ['Network', 9, 'POLICY_inline_app_ctrl', 'tcp', '10.0.0.0/8', 'any_port', 'any', '80', 3495974, '2021-05-20', '20:28:09', ''],
        ['Network', 9, 'POLICY_inline_app_ctrl', 'tcp', '192.168.0.0/16', 'any_port', 'any', '8080', 3495974, '2021-05-20', '20:28:09', ''],
        ['Network', 9, 'POLICY_inline_app_ctrl', 'tcp', '192.168.0.0/16', 'any_port', 'any', '443', 3495974, '2021-05-20', '20:28:09', ''],
        ['Network', 9, 'POLICY_inline_app_ctrl', 'tcp', '192.168.0.0/16', 'any_port', 'any', '80', 3495974, '2021-05-20', '20:28:09', ''],
        ['Network', 9, 'POLICY_inline_app_ctrl', 'tcp', '172.16.0.0/12', 'any_port', 'any', '8080', 3495974, '2021-05-20', '20:28:09', ''],
        ['Network', 9, 'POLICY_inline_app_ctrl', 'tcp', '172.16.0.0/12', 'any_port', 'any', '443', 3495974, '2021-05-20', '20:28:09', ''],
        ['Network', 9, 'POLICY_inline_app_ctrl', 'tcp', '172.16.0.0/12', 'any_port', 'any', '80', 3495974, '2021-05-20', '20:28:09', '']]
| 134.215686
| 181
| 0.383638
| 4,111
| 34,225
| 3.053272
| 0.057407
| 0.12492
| 0.059512
| 0.047323
| 0.919296
| 0.899219
| 0.884481
| 0.869105
| 0.845921
| 0.833971
| 0
| 0.175401
| 0.352169
| 34,225
| 254
| 182
| 134.744094
| 0.390718
| 0.014463
| 0
| 0.091286
| 0
| 0
| 0.382439
| 0.033983
| 0
| 0
| 0
| 0
| 0.024896
| 1
| 0.012448
| false
| 0
| 0.024896
| 0
| 0.037344
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
25246c22ca78eeb1dc86e074daac07f793eb128d
| 62
|
py
|
Python
|
tests/data/force_pyi.py
|
StarryInternet/black
|
f90f50a7436ca13517933c290ef007e7cb2e7258
|
[
"MIT"
] | 18
|
2019-12-12T13:07:07.000Z
|
2022-03-15T12:15:38.000Z
|
tests/data/force_pyi.py
|
StarryInternet/black
|
f90f50a7436ca13517933c290ef007e7cb2e7258
|
[
"MIT"
] | 14
|
2020-01-15T02:47:54.000Z
|
2022-03-30T13:38:54.000Z
|
tests/data/force_pyi.py
|
StarryInternet/black
|
f90f50a7436ca13517933c290ef007e7cb2e7258
|
[
"MIT"
] | 8
|
2019-11-12T11:22:09.000Z
|
2021-03-05T21:56:06.000Z
|
# NOTE(review): looks like a Black formatter test fixture -- the stubs
# before "# output" are the input and those after are the expected
# output; confirm before editing, as exact formatting is significant.
def f(): ...
def g(): ...
# output
def f(): ...
def g(): ...
| 8.857143
| 12
| 0.354839
| 9
| 62
| 2.444444
| 0.444444
| 0.363636
| 0.636364
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.241935
| 62
| 6
| 13
| 10.333333
| 0.468085
| 0.096774
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
2534d6f0c8250b444f5d6b51dff531ee59900063
| 13,133
|
py
|
Python
|
python/test/basic/test_cholupdnrk1.py
|
mseeger/apbsint
|
6acd73fb1d7b5168f3cee8584a8074d6617478a6
|
[
"BSD-3-Clause"
] | 9
|
2016-01-09T10:37:11.000Z
|
2021-11-21T08:41:02.000Z
|
python/test/basic/test_cholupdnrk1.py
|
mseeger/apbsint
|
6acd73fb1d7b5168f3cee8584a8074d6617478a6
|
[
"BSD-3-Clause"
] | null | null | null |
python/test/basic/test_cholupdnrk1.py
|
mseeger/apbsint
|
6acd73fb1d7b5168f3cee8584a8074d6617478a6
|
[
"BSD-3-Clause"
] | 4
|
2017-01-25T17:59:44.000Z
|
2020-10-26T17:48:18.000Z
|
#! /usr/bin/env python
import numpy as np
import apbsint as abt
# Helper functions
def maxreldiff(a, b):
    """Return the largest elementwise relative difference between a and b.

    Each |a - b| entry is scaled by max(|a|, |b|, 1e-8); the 1e-8 floor
    keeps the division well-defined when both entries are near zero.
    """
    abs_diff = np.abs(a - b)
    scale = np.maximum(np.maximum(np.abs(a), np.abs(b)), 1e-8)
    return (abs_diff / scale).max()
# I/O generated by matlab/test/basic/test_cholupdnrk1.m
lfact = np.array([[8.3033086285545287807963177e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-2.8565097159532981674345820e-01, 5.8860897380357912744841542e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-8.3136651156762431114088940e-01, 1.0186852821285754533420231e+00, 6.7852506102343890237449386e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-9.7920630516730211567733022e-01, -1.3321747950773468738283611e-01, 3.0753515923825192057705635e-01, 3.3728357977152145785737503e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-1.1564016556640022148627622e+00, -7.1453016378715838996527054e-01, -1.2571183593520534049758908e+00, -1.3028465314572063027398485e-01, 5.5884882817993108972132177e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-5.3355710931598743229642423e-01, 1.3513857684266568082165350e+00, -8.6546803055480381328123940e-01, 1.8368909586194212546494953e-01, 1.2606587091208962814192773e+00, 1.0630885392869129368875747e+00, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-2.0026357358830604304955614e+00, -2.2477105605258410059299479e-01, -1.7653411423145093372966130e-01, -4.7615301661907383223848456e-01, 6.6014314104697768836871319e-01, -2.0971333838873670862845700e-01, 6.4680571873896797185921059e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[9.6422942263162747522642348e-01, -5.8902903072080126101184305e-01, 7.9141606162863398221674061e-01, 8.6202161155692202942901758e-01, -6.7865553542687334642735664e-02, 6.2519035708762571434959909e-01, -1.4605463433152618413224388e-01, 6.2113583080400147640176556e-01, 0000000000000000000000000, 0000000000000000000000000], \
[5.2006010145545755740670302e-01, -2.9375359773541609431646293e-01, -1.3320044213152473222550043e+00, -1.3616944708707543476577939e+00, -1.9522119789875436168813394e-01, 1.8322726300143696298583507e-01, -5.3201137680882071290255908e-01, -1.0642134128893268041338160e+00, 3.3159438670852381392251118e-01, 0000000000000000000000000], \
[-2.0027851642538077592270440e-02, -8.4792624363793389630217234e-01, -2.3298671558050760843627813e+00, 4.5502955644433434878592948e-01, -2.1760635014319193447640544e-01, -1.0297675435666211463114905e+00, 1.6821035946631788249305828e+00, 1.6034572981200441166294013e+00, -1.2506789068264074771263950e+00, 5.8889774392016691617612878e-01]],order='F')
zmat = np.array([[2.3234701262447676750610981e-01, -3.7280874172350420003851923e-01, 2.0236908866030534071001057e+00, 2.2294456804568989483072983e+00, 1.0000608195891247387265821e+00, -5.9003456420522149006302470e-01, 4.2271569122047830679989033e-01, 4.7163432641630270714117046e-01, 6.6190048424611416799656638e-02, 3.2705996717708751830855363e-01], \
[4.2638755740894501711224507e-01, -2.3645458375718628185602199e-01, -2.2583539704961914651448751e+00, 3.3756370061310642105567581e-01, -1.6641644749870603270380798e+00, -2.7806416376530934675415097e-01, -1.6702006978504704282073590e+00, -1.2128471996744594196826483e+00, 6.5235588866137395047672953e-01, 1.0826335042367560834719598e+00]],order='F')
vec = np.array([1.0060771108190513789537590e+00, -6.5090773659775258419557531e-01, 2.5705615743396886818672442e-01, -9.4437780640421897793146400e-01, -1.3217885213925637533094459e+00, 9.2482593349370589841385026e-01, 4.9849075250813308288460463e-05, -5.4918914609406697946436537e-02, 9.1112726565385981913891555e-01, 5.9458369740905236966455050e-01])
yvec = np.array([3.5020117387453519874895846e-01, 1.2502512283049957986236222e+00])
pvec = np.array([7.7125388101057334999666182e-01, -1.9926074004100211012335819e-01, 5.1232817672378427875656826e-01, -1.6672220309894866807631786e-01, 2.2120501975817810635938088e-01, -3.3162586077901595738026685e-02, -6.0507729276895648451439769e-02, -4.0580182867232639232213387e-02, -6.4006738203784108165450562e-02, 1.1350755256349331911991385e-01])
lfact1 = np.array([[1.3044694303525439238455874e+00, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-6.8383986010710884340113580e-01, 6.1975876670439888904695636e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-3.3093221402129036068373580e-01, 7.1554206828997068967623818e-01, 1.2782204525524354377097325e+00, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-1.3516469380162792202071387e+00, -1.7475993855787919084576743e-01, 2.5193572862805097001270838e-01, 3.9473702761831624963306808e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-1.7555161569807617993888016e+00, -6.9443335305628273168565556e-01, -8.1621954143359243261102165e-01, -6.1636434157229968988644941e-01, 9.4571756452719923924377099e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[3.7365173677327651047264112e-01, 9.7036222097919444529168231e-01, 7.0412514689283411950526670e-01, -6.0274546527575534327070272e-01, 1.8299026677357035364224203e+00, 1.0860788913729197968649487e+00, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-1.2746945753247396204699271e+00, -6.9699187387448269248579891e-01, 1.0898533336961901962780530e+00, -8.6973197488790932041524684e-01, 8.0467735001529594462965633e-01, -1.5843126180135541636317953e-01, 6.9972863753142022780195930e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[5.7140226378730851752862918e-01, -3.1568097895728375323898263e-01, -3.6285323995038004651902952e-01, 1.3398009974206908090366142e+00, -4.7919229771944588103238516e-01, 6.6657818978435534607029922e-01, -8.4161642865519997736001301e-02, 6.4639118354155089551937863e-01, 0000000000000000000000000, 0000000000000000000000000], \
[1.0337430747083602433633587e+00, -3.3497992247987973257750127e-01, -6.4104702260593726670379056e-01, -1.7714566253461709877115027e+00, 1.2055341845305016235112561e-01, 1.1174793248331391093408627e-01, -6.2942296390985585929911394e-01, -1.0587693363297079329754524e+00, 3.7223139397255411431331140e-01, 0000000000000000000000000], \
[4.4582670284327424736758871e-01, -9.2862111899222599120662380e-01, -1.1446737972355913726119070e+00, -6.6698826737872507486315499e-01, 1.4629305994097125953601335e+00, -1.2824953842447186502795375e+00, 1.1343914416220095553455849e+00, 1.0811737301565909064038351e+00, -2.0407673327187421108419585e+00, 1.3805216971151428317909904e+00]],order='F')
zmat1 = np.array([[4.1798931274621098364008276e-01, -3.6775538052042211489833790e-01, 1.0105272674490330953034345e+00, 2.8166990923743844277282733e+00, 3.1567183230174095109887844e-01, -3.7115397272802652661738421e-01, 8.1339074436167080328630163e-01, 6.9202108525570826813577696e-01, 3.7633082378274640422688435e-01, -3.9609308419032474013476985e-01], \
[1.2356685825819859747554119e+00, -3.7075082892540833778838305e-01, -8.8569367871706394801378792e-01, -8.0779455550969392341187358e-01, 6.1254187163265017002089508e-01, -7.8620759739328494042354123e-01, -2.4600530397397823989535937e+00, -1.6035309834875821621835712e+00, 4.2698468024831570666322023e-02, 1.6847476935949856624574750e+00]],order='F')
lfact2 = np.array([[8.3033086285545287807963177e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-2.8565097159532992776576066e-01, 5.8860897380357923847071788e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-8.3136651156762442216319187e-01, 1.0186852821285754533420231e+00, 6.7852506102343856930758648e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-9.7920630516730244874423761e-01, -1.3321747950773457636053365e-01, 3.0753515923825114342093912e-01, 3.3728357977152190194658488e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-1.1564016556640019928181573e+00, -7.1453016378715850098757301e-01, -1.2571183593520529608866809e+00, -1.3028465314572290623118533e-01, 5.5884882817993053460980946e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-5.3355710931598721025181931e-01, 1.3513857684266565861719300e+00, -8.6546803055480348021433201e-01, 1.8368909586193804539533403e-01, 1.2606587091208969475530921e+00, 1.0630885392869127148429698e+00, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[-2.0026357358830604304955614e+00, -2.2477105605258451692662902e-01, -1.7653411423145082270735884e-01, -4.7615301661907549757302149e-01, 6.6014314104697580098957133e-01, -2.0971333838873693067306192e-01, 6.4680571873896830492611798e-01, 0000000000000000000000000, 0000000000000000000000000, 0000000000000000000000000], \
[9.6422942263162747522642348e-01, -5.8902903072080103896723813e-01, 7.9141606162863409323904307e-01, 8.6202161155692369476355452e-01, -6.7865553542683615395603169e-02, 6.2519035708762449310427201e-01, -1.4605463433152704455508797e-01, 6.2113583080400069924564832e-01, 0000000000000000000000000, 0000000000000000000000000], \
[5.2006010145545766842900548e-01, -2.9375359773541620533876539e-01, -1.3320044213152473222550043e+00, -1.3616944708707552358362136e+00, -1.9522119789876052342592061e-01, 1.8322726300143932220976239e-01, -5.3201137680881771530039259e-01, -1.0642134128893243616431619e+00, 3.3159438670852642294661905e-01, 0000000000000000000000000], \
[-2.0027851642538119225633864e-02, -8.4792624363793389630217234e-01, -2.3298671558050760843627813e+00, 4.5502955644432874215965512e-01, -2.1760635014319151814277120e-01, -1.0297675435666191479100462e+00, 1.6821035946631739399492744e+00, 1.6034572981200516661459687e+00, -1.2506789068264294595422825e+00, 5.8889774392012195214363146e-01]],order='F')
zmat2 = np.array([[2.3234701262447673975053419e-01, -3.7280874172350408901621677e-01, 2.0236908866030538511893155e+00, 2.2294456804569007246641377e+00, 1.0000608195891331764215693e+00, -5.9003456420522604197742567e-01, 4.2271569122047553124232877e-01, 4.7163432641629532415805670e-01, 6.6190048424615538502635559e-02, 3.2705996717716373511919414e-01], \
[4.2638755740894507262339630e-01, -2.3645458375718636512274884e-01, -2.2583539704961923533232948e+00, 3.3756370061310070340709899e-01, -1.6641644749870609931718946e+00, -2.7806416376530407319478400e-01, -1.6702006978504746470548525e+00, -1.2128471996744529803891055e+00, 6.5235588866134863739176808e-01, 1.0826335042368238070764619e+00]],order='F')
# Workspace sizes: zmat is r x n (here 2 x 10).
r, n = zmat.shape
# Scratch vectors required by the update/downdate extension calls below.
cvec = np.zeros(n); svec = np.zeros(n); workv = np.zeros(n)
# Rank-1 Cholesky update. Fortran-ordered copies are taken because the
# extension presumably overwrites its arguments in place — verify against
# eptools_ext.
lfact1b = lfact.copy(order='F')
zmat1b = zmat.copy(order='F')
stat = abt.eptools_ext.choluprk1(lfact1b,'L',vec,cvec,svec,workv,zmat1b,yvec)
# Report max relative difference vs. the MATLAB-generated reference (lfact1/zmat1).
print 'CHOLUPRK1: rdf(L) = %f, rdf(Z) = %f' % \
(maxreldiff(lfact1,lfact1b), maxreldiff(zmat1,zmat1b))
# Rank-1 Cholesky downdate, starting from the factors produced by the update.
lfact2b = lfact1b.copy(order='F')
zmat2b = zmat1b.copy(order='F')
stat = abt.eptools_ext.choldnrk1(lfact2b,'L',pvec,cvec,svec,workv,zmat2b,yvec)
# Reference for the downdate is lfact2/zmat2 (should roughly recover lfact/zmat).
print 'CHOLDNRK1: rdf(L) = %f, rdf(Z) = %f' % \
(maxreldiff(lfact2,lfact2b), maxreldiff(zmat2,zmat2b))
| 196.014925
| 354
| 0.856316
| 1,051
| 13,133
| 10.697431
| 0.301618
| 0.480299
| 0.560349
| 0.560349
| 0.325714
| 0.305168
| 0.293694
| 0.262474
| 0.175843
| 0.082718
| 0
| 0.820676
| 0.042488
| 13,133
| 66
| 355
| 198.984848
| 0.0734
| 0.007005
| 0
| 0
| 0
| 0
| 0.006289
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.036364
| null | null | 0.036364
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c2a0ef5bbfe45a77669a7d025e93e9b41966e992
| 197
|
py
|
Python
|
test_hellopython.py
|
cndipowa/hellopython
|
3dc12b77ff62663eb2f035371a5de8ecaf570cff
|
[
"MIT"
] | null | null | null |
test_hellopython.py
|
cndipowa/hellopython
|
3dc12b77ff62663eb2f035371a5de8ecaf570cff
|
[
"MIT"
] | null | null | null |
test_hellopython.py
|
cndipowa/hellopython
|
3dc12b77ff62663eb2f035371a5de8ecaf570cff
|
[
"MIT"
] | null | null | null |
from hellopython import say_hello
def test_hellopython_no_params():
    """say_hello with no arguments greets the world."""
    greeting = say_hello()
    assert greeting == "Hello, World!"
def test_hellopython_with_params():
    """say_hello with a name argument greets that name."""
    greeting = say_hello("Jon")
    assert greeting == "Hello, Jon!"
| 24.625
| 44
| 0.715736
| 26
| 197
| 5.076923
| 0.5
| 0.181818
| 0.272727
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162437
| 197
| 7
| 45
| 28.142857
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.137056
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
c2ba670273d09d118fb22a4bce80b09e19cc41c3
| 2,211
|
py
|
Python
|
test_fena/v1_13/test_datas.py
|
Aquafina-water-bottle/Fena
|
00a2b1dbc6f8abd968c46c637d6ad30d5fcde919
|
[
"MIT"
] | 2
|
2017-07-04T02:27:11.000Z
|
2017-07-08T10:39:54.000Z
|
test_fena/v1_13/test_datas.py
|
Aquafina-water-bottle/Command-Compiler-Unlimited
|
00a2b1dbc6f8abd968c46c637d6ad30d5fcde919
|
[
"MIT"
] | 6
|
2017-07-08T10:39:28.000Z
|
2018-02-08T23:10:46.000Z
|
test_fena/v1_13/test_datas.py
|
Aquafina-water-bottle/Fena
|
00a2b1dbc6f8abd968c46c637d6ad30d5fcde919
|
[
"MIT"
] | 1
|
2017-07-24T23:52:43.000Z
|
2017-07-24T23:52:43.000Z
|
from test_fena.test_common import test_cmd
def test_datas():
    """Check that shorthand data commands compile to vanilla /data commands.

    Each pair maps a Fena shorthand (first element) to the expected
    vanilla command (second element). The second half repeats the first
    with the block/entity target wrapped in parentheses, which must
    compile identically.
    """
    cases = (
        ("data ~ ~ ~ <-", "data get block ~ ~ ~"),
        ("data ~ ~ ~ <- Items", "data get block ~ ~ ~ Items"),
        ("data ~ ~ ~ <- Items.0", "data get block ~ ~ ~ Items.0"),
        ("data ~ ~ ~ <- Items.0 1", "data get block ~ ~ ~ Items.0 1"),
        ("data ~ ~ ~ <- Items.0 1.1", "data get block ~ ~ ~ Items.0 1.1"),
        ("data @s <-", "data get entity @s"),
        ("data @s <- Movement", "data get entity @s Movement"),
        ("data @s <- Movement.2", "data get entity @s Movement.2"),
        ("data @s <- Movement.2 1", "data get entity @s Movement.2 1"),
        ("data @s <- Movement.2 1.1", "data get entity @s Movement.2 1.1"),
        ('data ~ ~ ~ + {Lock:"asdf"}', 'data merge block ~ ~ ~ {Lock:"asdf"}'),
        ('data @s + {Invulnerable:1b}', 'data merge entity @s {Invulnerable:1b}'),
        ("data ~ ~ ~ - Lock", "data remove block ~ ~ ~ Lock"),
        ("data @s - Invulnerable", "data remove entity @s Invulnerable"),
        ("data (~ ~ ~) <-", "data get block ~ ~ ~"),
        ("data (~ ~ ~) <- Items", "data get block ~ ~ ~ Items"),
        ("data (~ ~ ~) <- Items.0", "data get block ~ ~ ~ Items.0"),
        ("data (~ ~ ~) <- Items.0 1", "data get block ~ ~ ~ Items.0 1"),
        ("data (~ ~ ~) <- Items.0 1.1", "data get block ~ ~ ~ Items.0 1.1"),
        ("data (@s) <-", "data get entity @s"),
        ("data (@s) <- Movement", "data get entity @s Movement"),
        ("data (@s) <- Movement.2", "data get entity @s Movement.2"),
        ("data (@s) <- Movement.2 1", "data get entity @s Movement.2 1"),
        ("data (@s) <- Movement.2 1.1", "data get entity @s Movement.2 1.1"),
        ('data (~ ~ ~) + {Lock:"asdf"}', 'data merge block ~ ~ ~ {Lock:"asdf"}'),
        ('data (@s) + {Invulnerable:1b}', 'data merge entity @s {Invulnerable:1b}'),
        ("data (~ ~ ~) - Lock", "data remove block ~ ~ ~ Lock"),
        ("data (@s) - Invulnerable", "data remove entity @s Invulnerable"),
    )
    for shorthand, expected in cases:
        test_cmd(shorthand, expected)
| 53.926829
| 89
| 0.543193
| 319
| 2,211
| 3.664577
| 0.07837
| 0.173653
| 0.225834
| 0.123182
| 0.959795
| 0.959795
| 0.959795
| 0.959795
| 0.959795
| 0.959795
| 0
| 0.031458
| 0.252374
| 2,211
| 40
| 90
| 55.275
| 0.675741
| 0
| 0
| 0
| 0
| 0
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| true
| 0
| 0.033333
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c2c69f986002286ae5b573acf0f5f6ef7fde57b1
| 72
|
py
|
Python
|
blockchain/all_challenges/2021/realworld/rwctf3rd-Re-Montagy/deploy/ethbot/src/banner/Sections/Source.py
|
yuxingzh/CTF
|
2d58d8443628b33958cafc65813c4719e78e6ee7
|
[
"MIT"
] | null | null | null |
blockchain/all_challenges/2021/realworld/rwctf3rd-Re-Montagy/deploy/ethbot/src/banner/Sections/Source.py
|
yuxingzh/CTF
|
2d58d8443628b33958cafc65813c4719e78e6ee7
|
[
"MIT"
] | null | null | null |
blockchain/all_challenges/2021/realworld/rwctf3rd-Re-Montagy/deploy/ethbot/src/banner/Sections/Source.py
|
yuxingzh/CTF
|
2d58d8443628b33958cafc65813c4719e78e6ee7
|
[
"MIT"
] | null | null | null |
from src.banner.text.corpus import SRC_TEXT
def run():
    """Print the banner's source-section text (the SRC_TEXT corpus string)."""
    print(SRC_TEXT)
| 18
| 43
| 0.777778
| 13
| 72
| 4.153846
| 0.692308
| 0.259259
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 72
| 4
| 44
| 18
| 0.84375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c2ef1b0427a564cb3265ea98e8dfcfc59c4d9f2c
| 63
|
py
|
Python
|
brainbox/task/__init__.py
|
SebastianBruijns/ibllib
|
49f2091b7a53430c00c339b862dfc1a53aab008b
|
[
"MIT"
] | 1
|
2020-11-21T07:02:21.000Z
|
2020-11-21T07:02:21.000Z
|
brainbox/task/__init__.py
|
SebastianBruijns/ibllib
|
49f2091b7a53430c00c339b862dfc1a53aab008b
|
[
"MIT"
] | null | null | null |
brainbox/task/__init__.py
|
SebastianBruijns/ibllib
|
49f2091b7a53430c00c339b862dfc1a53aab008b
|
[
"MIT"
] | null | null | null |
from .task import _get_spike_counts_in_bins
from .task import *
| 31.5
| 43
| 0.84127
| 11
| 63
| 4.363636
| 0.727273
| 0.333333
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 63
| 2
| 44
| 31.5
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6c06a318f568838ecf7d9dff1f1ba7be6b6bfefc
| 14,448
|
py
|
Python
|
exp/detect_coco_objects/evaluate_boxes.py
|
NUS-ISS-Pattern-Recognition-Project/action-cooccurrence-hico-det
|
78a9d08ac188a13b6ae99553031323082bd6913b
|
[
"MIT"
] | 27
|
2020-07-20T02:19:04.000Z
|
2021-09-30T07:58:15.000Z
|
exp/detect_coco_objects/evaluate_boxes.py
|
NUS-ISS-Pattern-Recognition-Project/action-cooccurrence-hico-det
|
78a9d08ac188a13b6ae99553031323082bd6913b
|
[
"MIT"
] | 5
|
2020-07-21T23:21:10.000Z
|
2021-05-28T07:02:44.000Z
|
exp/detect_coco_objects/evaluate_boxes.py
|
NUS-ISS-Pattern-Recognition-Project/action-cooccurrence-hico-det
|
78a9d08ac188a13b6ae99553031323082bd6913b
|
[
"MIT"
] | 4
|
2020-08-11T07:59:43.000Z
|
2021-04-01T07:45:02.000Z
|
"""
Deprecated: This file does not support selected boxes stored in hdf5 format. To use
these functions please replace the code where selected_dets_npy is loaded with
code to read the same information from hdf5 file. Format in which the data is
stored in the hdf5 is described in exp/detect_coco_objects/data_description.md
"""
import os
import h5py
import numpy as np
from tqdm import tqdm
import utils.io as io
from utils.bbox_utils import compute_iou, compute_area
from data.coco_classes import COCO_CLASSES
def box_recall(gt_hois, human_boxes, object_boxes, iou_thresh):
    """Compute recall of predicted human/object boxes and their pairings.

    Fixes vs. the original: the bare ``except: import pdb; pdb.set_trace()``
    debug traps around ``compute_iou`` are removed (they left a debugger
    breakpoint in production code, and on resume ``iou`` was unbound,
    raising NameError); the duplicated human/object matching loops are
    shared via a local helper.

    Args:
        gt_hois: per-HOI-type dicts with keys 'connections', 'human_bboxes',
            'object_bboxes' (as in anno_list.json; 'invis' is present but
            unused here).
        human_boxes: predicted human boxes.
        object_boxes: predicted object boxes.
        iou_thresh: IoU threshold at or above which a gt box counts as
            recalled.

    Returns:
        dict with connection/human/object recall (None when the
        corresponding gt count is zero), the raw recalled/total counts,
        and the proposal counts.
    """
    num_pred_human_boxes = len(human_boxes)
    num_pred_object_boxes = len(object_boxes)
    num_pred_connections = num_pred_human_boxes * num_pred_object_boxes

    def recalled_flags(gt_boxes, pred_boxes):
        # flags[i] is True iff some predicted box overlaps gt_boxes[i]
        # with IoU >= iou_thresh.
        flags = [False] * len(gt_boxes)
        for i, gt_box in enumerate(gt_boxes):
            for box in pred_boxes:
                if compute_iou(box, gt_box) >= iou_thresh:
                    flags[i] = True
                    break
        return flags

    def safe_ratio(num, den):
        # Recall is undefined (None) when there are no gt instances.
        return None if den == 0 else num / den

    num_gt_connections_recalled = 0
    num_gt_connections = 0
    num_gt_human_boxes_recalled = 0
    num_gt_human_boxes = 0
    num_gt_object_boxes_recalled = 0
    num_gt_object_boxes = 0
    for hois_per_type in gt_hois:
        gt_connections = hois_per_type['connections']
        gt_human_boxes = hois_per_type['human_bboxes']
        gt_object_boxes = hois_per_type['object_bboxes']

        gt_human_boxes_recalled = recalled_flags(gt_human_boxes, human_boxes)
        gt_object_boxes_recalled = recalled_flags(gt_object_boxes, object_boxes)

        # A gt connection (i, j) is recalled iff both of its endpoints are.
        gt_connections_recalled = [
            gt_human_boxes_recalled[i] and gt_object_boxes_recalled[j]
            for (i, j) in gt_connections
        ]

        num_gt_connections += len(gt_connections)
        num_gt_connections_recalled += gt_connections_recalled.count(True)
        num_gt_human_boxes += len(gt_human_boxes)
        num_gt_human_boxes_recalled += gt_human_boxes_recalled.count(True)
        num_gt_object_boxes += len(gt_object_boxes)
        num_gt_object_boxes_recalled += gt_object_boxes_recalled.count(True)

    stats = {
        'connection_recall': safe_ratio(
            num_gt_connections_recalled, num_gt_connections),
        'human_recall': safe_ratio(
            num_gt_human_boxes_recalled, num_gt_human_boxes),
        'object_recall': safe_ratio(
            num_gt_object_boxes_recalled, num_gt_object_boxes),
        'num_gt_connections_recalled': num_gt_connections_recalled,
        'num_gt_connections': num_gt_connections,
        'num_gt_human_boxes_recalled': num_gt_human_boxes_recalled,
        'num_gt_human_boxes': num_gt_human_boxes,
        'num_gt_object_boxes_recalled': num_gt_object_boxes_recalled,
        'num_gt_object_boxes': num_gt_object_boxes,
        'num_connection_proposals': num_pred_connections,
        'num_human_proposals': num_pred_human_boxes,
        'num_object_proposals': num_pred_object_boxes,
    }
    return stats
def box_label_recall(gt_hois, human_boxes, object_boxes, object_labels,
                     iou_thresh, hoi_list):
    """Compute recall like box_recall, but class-aware for objects: a
    predicted object box counts only when its IoU clears the threshold
    AND its predicted label equals the gt HOI's object category.

    Fixes vs. the original: the bare ``except: import pdb; pdb.set_trace()``
    debug traps around ``compute_iou`` are removed (debugger breakpoints
    in production code; on resume ``iou`` was unbound, raising NameError).

    Args:
        gt_hois: per-HOI-type dicts with keys 'id', 'connections',
            'human_bboxes', 'object_bboxes' ('invis' is present but unused).
        human_boxes: predicted human boxes.
        object_boxes: predicted object boxes, parallel to object_labels.
        object_labels: predicted class name per object box.
        iou_thresh: IoU threshold at or above which a gt box is recalled.
        hoi_list: HOI descriptors; each has at least 'id' and 'object'.

    Returns:
        dict with recalls (None when the corresponding gt count is zero),
        raw counts, and proposal counts.
    """
    num_pred_human_boxes = len(human_boxes)
    num_pred_object_boxes = len(object_boxes)
    num_pred_connections = num_pred_human_boxes * num_pred_object_boxes
    hoi_dict = {hoi['id']: hoi for hoi in hoi_list}

    def human_flags(gt_boxes):
        # flags[i] is True iff some predicted human box overlaps
        # gt_boxes[i] with IoU >= iou_thresh.
        flags = [False] * len(gt_boxes)
        for i, gt_box in enumerate(gt_boxes):
            for box in human_boxes:
                if compute_iou(box, gt_box) >= iou_thresh:
                    flags[i] = True
                    break
        return flags

    def object_flags(gt_boxes, gt_label):
        # Same matching as human_flags, with the extra requirement that
        # the predicted label equals the gt HOI's object category.
        flags = [False] * len(gt_boxes)
        for i, gt_box in enumerate(gt_boxes):
            for box, label in zip(object_boxes, object_labels):
                if compute_iou(box, gt_box) >= iou_thresh and label == gt_label:
                    flags[i] = True
                    break
        return flags

    def safe_ratio(num, den):
        # Recall is undefined (None) when there are no gt instances.
        return None if den == 0 else num / den

    num_gt_connections_recalled = 0
    num_gt_connections = 0
    num_gt_human_boxes_recalled = 0
    num_gt_human_boxes = 0
    num_gt_object_boxes_recalled = 0
    num_gt_object_boxes = 0
    for hois_per_type in gt_hois:
        gt_hoi = hoi_dict[hois_per_type['id']]
        gt_connections = hois_per_type['connections']
        gt_human_boxes = hois_per_type['human_bboxes']
        gt_object_boxes = hois_per_type['object_bboxes']

        gt_human_boxes_recalled = human_flags(gt_human_boxes)
        gt_object_boxes_recalled = object_flags(
            gt_object_boxes, gt_hoi['object'])

        # A gt connection (i, j) is recalled iff both of its endpoints are.
        gt_connections_recalled = [
            gt_human_boxes_recalled[i] and gt_object_boxes_recalled[j]
            for (i, j) in gt_connections
        ]

        num_gt_connections += len(gt_connections)
        num_gt_connections_recalled += gt_connections_recalled.count(True)
        num_gt_human_boxes += len(gt_human_boxes)
        num_gt_human_boxes_recalled += gt_human_boxes_recalled.count(True)
        num_gt_object_boxes += len(gt_object_boxes)
        num_gt_object_boxes_recalled += gt_object_boxes_recalled.count(True)

    stats = {
        'connection_recall': safe_ratio(
            num_gt_connections_recalled, num_gt_connections),
        'human_recall': safe_ratio(
            num_gt_human_boxes_recalled, num_gt_human_boxes),
        'object_recall': safe_ratio(
            num_gt_object_boxes_recalled, num_gt_object_boxes),
        'num_gt_connections_recalled': num_gt_connections_recalled,
        'num_gt_connections': num_gt_connections,
        'num_gt_human_boxes_recalled': num_gt_human_boxes_recalled,
        'num_gt_human_boxes': num_gt_human_boxes,
        'num_gt_object_boxes_recalled': num_gt_object_boxes_recalled,
        'num_gt_object_boxes': num_gt_object_boxes,
        'num_connection_proposals': num_pred_connections,
        'num_human_proposals': num_pred_human_boxes,
        'num_object_proposals': num_pred_object_boxes,
    }
    return stats
def evaluate_boxes(exp_const, data_const):
    """Evaluate selected box proposals (class-agnostic) on the test set.

    Reads selected_coco_cls_dets.hdf5 from exp_const.exp_dir, scores each
    test image's proposals against its gt HOI boxes with box_recall, and
    writes the aggregated statistics to eval_stats_boxes.json.

    Fixes vs. the original: on IndexError the loop previously fell
    through and accumulated a stale (or, on the first image, unbound)
    recall_stats — it now skips the image; the num_images adjustment
    subtracted the *cumulative* miss count on every iteration — it is now
    applied once after the loop; the HDF5 file is opened explicitly
    read-only (relying on h5py's default mode is deprecated).
    """
    select_boxes_dir = exp_const.exp_dir
    select_boxes_h5py = os.path.join(
        select_boxes_dir,
        'selected_coco_cls_dets.hdf5')
    select_boxes = h5py.File(select_boxes_h5py, 'r')

    print('Loading anno_list.json ...')
    anno_list = io.load_json_object(data_const.anno_list_json)

    print('Evaluating box proposals ...')
    evaluation_stats = {
        'num_gt_connections_recalled': 0,
        'num_gt_connections': 0,
        'num_gt_human_boxes_recalled': 0,
        'num_gt_human_boxes': 0,
        'num_gt_object_boxes_recalled': 0,
        'num_gt_object_boxes': 0,
        'num_connection_proposals': 0,
        'num_human_proposals': 0,
        'num_object_proposals': 0,
    }
    index_error_misses = 0
    num_images = 0
    for anno in tqdm(anno_list):
        global_id = anno['global_id']
        # Only test-set images contribute to the evaluation.
        if 'test' in global_id:
            num_images += 1
        else:
            continue

        boxes_scores_rpn_ids = \
            select_boxes[global_id]['boxes_scores_rpn_ids'][()]
        start_end_ids = select_boxes[global_id]['start_end_ids'][()]
        selected_dets = {'boxes': {}}
        for cls_ind, cls_name in enumerate(COCO_CLASSES):
            start_id, end_id = start_end_ids[cls_ind]
            # Columns 0-3 hold the box coordinates.
            selected_dets['boxes'][cls_name] = \
                boxes_scores_rpn_ids[start_id:end_id, :4]

        human_boxes = selected_dets['boxes']['person']
        object_boxes = []
        object_labels = []
        for cls_name in selected_dets['boxes']:
            if cls_name in ['person', 'background']:
                continue
            cls_object_boxes = selected_dets['boxes'][cls_name]
            object_boxes.append(cls_object_boxes)
            object_labels += [cls_name] * cls_object_boxes.shape[0]
        object_boxes = np.concatenate(object_boxes)
        all_boxes = np.concatenate((human_boxes, object_boxes))

        try:
            # Class-agnostic evaluation: the full proposal pool serves as
            # both human and object candidates.
            recall_stats = box_recall(
                anno['hois'],
                all_boxes.tolist(),
                all_boxes.tolist(),
                exp_const.iou_thresh)
        except IndexError:
            # Skip images the recall computation cannot handle instead of
            # accumulating stale/unbound stats.
            index_error_misses += 1
            continue

        for k in evaluation_stats.keys():
            evaluation_stats[k] += recall_stats[k]

    # Discount skipped images exactly once, after the loop.
    num_images -= index_error_misses

    evaluation_stats['human_recall'] = \
        evaluation_stats['num_gt_human_boxes_recalled'] / \
        evaluation_stats['num_gt_human_boxes']
    evaluation_stats['object_recall'] = \
        evaluation_stats['num_gt_object_boxes_recalled'] / \
        evaluation_stats['num_gt_object_boxes']
    evaluation_stats['connection_recall'] = \
        evaluation_stats['num_gt_connections_recalled'] / \
        evaluation_stats['num_gt_connections']
    evaluation_stats['average_human_proposals_per_image'] = \
        evaluation_stats['num_human_proposals'] / num_images
    evaluation_stats['average_object_proposals_per_image'] = \
        evaluation_stats['num_object_proposals'] / num_images
    evaluation_stats['average_connection_proposals_per_image'] = \
        evaluation_stats['average_human_proposals_per_image'] * \
        evaluation_stats['average_object_proposals_per_image']
    evaluation_stats['index_error_misses'] = index_error_misses

    evaluation_stats_json = os.path.join(
        exp_const.exp_dir,
        'eval_stats_boxes.json')
    io.dump_json_object(evaluation_stats, evaluation_stats_json)
def evaluate_boxes_and_labels(exp_const,data_const):
    """Evaluate selected detection boxes *and* class labels against HOI annos.

    Reads per-image detections from
    ``<exp_const.exp_dir>/selected_coco_cls_dets.hdf5``, accumulates
    label-aware recall statistics over the test split via ``box_label_recall``,
    and writes the aggregated metrics to
    ``<exp_const.exp_dir>/eval_stats_boxes_labels.json``.

    :param exp_const: experiment constants; must expose ``exp_dir`` and
        ``iou_thresh``.
    :param data_const: dataset constants; must expose ``anno_list_json`` and
        ``hoi_list_json``.
    """
    select_boxes_h5py = os.path.join(
        exp_const.exp_dir,
        'selected_coco_cls_dets.hdf5')
    # Open read-only: this function never writes to the detection file.
    select_boxes = h5py.File(select_boxes_h5py, 'r')
    print('Loading anno_list.json ...')
    anno_list = io.load_json_object(data_const.anno_list_json)
    print('Loading hoi_list.json ...')
    hoi_list = io.load_json_object(data_const.hoi_list_json)
    print('Evaluating box proposals ...')
    evaluation_stats = {
        'num_gt_connections_recalled': 0,
        'num_gt_connections': 0,
        'num_gt_human_boxes_recalled': 0,
        'num_gt_human_boxes': 0,
        'num_gt_object_boxes_recalled': 0,
        'num_gt_object_boxes': 0,
        'num_connection_proposals': 0,
        'num_human_proposals': 0,
        'num_object_proposals': 0,
    }
    index_error_misses = 0
    num_images = 0
    for anno in tqdm(anno_list):
        global_id = anno['global_id']
        # Only evaluate images from the test split.
        if 'test' not in global_id:
            continue
        num_images += 1
        boxes_scores_rpn_ids = select_boxes[global_id]['boxes_scores_rpn_ids'][()]
        start_end_ids = select_boxes[global_id]['start_end_ids'][()]
        # Slice out per-class boxes; first 4 columns are the box coordinates.
        class_boxes = {}
        for cls_ind, cls_name in enumerate(COCO_CLASSES):
            start_id, end_id = start_end_ids[cls_ind]
            class_boxes[cls_name] = boxes_scores_rpn_ids[start_id:end_id, :4]
        human_boxes = class_boxes['person']
        object_boxes = []
        object_labels = []
        for cls_name, cls_object_boxes in class_boxes.items():
            if cls_name in ('person', 'background'):
                continue
            object_boxes.append(cls_object_boxes)
            object_labels += [cls_name] * cls_object_boxes.shape[0]
        object_boxes = np.concatenate(object_boxes)
        try:
            recall_stats = box_label_recall(
                anno['hois'],
                human_boxes.tolist(),
                object_boxes.tolist(),
                object_labels,
                exp_const.iou_thresh,
                hoi_list)
        except IndexError:
            # BUGFIX: the original fell through and accumulated the stale
            # (or, on the first image, undefined) recall_stats from the
            # previous iteration, and subtracted the *cumulative* miss count
            # from num_images on every miss. Count this miss once and skip.
            index_error_misses += 1
            num_images -= 1
            continue
        for k in evaluation_stats:
            evaluation_stats[k] += recall_stats[k]
    select_boxes.close()
    evaluation_stats['human_recall'] = \
        evaluation_stats['num_gt_human_boxes_recalled'] / \
        evaluation_stats['num_gt_human_boxes']
    evaluation_stats['object_recall'] = \
        evaluation_stats['num_gt_object_boxes_recalled'] / \
        evaluation_stats['num_gt_object_boxes']
    evaluation_stats['connection_recall'] = \
        evaluation_stats['num_gt_connections_recalled'] / \
        evaluation_stats['num_gt_connections']
    evaluation_stats['average_human_proposals_per_image'] = \
        evaluation_stats['num_human_proposals'] / num_images
    evaluation_stats['average_object_proposals_per_image'] = \
        evaluation_stats['num_object_proposals'] / num_images
    evaluation_stats['average_connection_proposals_per_image'] = \
        evaluation_stats['average_human_proposals_per_image'] * \
        evaluation_stats['average_object_proposals_per_image']
    evaluation_stats['index_error_misses'] = index_error_misses
    evaluation_stats_json = os.path.join(
        exp_const.exp_dir,
        'eval_stats_boxes_labels.json')
    io.dump_json_object(evaluation_stats, evaluation_stats_json)
| 37.237113
| 89
| 0.663483
| 1,831
| 14,448
| 4.749317
| 0.079192
| 0.048298
| 0.060718
| 0.048298
| 0.900069
| 0.900069
| 0.897309
| 0.886385
| 0.886385
| 0.886385
| 0
| 0.005017
| 0.254983
| 14,448
| 388
| 90
| 37.237113
| 0.802861
| 0.031977
| 0
| 0.894231
| 0
| 0
| 0.158987
| 0.074056
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012821
| false
| 0
| 0.035256
| 0
| 0.054487
| 0.016026
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
66c5e85cc528e98bfb38ab62220649ebb12134be
| 150
|
py
|
Python
|
loldib/getratings/models/NA/na_xerath/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_xerath/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_xerath/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from .na_xerath_top import *
from .na_xerath_jng import *
from .na_xerath_mid import *
from .na_xerath_bot import *
from .na_xerath_sup import *
| 25
| 29
| 0.766667
| 25
| 150
| 4.2
| 0.36
| 0.285714
| 0.571429
| 0.685714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 150
| 5
| 30
| 30
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
dd28cc6920c90dc4e1982b60908d1bf464ee0b45
| 92,776
|
py
|
Python
|
hubspot/cms/hubdb/api/tables_api.py
|
Ronfer/hubspot-api-python
|
1c87274ecbba4aa3c7728f890ccc6e77b2b6d2e4
|
[
"Apache-2.0"
] | 117
|
2020-04-06T08:22:53.000Z
|
2022-03-18T03:41:29.000Z
|
hubspot/cms/hubdb/api/tables_api.py
|
Ronfer/hubspot-api-python
|
1c87274ecbba4aa3c7728f890ccc6e77b2b6d2e4
|
[
"Apache-2.0"
] | 62
|
2020-04-06T16:21:06.000Z
|
2022-03-17T16:50:44.000Z
|
hubspot/cms/hubdb/api/tables_api.py
|
Ronfer/hubspot-api-python
|
1c87274ecbba4aa3c7728f890ccc6e77b2b6d2e4
|
[
"Apache-2.0"
] | 45
|
2020-04-06T16:13:52.000Z
|
2022-03-30T21:33:17.000Z
|
# coding: utf-8
"""
HubDB endpoints
HubDB is a relational data store that presents data as rows, columns, and cells in a table, much like a spreadsheet. HubDB tables can be added or modified [in the HubSpot CMS](https://knowledge.hubspot.com/cos-general/how-to-edit-hubdb-tables), but you can also use the API endpoints documented here. For more information on HubDB tables and using their data on a HubSpot site, see the [CMS developers site](https://designers.hubspot.com/docs/tools/hubdb). You can also see the [documentation for dynamic pages](https://designers.hubspot.com/docs/tutorials/how-to-build-dynamic-pages-with-hubdb) for more details about the `useForPages` field. HubDB tables support `draft` and `published` versions. This allows you to update data in the table, either for testing or to allow for a manual approval process, without affecting any live pages using the existing data. Draft data can be reviewed, and published by a user working in HubSpot or published via the API. Draft data can also be discarded, allowing users to go back to the published version of the data without disrupting it. If a table is set to be `allowed for public access`, you can access the published version of the table and rows without any authentication by specifying the portal id via the query parameter `portalId`. # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from hubspot.cms.hubdb.api_client import ApiClient
from hubspot.cms.hubdb.exceptions import ApiTypeError, ApiValueError # noqa: F401
class TablesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def archive_table(self, table_id_or_name, **kwargs): # noqa: E501
"""Archive a table # noqa: E501
Archive (soft delete) an existing HubDB table. This archives both the published and draft versions. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.archive_table(table_id_or_name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str table_id_or_name: The ID or name of the table to archive. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs["_return_http_data_only"] = True
return self.archive_table_with_http_info(table_id_or_name, **kwargs) # noqa: E501
def archive_table_with_http_info(self, table_id_or_name, **kwargs): # noqa: E501
"""Archive a table # noqa: E501
Archive (soft delete) an existing HubDB table. This archives both the published and draft versions. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.archive_table_with_http_info(table_id_or_name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str table_id_or_name: The ID or name of the table to archive. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ["table_id_or_name"]
all_params.extend(["async_req", "_return_http_data_only", "_preload_content", "_request_timeout"])
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method archive_table" % key)
local_var_params[key] = val
del local_var_params["kwargs"]
# verify the required parameter 'table_id_or_name' is set
if self.api_client.client_side_validation and ("table_id_or_name" not in local_var_params or local_var_params["table_id_or_name"] is None): # noqa: E501 # noqa: E501
raise ApiValueError("Missing the required parameter `table_id_or_name` when calling `archive_table`") # noqa: E501
collection_formats = {}
path_params = {}
if "table_id_or_name" in local_var_params:
path_params["tableIdOrName"] = local_var_params["table_id_or_name"] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(["*/*"]) # noqa: E501
# Authentication setting
auth_settings = ["hapikey", "oauth2_legacy"] # noqa: E501
return self.api_client.call_api(
"/cms/v3/hubdb/tables/{tableIdOrName}",
"DELETE",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get("_return_http_data_only"), # noqa: E501
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
def clone_draft_table(self, table_id_or_name, hub_db_table_clone_request, **kwargs): # noqa: E501
"""Clone a table # noqa: E501
Clone an existing HubDB table. The `newName` and `newLabel` of the new table can be sent as JSON in the `body` parameter. This will create the cloned table as a `draft`. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.clone_draft_table(table_id_or_name, hub_db_table_clone_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str table_id_or_name: The ID or name of the table to clone. (required)
:param HubDbTableCloneRequest hub_db_table_clone_request: JSON object with the properties newName and newLabel. You can set copyRows to false to clone the table with copying rows and default is true. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: HubDbTableV3
If the method is called asynchronously,
returns the request thread.
"""
kwargs["_return_http_data_only"] = True
return self.clone_draft_table_with_http_info(table_id_or_name, hub_db_table_clone_request, **kwargs) # noqa: E501
def clone_draft_table_with_http_info(self, table_id_or_name, hub_db_table_clone_request, **kwargs): # noqa: E501
"""Clone a table # noqa: E501
Clone an existing HubDB table. The `newName` and `newLabel` of the new table can be sent as JSON in the `body` parameter. This will create the cloned table as a `draft`. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.clone_draft_table_with_http_info(table_id_or_name, hub_db_table_clone_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str table_id_or_name: The ID or name of the table to clone. (required)
:param HubDbTableCloneRequest hub_db_table_clone_request: JSON object with the properties newName and newLabel. You can set copyRows to false to clone the table with copying rows and default is true. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(HubDbTableV3, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ["table_id_or_name", "hub_db_table_clone_request"]
all_params.extend(["async_req", "_return_http_data_only", "_preload_content", "_request_timeout"])
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method clone_draft_table" % key)
local_var_params[key] = val
del local_var_params["kwargs"]
# verify the required parameter 'table_id_or_name' is set
if self.api_client.client_side_validation and ("table_id_or_name" not in local_var_params or local_var_params["table_id_or_name"] is None): # noqa: E501 # noqa: E501
raise ApiValueError("Missing the required parameter `table_id_or_name` when calling `clone_draft_table`") # noqa: E501
# verify the required parameter 'hub_db_table_clone_request' is set
if self.api_client.client_side_validation and ("hub_db_table_clone_request" not in local_var_params or local_var_params["hub_db_table_clone_request"] is None): # noqa: E501 # noqa: E501
raise ApiValueError("Missing the required parameter `hub_db_table_clone_request` when calling `clone_draft_table`") # noqa: E501
collection_formats = {}
path_params = {}
if "table_id_or_name" in local_var_params:
path_params["tableIdOrName"] = local_var_params["table_id_or_name"] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if "hub_db_table_clone_request" in local_var_params:
body_params = local_var_params["hub_db_table_clone_request"]
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(["application/json", "*/*"]) # noqa: E501
# HTTP header `Content-Type`
header_params["Content-Type"] = self.api_client.select_header_content_type(["application/json"]) # noqa: E501 # noqa: E501
# Authentication setting
auth_settings = ["hapikey", "oauth2_legacy"] # noqa: E501
return self.api_client.call_api(
"/cms/v3/hubdb/tables/{tableIdOrName}/draft/clone",
"POST",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="HubDbTableV3", # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get("_return_http_data_only"), # noqa: E501
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
def create_table(self, hub_db_table_v3_request, **kwargs): # noqa: E501
"""Create a new table # noqa: E501
Creates a new draft HubDB table given a JSON schema. The table name and label should be unique for each account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_table(hub_db_table_v3_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param HubDbTableV3Request hub_db_table_v3_request: The JSON schema for the table being created. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: HubDbTableV3
If the method is called asynchronously,
returns the request thread.
"""
kwargs["_return_http_data_only"] = True
return self.create_table_with_http_info(hub_db_table_v3_request, **kwargs) # noqa: E501
def create_table_with_http_info(self, hub_db_table_v3_request, **kwargs): # noqa: E501
"""Create a new table # noqa: E501
Creates a new draft HubDB table given a JSON schema. The table name and label should be unique for each account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_table_with_http_info(hub_db_table_v3_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param HubDbTableV3Request hub_db_table_v3_request: The JSON schema for the table being created. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(HubDbTableV3, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ["hub_db_table_v3_request"]
all_params.extend(["async_req", "_return_http_data_only", "_preload_content", "_request_timeout"])
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method create_table" % key)
local_var_params[key] = val
del local_var_params["kwargs"]
# verify the required parameter 'hub_db_table_v3_request' is set
if self.api_client.client_side_validation and ("hub_db_table_v3_request" not in local_var_params or local_var_params["hub_db_table_v3_request"] is None): # noqa: E501 # noqa: E501
raise ApiValueError("Missing the required parameter `hub_db_table_v3_request` when calling `create_table`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if "hub_db_table_v3_request" in local_var_params:
body_params = local_var_params["hub_db_table_v3_request"]
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(["application/json", "*/*"]) # noqa: E501
# HTTP header `Content-Type`
header_params["Content-Type"] = self.api_client.select_header_content_type(["application/json"]) # noqa: E501 # noqa: E501
# Authentication setting
auth_settings = ["hapikey", "oauth2_legacy"] # noqa: E501
return self.api_client.call_api(
"/cms/v3/hubdb/tables",
"POST",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="HubDbTableV3", # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get("_return_http_data_only"), # noqa: E501
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
def export_draft_table(self, table_id_or_name, **kwargs): # noqa: E501
"""Export a draft table # noqa: E501
Exports the `draft` version of a table to CSV / EXCEL format. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_draft_table(table_id_or_name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str table_id_or_name: The ID or name of the table to export. (required)
:param str format: The file format to export. Possible values include `CSV`, `XLSX`, and `XLS`.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs["_return_http_data_only"] = True
return self.export_draft_table_with_http_info(table_id_or_name, **kwargs) # noqa: E501
def export_draft_table_with_http_info(self, table_id_or_name, **kwargs): # noqa: E501
"""Export a draft table # noqa: E501
Exports the `draft` version of a table to CSV / EXCEL format. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_draft_table_with_http_info(table_id_or_name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str table_id_or_name: The ID or name of the table to export. (required)
:param str format: The file format to export. Possible values include `CSV`, `XLSX`, and `XLS`.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(file, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ["table_id_or_name", "format"]
all_params.extend(["async_req", "_return_http_data_only", "_preload_content", "_request_timeout"])
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method export_draft_table" % key)
local_var_params[key] = val
del local_var_params["kwargs"]
# verify the required parameter 'table_id_or_name' is set
if self.api_client.client_side_validation and ("table_id_or_name" not in local_var_params or local_var_params["table_id_or_name"] is None): # noqa: E501 # noqa: E501
raise ApiValueError("Missing the required parameter `table_id_or_name` when calling `export_draft_table`") # noqa: E501
collection_formats = {}
path_params = {}
if "table_id_or_name" in local_var_params:
path_params["tableIdOrName"] = local_var_params["table_id_or_name"] # noqa: E501
query_params = []
if "format" in local_var_params and local_var_params["format"] is not None: # noqa: E501
query_params.append(("format", local_var_params["format"])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(["application/vnd.ms-excel", "text/csv", "*/*"]) # noqa: E501
# Authentication setting
auth_settings = ["hapikey", "oauth2_legacy"] # noqa: E501
return self.api_client.call_api(
"/cms/v3/hubdb/tables/{tableIdOrName}/draft/export",
"GET",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="file", # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get("_return_http_data_only"), # noqa: E501
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
def export_table(self, table_id_or_name, **kwargs): # noqa: E501
"""Export a published version of a table # noqa: E501
Exports the `published` version of a table to CSV / EXCEL format. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_table(table_id_or_name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str table_id_or_name: The ID or name of the table to export. (required)
:param str format: The file format to export. Possible values include `CSV`, `XLSX`, and `XLS`.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs["_return_http_data_only"] = True
return self.export_table_with_http_info(table_id_or_name, **kwargs) # noqa: E501
def export_table_with_http_info(self, table_id_or_name, **kwargs): # noqa: E501
"""Export a published version of a table # noqa: E501
Exports the `published` version of a table to CSV / EXCEL format. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_table_with_http_info(table_id_or_name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str table_id_or_name: The ID or name of the table to export. (required)
:param str format: The file format to export. Possible values include `CSV`, `XLSX`, and `XLS`.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(file, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ["table_id_or_name", "format"]
all_params.extend(["async_req", "_return_http_data_only", "_preload_content", "_request_timeout"])
for key, val in six.iteritems(local_var_params["kwargs"]):
if key not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method export_table" % key)
local_var_params[key] = val
del local_var_params["kwargs"]
# verify the required parameter 'table_id_or_name' is set
if self.api_client.client_side_validation and ("table_id_or_name" not in local_var_params or local_var_params["table_id_or_name"] is None): # noqa: E501 # noqa: E501
raise ApiValueError("Missing the required parameter `table_id_or_name` when calling `export_table`") # noqa: E501
collection_formats = {}
path_params = {}
if "table_id_or_name" in local_var_params:
path_params["tableIdOrName"] = local_var_params["table_id_or_name"] # noqa: E501
query_params = []
if "format" in local_var_params and local_var_params["format"] is not None: # noqa: E501
query_params.append(("format", local_var_params["format"])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params["Accept"] = self.api_client.select_header_accept(["application/vnd.ms-excel", "text/csv", "*/*"]) # noqa: E501
# Authentication setting
auth_settings = ["hapikey", "oauth2_legacy"] # noqa: E501
return self.api_client.call_api(
"/cms/v3/hubdb/tables/{tableIdOrName}/export",
"GET",
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type="file", # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get("async_req"),
_return_http_data_only=local_var_params.get("_return_http_data_only"), # noqa: E501
_preload_content=local_var_params.get("_preload_content", True),
_request_timeout=local_var_params.get("_request_timeout"),
collection_formats=collection_formats,
)
def get_all_draft_tables(self, **kwargs): # noqa: E501
"""Return all draft tables # noqa: E501
Returns the details for each draft table defined in the specified account, including column definitions. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_draft_tables(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param list[str] sort: Specifies which fields to use for sorting results. Valid fields are `name`, `createdAt`, `updatedAt`, `createdBy`, `updatedBy`. `createdAt` will be used by default.
:param str after: The cursor token value to get the next set of results. You can get this from the `paging.next.after` JSON property of a paged response containing more results.
:param int limit: The maximum number of results to return. Default is 1000.
:param datetime created_at: Only return tables created at exactly the specified time.
:param datetime created_after: Only return tables created after the specified time.
:param datetime created_before: Only return tables created before the specified time.
:param datetime updated_at: Only return tables last updated at exactly the specified time.
:param datetime updated_after: Only return tables last updated after the specified time.
:param datetime updated_before: Only return tables last updated before the specified time.
:param bool archived: Specifies whether to return archived tables. Defaults to `false`.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: CollectionResponseWithTotalHubDbTableV3ForwardPaging
If the method is called asynchronously,
returns the request thread.
"""
kwargs["_return_http_data_only"] = True
return self.get_all_draft_tables_with_http_info(**kwargs) # noqa: E501
def get_all_draft_tables_with_http_info(self, **kwargs):  # noqa: E501
    """Return all draft tables.  # noqa: E501

    Fetches the details for every draft table defined in the account,
    including column definitions.  # noqa: E501

    The request is synchronous by default; pass ``async_req=True`` for an
    asynchronous request:

    >>> thread = api.get_all_draft_tables_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute the request asynchronously
    :param list[str] sort: Fields to sort results by. Valid fields are
        `name`, `createdAt`, `updatedAt`, `createdBy`, `updatedBy`;
        `createdAt` is used by default.
    :param str after: Paging cursor taken from the `paging.next.after`
        JSON property of a previous paged response.
    :param int limit: Maximum number of results to return (default 1000).
    :param datetime created_at: Only tables created at exactly this time.
    :param datetime created_after: Only tables created after this time.
    :param datetime created_before: Only tables created before this time.
    :param datetime updated_at: Only tables last updated at exactly this time.
    :param datetime updated_after: Only tables last updated after this time.
    :param datetime updated_before: Only tables last updated before this time.
    :param bool archived: Whether to return archived tables (default `false`).
    :param _return_http_data_only: return only the response body, without
        status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data (default True)
    :param _request_timeout: timeout for this request; a single number for a
        total timeout, or a (connection, read) pair of timeouts
    :return: tuple(CollectionResponseWithTotalHubDbTableV3ForwardPaging, status_code(int), headers(HTTPHeaderDict))
        or, when called asynchronously, the request thread.
    """
    local_var_params = locals()

    all_params = [
        "sort",
        "after",
        "limit",
        "created_at",
        "created_after",
        "created_before",
        "updated_at",
        "updated_after",
        "updated_before",
        "archived",
    ]
    all_params.extend(["async_req", "_return_http_data_only", "_preload_content", "_request_timeout"])

    # Reject unknown keyword arguments, then fold the known ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in local_var_params["kwargs"].items():
        if key not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method get_all_draft_tables" % key)
        local_var_params[key] = val
    del local_var_params["kwargs"]

    collection_formats = {}
    path_params = {}

    # (python name, query-string name) pairs, in wire order.
    query_param_names = (
        ("sort", "sort"),
        ("after", "after"),
        ("limit", "limit"),
        ("created_at", "createdAt"),
        ("created_after", "createdAfter"),
        ("created_before", "createdBefore"),
        ("updated_at", "updatedAt"),
        ("updated_after", "updatedAfter"),
        ("updated_before", "updatedBefore"),
        ("archived", "archived"),
    )
    query_params = []
    for py_name, wire_name in query_param_names:
        value = local_var_params.get(py_name)
        if value is not None:
            query_params.append((wire_name, value))
            if py_name == "sort":
                # `sort` is a repeatable (multi-valued) query parameter.
                collection_formats["sort"] = "multi"

    # HTTP header `Accept`
    header_params = {"Accept": self.api_client.select_header_accept(["application/json", "*/*"])}  # noqa: E501
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ["hapikey", "oauth2_legacy"]  # noqa: E501

    return self.api_client.call_api(
        "/cms/v3/hubdb/tables/draft",
        "GET",
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type="CollectionResponseWithTotalHubDbTableV3ForwardPaging",  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get("async_req"),
        _return_http_data_only=local_var_params.get("_return_http_data_only"),  # noqa: E501
        _preload_content=local_var_params.get("_preload_content", True),
        _request_timeout=local_var_params.get("_request_timeout"),
        collection_formats=collection_formats,
    )
def get_all_tables(self, **kwargs):  # noqa: E501
    """Get all published tables.  # noqa: E501

    Fetches the details for the `published` version of each table defined
    in an account, including column definitions.  # noqa: E501

    The request is synchronous by default; pass ``async_req=True`` for an
    asynchronous request:

    >>> thread = api.get_all_tables(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute the request asynchronously
    :param list[str] sort: Fields to sort results by. Valid fields are
        `name`, `createdAt`, `updatedAt`, `createdBy`, `updatedBy`;
        `createdAt` is used by default.
    :param str after: Paging cursor taken from the `paging.next.after`
        JSON property of a previous paged response.
    :param int limit: Maximum number of results to return (default 1000).
    :param datetime created_at: Only tables created at exactly this time.
    :param datetime created_after: Only tables created after this time.
    :param datetime created_before: Only tables created before this time.
    :param datetime updated_at: Only tables last updated at exactly this time.
    :param datetime updated_after: Only tables last updated after this time.
    :param datetime updated_before: Only tables last updated before this time.
    :param bool archived: Whether to return archived tables (default `false`).
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data (default True)
    :param _request_timeout: timeout for this request; a single number for a
        total timeout, or a (connection, read) pair of timeouts
    :return: CollectionResponseWithTotalHubDbTableV3ForwardPaging
        or, when called asynchronously, the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing body-only returns.
    return self.get_all_tables_with_http_info(**dict(kwargs, _return_http_data_only=True))  # noqa: E501
def get_all_tables_with_http_info(self, **kwargs):  # noqa: E501
    """Get all published tables.  # noqa: E501

    Fetches the details for the `published` version of each table defined
    in an account, including column definitions.  # noqa: E501

    The request is synchronous by default; pass ``async_req=True`` for an
    asynchronous request:

    >>> thread = api.get_all_tables_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute the request asynchronously
    :param list[str] sort: Fields to sort results by. Valid fields are
        `name`, `createdAt`, `updatedAt`, `createdBy`, `updatedBy`;
        `createdAt` is used by default.
    :param str after: Paging cursor taken from the `paging.next.after`
        JSON property of a previous paged response.
    :param int limit: Maximum number of results to return (default 1000).
    :param datetime created_at: Only tables created at exactly this time.
    :param datetime created_after: Only tables created after this time.
    :param datetime created_before: Only tables created before this time.
    :param datetime updated_at: Only tables last updated at exactly this time.
    :param datetime updated_after: Only tables last updated after this time.
    :param datetime updated_before: Only tables last updated before this time.
    :param bool archived: Whether to return archived tables (default `false`).
    :param _return_http_data_only: return only the response body, without
        status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data (default True)
    :param _request_timeout: timeout for this request; a single number for a
        total timeout, or a (connection, read) pair of timeouts
    :return: tuple(CollectionResponseWithTotalHubDbTableV3ForwardPaging, status_code(int), headers(HTTPHeaderDict))
        or, when called asynchronously, the request thread.
    """
    local_var_params = locals()

    all_params = [
        "sort",
        "after",
        "limit",
        "created_at",
        "created_after",
        "created_before",
        "updated_at",
        "updated_after",
        "updated_before",
        "archived",
    ]
    all_params.extend(["async_req", "_return_http_data_only", "_preload_content", "_request_timeout"])

    # Reject unknown keyword arguments, then fold the known ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in local_var_params["kwargs"].items():
        if key not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method get_all_tables" % key)
        local_var_params[key] = val
    del local_var_params["kwargs"]

    collection_formats = {}
    path_params = {}

    # (python name, query-string name) pairs, in wire order.
    query_param_names = (
        ("sort", "sort"),
        ("after", "after"),
        ("limit", "limit"),
        ("created_at", "createdAt"),
        ("created_after", "createdAfter"),
        ("created_before", "createdBefore"),
        ("updated_at", "updatedAt"),
        ("updated_after", "updatedAfter"),
        ("updated_before", "updatedBefore"),
        ("archived", "archived"),
    )
    query_params = []
    for py_name, wire_name in query_param_names:
        value = local_var_params.get(py_name)
        if value is not None:
            query_params.append((wire_name, value))
            if py_name == "sort":
                # `sort` is a repeatable (multi-valued) query parameter.
                collection_formats["sort"] = "multi"

    # HTTP header `Accept`
    header_params = {"Accept": self.api_client.select_header_accept(["application/json", "*/*"])}  # noqa: E501
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ["hapikey", "oauth2_legacy"]  # noqa: E501

    return self.api_client.call_api(
        "/cms/v3/hubdb/tables",
        "GET",
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type="CollectionResponseWithTotalHubDbTableV3ForwardPaging",  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get("async_req"),
        _return_http_data_only=local_var_params.get("_return_http_data_only"),  # noqa: E501
        _preload_content=local_var_params.get("_preload_content", True),
        _request_timeout=local_var_params.get("_request_timeout"),
        collection_formats=collection_formats,
    )
def get_draft_table_details_by_id(self, table_id_or_name, **kwargs):  # noqa: E501
    """Get details for a draft table.  # noqa: E501

    Fetches the details for the `draft` version of a specific HubDB table,
    including the definitions for its columns and the number of rows it
    contains.  # noqa: E501

    The request is synchronous by default; pass ``async_req=True`` for an
    asynchronous request:

    >>> thread = api.get_draft_table_details_by_id(table_id_or_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute the request asynchronously
    :param str table_id_or_name: The ID or name of the table to return. (required)
    :param bool archived: Set to `true` to return an archived table
        (defaults to `false`).
    :param bool include_foreign_ids: Set to `true` to populate foreign ID
        values in the result.
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data (default True)
    :param _request_timeout: timeout for this request; a single number for a
        total timeout, or a (connection, read) pair of timeouts
    :return: HubDbTableV3
        or, when called asynchronously, the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing body-only returns.
    return self.get_draft_table_details_by_id_with_http_info(table_id_or_name, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def get_draft_table_details_by_id_with_http_info(self, table_id_or_name, **kwargs):  # noqa: E501
    """Get details for a draft table.  # noqa: E501

    Fetches the details for the `draft` version of a specific HubDB table,
    including the definitions for its columns and the number of rows it
    contains.  # noqa: E501

    The request is synchronous by default; pass ``async_req=True`` for an
    asynchronous request:

    >>> thread = api.get_draft_table_details_by_id_with_http_info(table_id_or_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute the request asynchronously
    :param str table_id_or_name: The ID or name of the table to return. (required)
    :param bool archived: Set to `true` to return an archived table
        (defaults to `false`).
    :param bool include_foreign_ids: Set to `true` to populate foreign ID
        values in the result.
    :param _return_http_data_only: return only the response body, without
        status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data (default True)
    :param _request_timeout: timeout for this request; a single number for a
        total timeout, or a (connection, read) pair of timeouts
    :return: tuple(HubDbTableV3, status_code(int), headers(HTTPHeaderDict))
        or, when called asynchronously, the request thread.
    """
    local_var_params = locals()

    all_params = ["table_id_or_name", "archived", "include_foreign_ids"]
    all_params.extend(["async_req", "_return_http_data_only", "_preload_content", "_request_timeout"])

    # Reject unknown keyword arguments, then fold the known ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in local_var_params["kwargs"].items():
        if key not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method get_draft_table_details_by_id" % key)
        local_var_params[key] = val
    del local_var_params["kwargs"]

    # verify the required parameter 'table_id_or_name' is set
    if self.api_client.client_side_validation and local_var_params.get("table_id_or_name") is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `table_id_or_name` when calling `get_draft_table_details_by_id`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if "table_id_or_name" in local_var_params:
        path_params["tableIdOrName"] = local_var_params["table_id_or_name"]  # noqa: E501

    query_params = []
    if local_var_params.get("archived") is not None:  # noqa: E501
        query_params.append(("archived", local_var_params["archived"]))  # noqa: E501
    if local_var_params.get("include_foreign_ids") is not None:  # noqa: E501
        query_params.append(("includeForeignIds", local_var_params["include_foreign_ids"]))  # noqa: E501

    # HTTP header `Accept`
    header_params = {"Accept": self.api_client.select_header_accept(["application/json", "*/*"])}  # noqa: E501
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ["hapikey", "oauth2_legacy"]  # noqa: E501

    return self.api_client.call_api(
        "/cms/v3/hubdb/tables/{tableIdOrName}/draft",
        "GET",
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type="HubDbTableV3",  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get("async_req"),
        _return_http_data_only=local_var_params.get("_return_http_data_only"),  # noqa: E501
        _preload_content=local_var_params.get("_preload_content", True),
        _request_timeout=local_var_params.get("_request_timeout"),
        collection_formats=collection_formats,
    )
def get_table_details(self, table_id_or_name, **kwargs):  # noqa: E501
    """Get details for a published table.  # noqa: E501

    Fetches the details for the `published` version of the specified table,
    including the definitions for its columns and the number of rows it
    contains. **Note:** This endpoint can be accessed without any
    authentication if the table is set to be allowed for public access.  # noqa: E501

    The request is synchronous by default; pass ``async_req=True`` for an
    asynchronous request:

    >>> thread = api.get_table_details(table_id_or_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute the request asynchronously
    :param str table_id_or_name: The ID or name of the table to return. (required)
    :param bool archived: Set to `true` to return details for an archived
        table (defaults to `false`).
    :param bool include_foreign_ids: Set to `true` to populate foreign ID
        values in the result.
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data (default True)
    :param _request_timeout: timeout for this request; a single number for a
        total timeout, or a (connection, read) pair of timeouts
    :return: HubDbTableV3
        or, when called asynchronously, the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing body-only returns.
    return self.get_table_details_with_http_info(table_id_or_name, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def get_table_details_with_http_info(self, table_id_or_name, **kwargs):  # noqa: E501
    """Get details for a published table.  # noqa: E501

    Fetches the details for the `published` version of the specified table,
    including the definitions for its columns and the number of rows it
    contains. **Note:** This endpoint can be accessed without any
    authentication if the table is set to be allowed for public access.  # noqa: E501

    The request is synchronous by default; pass ``async_req=True`` for an
    asynchronous request:

    >>> thread = api.get_table_details_with_http_info(table_id_or_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute the request asynchronously
    :param str table_id_or_name: The ID or name of the table to return. (required)
    :param bool archived: Set to `true` to return details for an archived
        table (defaults to `false`).
    :param bool include_foreign_ids: Set to `true` to populate foreign ID
        values in the result.
    :param _return_http_data_only: return only the response body, without
        status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data (default True)
    :param _request_timeout: timeout for this request; a single number for a
        total timeout, or a (connection, read) pair of timeouts
    :return: tuple(HubDbTableV3, status_code(int), headers(HTTPHeaderDict))
        or, when called asynchronously, the request thread.
    """
    local_var_params = locals()

    all_params = ["table_id_or_name", "archived", "include_foreign_ids"]
    all_params.extend(["async_req", "_return_http_data_only", "_preload_content", "_request_timeout"])

    # Reject unknown keyword arguments, then fold the known ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in local_var_params["kwargs"].items():
        if key not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method get_table_details" % key)
        local_var_params[key] = val
    del local_var_params["kwargs"]

    # verify the required parameter 'table_id_or_name' is set
    if self.api_client.client_side_validation and local_var_params.get("table_id_or_name") is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `table_id_or_name` when calling `get_table_details`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if "table_id_or_name" in local_var_params:
        path_params["tableIdOrName"] = local_var_params["table_id_or_name"]  # noqa: E501

    query_params = []
    if local_var_params.get("archived") is not None:  # noqa: E501
        query_params.append(("archived", local_var_params["archived"]))  # noqa: E501
    if local_var_params.get("include_foreign_ids") is not None:  # noqa: E501
        query_params.append(("includeForeignIds", local_var_params["include_foreign_ids"]))  # noqa: E501

    # HTTP header `Accept`
    header_params = {"Accept": self.api_client.select_header_accept(["application/json", "*/*"])}  # noqa: E501
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ["hapikey", "oauth2_legacy"]  # noqa: E501

    return self.api_client.call_api(
        "/cms/v3/hubdb/tables/{tableIdOrName}",
        "GET",
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type="HubDbTableV3",  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get("async_req"),
        _return_http_data_only=local_var_params.get("_return_http_data_only"),  # noqa: E501
        _preload_content=local_var_params.get("_preload_content", True),
        _request_timeout=local_var_params.get("_request_timeout"),
        collection_formats=collection_formats,
    )
def import_draft_table(self, table_id_or_name, **kwargs):  # noqa: E501
    """Import data into draft table.  # noqa: E501

    Imports the contents of a CSV file into an existing HubDB table. Data is
    always imported into the `draft` version of the table; use the `/publish`
    endpoint to push the changes to the `published` version. This endpoint
    takes a multi-part POST request: the first part (named `config`) is a set
    of JSON-formatted options for the import, and the second part (named
    `file`) is the CSV file to import. Refer to the
    [overview section](https://developers.hubspot.com/docs/api/cms/hubdb#importing-tables)
    for the details and format of the JSON-formatted import options.  # noqa: E501

    The request is synchronous by default; pass ``async_req=True`` for an
    asynchronous request:

    >>> thread = api.import_draft_table(table_id_or_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute the request asynchronously
    :param str table_id_or_name: The ID of the destination table where data will be imported. (required)
    :param str config: Configuration for the import in JSON format as described above.
    :param file file: The source CSV file to be imported.
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data (default True)
    :param _request_timeout: timeout for this request; a single number for a
        total timeout, or a (connection, read) pair of timeouts
    :return: ImportResult
        or, when called asynchronously, the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing body-only returns.
    return self.import_draft_table_with_http_info(table_id_or_name, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def import_draft_table_with_http_info(self, table_id_or_name, **kwargs):  # noqa: E501
    """Import data into draft table.  # noqa: E501

    Imports the contents of a CSV file into an existing HubDB table. Data is
    always imported into the `draft` version of the table; use the `/publish`
    endpoint to push the changes to the `published` version. This endpoint
    takes a multi-part POST request: the first part (named `config`) is a set
    of JSON-formatted options for the import, and the second part (named
    `file`) is the CSV file to import. Refer to the
    [overview section](https://developers.hubspot.com/docs/api/cms/hubdb#importing-tables)
    for the details and format of the JSON-formatted import options.  # noqa: E501

    The request is synchronous by default; pass ``async_req=True`` for an
    asynchronous request:

    >>> thread = api.import_draft_table_with_http_info(table_id_or_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute the request asynchronously
    :param str table_id_or_name: The ID of the destination table where data will be imported. (required)
    :param str config: Configuration for the import in JSON format as described above.
    :param file file: The source CSV file to be imported.
    :param _return_http_data_only: return only the response body, without
        status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data (default True)
    :param _request_timeout: timeout for this request; a single number for a
        total timeout, or a (connection, read) pair of timeouts
    :return: tuple(ImportResult, status_code(int), headers(HTTPHeaderDict))
        or, when called asynchronously, the request thread.
    """
    local_var_params = locals()

    all_params = ["table_id_or_name", "config", "file"]
    all_params.extend(["async_req", "_return_http_data_only", "_preload_content", "_request_timeout"])

    # Reject unknown keyword arguments, then fold the known ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in local_var_params["kwargs"].items():
        if key not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method import_draft_table" % key)
        local_var_params[key] = val
    del local_var_params["kwargs"]

    # verify the required parameter 'table_id_or_name' is set
    if self.api_client.client_side_validation and local_var_params.get("table_id_or_name") is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `table_id_or_name` when calling `import_draft_table`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if "table_id_or_name" in local_var_params:
        path_params["tableIdOrName"] = local_var_params["table_id_or_name"]  # noqa: E501

    query_params = []
    header_params = {}

    # The import options travel as a form field; the CSV travels as a file
    # part of the multipart body.
    form_params = []
    local_var_files = {}
    if "config" in local_var_params:
        form_params.append(("config", local_var_params["config"]))  # noqa: E501
    if "file" in local_var_params:
        local_var_files["file"] = local_var_params["file"]  # noqa: E501

    body_params = None

    # HTTP header `Accept`
    header_params["Accept"] = self.api_client.select_header_accept(["application/json", "*/*"])  # noqa: E501
    # HTTP header `Content-Type`
    header_params["Content-Type"] = self.api_client.select_header_content_type(["multipart/form-data"])  # noqa: E501

    # Authentication setting
    auth_settings = ["hapikey", "oauth2_legacy"]  # noqa: E501

    return self.api_client.call_api(
        "/cms/v3/hubdb/tables/{tableIdOrName}/draft/import",
        "POST",
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type="ImportResult",  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get("async_req"),
        _return_http_data_only=local_var_params.get("_return_http_data_only"),  # noqa: E501
        _preload_content=local_var_params.get("_preload_content", True),
        _request_timeout=local_var_params.get("_request_timeout"),
        collection_formats=collection_formats,
    )
def publish_draft_table(self, table_id_or_name, **kwargs):  # noqa: E501
    """Publish a table from draft.  # noqa: E501

    Publishes the table by copying the data and table schema changes from the
    draft version to the published version, meaning any website pages using
    data from the table will be updated.  # noqa: E501

    The request is synchronous by default; pass ``async_req=True`` for an
    asynchronous request:

    >>> thread = api.publish_draft_table(table_id_or_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute the request asynchronously
    :param str table_id_or_name: The ID or name of the table to publish. (required)
    :param bool include_foreign_ids: Set to `true` to populate foreign ID
        values in the response.
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the response data (default True)
    :param _request_timeout: timeout for this request; a single number for a
        total timeout, or a (connection, read) pair of timeouts
    :return: HubDbTableV3
        or, when called asynchronously, the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing body-only returns.
    return self.publish_draft_table_with_http_info(table_id_or_name, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def publish_draft_table_with_http_info(self, table_id_or_name, **kwargs):  # noqa: E501
    """Publish a table from draft (full HTTP response variant).

    Copies the data and schema changes from the draft version of the table
    to the published version. Synchronous by default; pass
    ``async_req=True`` for an async request:

    >>> thread = api.publish_draft_table_with_http_info(table_id_or_name, async_req=True)
    >>> result = thread.get()

    :param str table_id_or_name: The ID or name of the table to publish. (required)
    :param bool include_foreign_ids: Set this to `true` to populate foreign ID values in the response.
    :param bool async_req: execute request asynchronously
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout in seconds, or a
        (connection, read) tuple.
    :return: tuple(HubDbTableV3, status_code(int), headers(HTTPHeaderDict)),
        or the request thread when called asynchronously.
    """
    allowed = {
        "table_id_or_name", "include_foreign_ids",
        "async_req", "_return_http_data_only", "_preload_content", "_request_timeout",
    }
    params = {"table_id_or_name": table_id_or_name}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method publish_draft_table" % key)
        params[key] = val

    # Client-side check for the required path parameter.
    if self.api_client.client_side_validation and params.get("table_id_or_name") is None:
        raise ApiValueError("Missing the required parameter `table_id_or_name` when calling `publish_draft_table`")  # noqa: E501

    path_params = {"tableIdOrName": params["table_id_or_name"]}

    query_params = []
    if params.get("include_foreign_ids") is not None:
        query_params.append(("includeForeignIds", params["include_foreign_ids"]))

    header_params = {
        "Accept": self.api_client.select_header_accept(["application/json", "*/*"]),
    }

    return self.api_client.call_api(
        "/cms/v3/hubdb/tables/{tableIdOrName}/draft/publish",
        "POST",
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type="HubDbTableV3",
        auth_settings=["hapikey", "oauth2_legacy"],
        async_req=params.get("async_req"),
        _return_http_data_only=params.get("_return_http_data_only"),
        _preload_content=params.get("_preload_content", True),
        _request_timeout=params.get("_request_timeout"),
        collection_formats={},
    )
def reset_draft_table(self, table_id_or_name, **kwargs):  # noqa: E501
    """Reset a draft table.

    Replaces the data in the `draft` version of the table with values from
    the `published` version; unpublished draft changes are lost.
    Synchronous by default; pass ``async_req=True`` for an async request:

    >>> thread = api.reset_draft_table(table_id_or_name, async_req=True)
    >>> result = thread.get()

    :param str table_id_or_name: The ID or name of the table to reset. (required)
    :param bool include_foreign_ids: Set this to `true` to populate foreign ID values in the response.
    :param bool async_req: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout in seconds, or a
        (connection, read) tuple.
    :return: HubDbTableV3, or the request thread when called asynchronously.
    """
    # Strip status code/headers from the result; delegate the real work.
    kwargs.update(_return_http_data_only=True)
    return self.reset_draft_table_with_http_info(table_id_or_name, **kwargs)  # noqa: E501
def reset_draft_table_with_http_info(self, table_id_or_name, **kwargs):  # noqa: E501
    """Reset a draft table (full HTTP response variant).

    Replaces the data in the `draft` version of the table with values from
    the `published` version; unpublished draft changes are lost.
    Synchronous by default; pass ``async_req=True`` for an async request:

    >>> thread = api.reset_draft_table_with_http_info(table_id_or_name, async_req=True)
    >>> result = thread.get()

    :param str table_id_or_name: The ID or name of the table to reset. (required)
    :param bool include_foreign_ids: Set this to `true` to populate foreign ID values in the response.
    :param bool async_req: execute request asynchronously
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout in seconds, or a
        (connection, read) tuple.
    :return: tuple(HubDbTableV3, status_code(int), headers(HTTPHeaderDict)),
        or the request thread when called asynchronously.
    """
    allowed = {
        "table_id_or_name", "include_foreign_ids",
        "async_req", "_return_http_data_only", "_preload_content", "_request_timeout",
    }
    params = {"table_id_or_name": table_id_or_name}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method reset_draft_table" % key)
        params[key] = val

    # Client-side check for the required path parameter.
    if self.api_client.client_side_validation and params.get("table_id_or_name") is None:
        raise ApiValueError("Missing the required parameter `table_id_or_name` when calling `reset_draft_table`")  # noqa: E501

    path_params = {"tableIdOrName": params["table_id_or_name"]}

    query_params = []
    if params.get("include_foreign_ids") is not None:
        query_params.append(("includeForeignIds", params["include_foreign_ids"]))

    header_params = {
        "Accept": self.api_client.select_header_accept(["application/json", "*/*"]),
    }

    return self.api_client.call_api(
        "/cms/v3/hubdb/tables/{tableIdOrName}/draft/reset",
        "POST",
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type="HubDbTableV3",
        auth_settings=["hapikey", "oauth2_legacy"],
        async_req=params.get("async_req"),
        _return_http_data_only=params.get("_return_http_data_only"),
        _preload_content=params.get("_preload_content", True),
        _request_timeout=params.get("_request_timeout"),
        collection_formats={},
    )
def unpublish_table(self, table_id_or_name, **kwargs):  # noqa: E501
    """Unpublish a table.

    Unpublishes the table, so website pages using its data stop rendering
    that data. Synchronous by default; pass ``async_req=True`` for an
    async request:

    >>> thread = api.unpublish_table(table_id_or_name, async_req=True)
    >>> result = thread.get()

    :param str table_id_or_name: The ID or name of the table to publish. (required)
    :param bool include_foreign_ids: Set this to `true` to populate foreign ID values in the response.
    :param bool async_req: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout in seconds, or a
        (connection, read) tuple.
    :return: HubDbTableV3, or the request thread when called asynchronously.
    """
    # Strip status code/headers from the result; delegate the real work.
    kwargs.update(_return_http_data_only=True)
    return self.unpublish_table_with_http_info(table_id_or_name, **kwargs)  # noqa: E501
def unpublish_table_with_http_info(self, table_id_or_name, **kwargs):  # noqa: E501
    """Unpublish a table (full HTTP response variant).

    Unpublishes the table, so website pages using its data stop rendering
    that data. Synchronous by default; pass ``async_req=True`` for an
    async request:

    >>> thread = api.unpublish_table_with_http_info(table_id_or_name, async_req=True)
    >>> result = thread.get()

    :param str table_id_or_name: The ID or name of the table to publish. (required)
    :param bool include_foreign_ids: Set this to `true` to populate foreign ID values in the response.
    :param bool async_req: execute request asynchronously
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout in seconds, or a
        (connection, read) tuple.
    :return: tuple(HubDbTableV3, status_code(int), headers(HTTPHeaderDict)),
        or the request thread when called asynchronously.
    """
    allowed = {
        "table_id_or_name", "include_foreign_ids",
        "async_req", "_return_http_data_only", "_preload_content", "_request_timeout",
    }
    params = {"table_id_or_name": table_id_or_name}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method unpublish_table" % key)
        params[key] = val

    # Client-side check for the required path parameter.
    if self.api_client.client_side_validation and params.get("table_id_or_name") is None:
        raise ApiValueError("Missing the required parameter `table_id_or_name` when calling `unpublish_table`")  # noqa: E501

    path_params = {"tableIdOrName": params["table_id_or_name"]}

    query_params = []
    if params.get("include_foreign_ids") is not None:
        query_params.append(("includeForeignIds", params["include_foreign_ids"]))

    header_params = {
        "Accept": self.api_client.select_header_accept(["application/json", "*/*"]),
    }

    return self.api_client.call_api(
        "/cms/v3/hubdb/tables/{tableIdOrName}/unpublish",
        "POST",
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type="HubDbTableV3",
        auth_settings=["hapikey", "oauth2_legacy"],
        async_req=params.get("async_req"),
        _return_http_data_only=params.get("_return_http_data_only"),
        _preload_content=params.get("_preload_content", True),
        _request_timeout=params.get("_request_timeout"),
        collection_formats={},
    )
def update_draft_table(self, table_id_or_name, hub_db_table_v3_request, **kwargs):  # noqa: E501
    """Update an existing table.

    Updates an existing HubDB table (columns can be added/removed, archived
    tables restored). Only the `draft` version is modified; use the publish
    endpoint to push changes live. To restore a table, include the query
    parameter `archived=true` and `\"archived\": false` in the json body.
    **Note:** include all columns in the input when adding/removing/updating
    a column — omitted existing columns are deleted.
    Synchronous by default; pass ``async_req=True`` for an async request:

    >>> thread = api.update_draft_table(table_id_or_name, hub_db_table_v3_request, async_req=True)
    >>> result = thread.get()

    :param str table_id_or_name: The ID or name of the table to update. (required)
    :param HubDbTableV3Request hub_db_table_v3_request: The JSON schema for the table being updated. (required)
    :param bool archived: Specifies whether to return archived tables. Defaults to `false`.
    :param bool include_foreign_ids: Set this to `true` to populate foreign ID values in the result.
    :param bool async_req: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout in seconds, or a
        (connection, read) tuple.
    :return: HubDbTableV3, or the request thread when called asynchronously.
    """
    # Strip status code/headers from the result; delegate the real work.
    kwargs.update(_return_http_data_only=True)
    return self.update_draft_table_with_http_info(table_id_or_name, hub_db_table_v3_request, **kwargs)  # noqa: E501
def update_draft_table_with_http_info(self, table_id_or_name, hub_db_table_v3_request, **kwargs):  # noqa: E501
    """Update an existing table (full HTTP response variant).

    Updates an existing HubDB table; only the `draft` version is modified.
    To restore a table, include the query parameter `archived=true` and
    `\"archived\": false` in the json body. **Note:** include all columns
    in the input when adding/removing/updating a column — omitted existing
    columns are deleted.
    Synchronous by default; pass ``async_req=True`` for an async request:

    >>> thread = api.update_draft_table_with_http_info(table_id_or_name, hub_db_table_v3_request, async_req=True)
    >>> result = thread.get()

    :param str table_id_or_name: The ID or name of the table to update. (required)
    :param HubDbTableV3Request hub_db_table_v3_request: The JSON schema for the table being updated. (required)
    :param bool archived: Specifies whether to return archived tables. Defaults to `false`.
    :param bool include_foreign_ids: Set this to `true` to populate foreign ID values in the result.
    :param bool async_req: execute request asynchronously
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding. Default is True.
    :param _request_timeout: total timeout in seconds, or a
        (connection, read) tuple.
    :return: tuple(HubDbTableV3, status_code(int), headers(HTTPHeaderDict)),
        or the request thread when called asynchronously.
    """
    allowed = {
        "table_id_or_name", "hub_db_table_v3_request", "archived", "include_foreign_ids",
        "async_req", "_return_http_data_only", "_preload_content", "_request_timeout",
    }
    params = {
        "table_id_or_name": table_id_or_name,
        "hub_db_table_v3_request": hub_db_table_v3_request,
    }
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method update_draft_table" % key)
        params[key] = val

    # Client-side checks for the two required parameters.
    if self.api_client.client_side_validation and params.get("table_id_or_name") is None:
        raise ApiValueError("Missing the required parameter `table_id_or_name` when calling `update_draft_table`")  # noqa: E501
    if self.api_client.client_side_validation and params.get("hub_db_table_v3_request") is None:
        raise ApiValueError("Missing the required parameter `hub_db_table_v3_request` when calling `update_draft_table`")  # noqa: E501

    path_params = {"tableIdOrName": params["table_id_or_name"]}

    query_params = []
    if params.get("archived") is not None:
        query_params.append(("archived", params["archived"]))
    if params.get("include_foreign_ids") is not None:
        query_params.append(("includeForeignIds", params["include_foreign_ids"]))

    header_params = {
        "Accept": self.api_client.select_header_accept(["application/json", "*/*"]),
        "Content-Type": self.api_client.select_header_content_type(["application/json"]),
    }

    return self.api_client.call_api(
        "/cms/v3/hubdb/tables/{tableIdOrName}/draft",
        "PATCH",
        path_params,
        query_params,
        header_params,
        body=params["hub_db_table_v3_request"],
        post_params=[],
        files={},
        response_type="HubDbTableV3",
        auth_settings=["hapikey", "oauth2_legacy"],
        async_req=params.get("async_req"),
        _return_http_data_only=params.get("_return_http_data_only"),
        _preload_content=params.get("_preload_content", True),
        _request_timeout=params.get("_request_timeout"),
        collection_formats={},
    )
| 57.660659
| 1,302
| 0.64922
| 11,736
| 92,776
| 4.885225
| 0.034339
| 0.041023
| 0.06471
| 0.034919
| 0.968168
| 0.965151
| 0.963494
| 0.96168
| 0.960581
| 0.95854
| 0
| 0.01474
| 0.276052
| 92,776
| 1,608
| 1,303
| 57.696517
| 0.838874
| 0.516233
| 0
| 0.820896
| 0
| 0
| 0.228957
| 0.053269
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043284
| false
| 0
| 0.01791
| 0
| 0.104478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dd4deb999e4f4db398b36908d68a20c06dcb3987
| 82,968
|
py
|
Python
|
lib_app.py
|
nihaalnz/Library-App-Python-tkinter
|
1c89c40f311dfd6866daa715267f2fcbdabe0d26
|
[
"MIT"
] | 2
|
2020-07-22T19:17:32.000Z
|
2021-04-24T17:17:16.000Z
|
lib_app.py
|
nihaalnz/Library-App-Python-tkinter
|
1c89c40f311dfd6866daa715267f2fcbdabe0d26
|
[
"MIT"
] | null | null | null |
lib_app.py
|
nihaalnz/Library-App-Python-tkinter
|
1c89c40f311dfd6866daa715267f2fcbdabe0d26
|
[
"MIT"
] | null | null | null |
# Importing modules
from tkinter import *
import tkinter as tk
from tkinter import ttk
from tkinter.font import Font
from tkinter import messagebox
import mysql.connector as mysql
from tkcalendar import DateEntry
import datetime
import webbrowser
import babel.numbers
from datetime import datetime as dt
from PIL import Image, ImageTk
import pygame
import time
import widgets as ctk
# Main window and initialization of modules
# NOTE: Tk() must be created before any other tkinter objects (fonts, widgets).
root = Tk()
root.title('Library Manager')
# Fixed 469x390 window placed at screen offset +300+300; resizing disabled below.
root.geometry('469x390+300+300')
root.iconbitmap('images/main.ico')
root.resizable(False, False)
# Start pygame's audio mixer and play the application startup sound.
pygame.mixer.init()
pygame.mixer.music.load('audio/main open.mp3')
pygame.mixer.music.play()
# Assigning font for the GUI
# NOTE(review): size is given as the string '11'; tkinter coerces it, but an int is conventional.
font_text = Font(family='helvetica', size='11')
# Function to show all logs
def all_logs():
    """Open a window listing every row of the `borrow` table in a sortable treeview.

    Reads the module-level `root` and `font_text`, plays sounds via pygame,
    and queries MySQL. Side effects only; returns None.
    Fix: the original ran the same SELECT twice on two separate connections
    (one just to count rows); now a single connection/fetch serves both the
    tree population and the status-bar count.
    """
    log = Toplevel(root)
    log.title('View all Visitors')
    log.focus_force()
    log.iconbitmap('images/all_log.ico')
    log.geometry('+150+200')
    log.resizable(False, False)

    def out():
        # Play the close sound, let it start, then destroy the window.
        pygame.mixer.music.load('audio/main open.mp3')
        pygame.mixer.music.play()
        time.sleep(0.3)
        log.destroy()

    def treeview_sort_column(tv, col, reverse):
        # Sort rows by the clicked column; re-bind so the next click toggles order.
        rows = [(tv.set(k, col), k) for k in tv.get_children('')]
        rows.sort(reverse=reverse)
        # rearrange items in sorted positions
        for index, (_, k) in enumerate(rows):
            tv.move(k, '', index)
        # reverse sort next time
        tv.heading(col, text=col,
                   command=lambda _col=col: treeview_sort_column(tv, _col, not reverse))

    # Fetch all borrow records once; `result` feeds both the tree and the count.
    # NOTE(review): connection credentials are blank placeholders — fill in
    # host/user/password/database before deployment.
    con = mysql.connect(host='', user='', password='', database='')
    c = con.cursor()
    c.execute('SELECT * FROM borrow;')
    result = c.fetchall()
    con.close()

    # setup treeview: (heading, column width) pairs
    columns = (('ID', 80), ('S ID', 80), ('S Name', 300), ('Title of the book', 500),
               ('Accession no. of book', 95), ('Date Taken', 100), ('Due Date', 100),
               ('Date_Returned', 110), ('Status', 80))
    tree = ttk.Treeview(log, height=20, columns=[x[0] for x in columns], show='headings')
    tree.grid(row=0, column=0, sticky='news')
    # setup columns attributes
    for col, width in columns:
        tree.heading(col, text=col,
                     command=lambda _col=col: treeview_sort_column(tree, _col, False))
        tree.column(col, width=width, anchor=tk.CENTER)

    # populate data to treeview ('values' is the documented Treeview option name)
    for rec in result:
        tree.insert('', 'end', values=rec)
    pygame.mixer.music.load('audio/pop_open.mp3')
    pygame.mixer.music.play()

    def pop_menu(event):
        # Remember which column was right-clicked so copy() knows what to grab.
        global column
        tree.identify_row(event.y)
        column = tree.identify_column(event.x)
        popup1.post(event.x_root, event.y_root)

    def copy():
        # Copy the right-clicked cell's value to the clipboard.
        select = tree.set(tree.selection(), column)
        log.clipboard_append(select)
        log.update()

    popup1 = Menu(log, tearoff=0)
    popup1.add_command(label='Copy', command=copy)
    tree.bind('<Button-3>', pop_menu)

    # scrollbar
    sb = tk.Scrollbar(log, orient=tk.VERTICAL, command=tree.yview)
    sb.grid(row=0, column=1, sticky='ns')
    tree.config(yscrollcommand=sb.set)

    btn = tk.Button(log, text='Close', command=out,
                    width=20, bd=2, fg='red', font=font_text)
    btn.grid(row=2, column=0, columnspan=2, sticky=E + W)
    status = Label(log, text=f'Total records fetched: {len(result)}',
                   bd=1, relief=SUNKEN, anchor=W)
    status.grid(row=1, columnspan=2, sticky=E + W)
# Function to show specific logs
def sp_logs():
global q_mark_new, e_sch
master = Toplevel(root)
master.title('Search Book')
master.focus_force()
master.geometry('+500+200')
master.resizable(False, False)
master.iconbitmap('images/visitors.ico')
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
e_sch = ttk.Entry(master)
drop = ttk.Combobox(master, value=['Search by....', 'Sl.no.', 'Student ID', 'Student Name',
'Title', 'Accession no. of book', 'Date Taken', 'Due Date', 'Date Returned', 'Approximate Accession no.', 'Approximate Sl.no.', 'Status'], state='readonly')
drop.current(0)
def dbase(event=None):
a = drop.get()
if a == 'Search by....':
messagebox.showerror(
'No choice given', 'Please choose a valid option to search by....', parent=master)
master.focus_force()
elif a == 'Sl.no.' or a == 'Date Taken' or a == 'Date Returned' or a == 'Due Date' or a == 'Accession no. of book':
if e_sch.get() == '':
messagebox.showerror(
'No data enter', 'Please enter a data in the box to search', parent=master)
master.focus_force()
else:
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_2 = 'SELECT * from borrow where `{}` = %s;'
sql_command_2 = sql_command_2.format(a)
values = (e_sch.get(),)
c.execute(sql_command_2, values)
recs = c.fetchall()
if recs == []:
messagebox.showerror(
'Does not exist', "No such data found here, make sure you've entered the correct information", parent=master)
master.focus_force()
else:
log = Toplevel(root)
log.title('View all Visitors')
log.focus_force()
log.iconbitmap('images/all_log.ico')
log.resizable(False, False)
log.geometry('+150+200')
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
def out():
pygame.mixer.music.load('audio/main open.mp3')
pygame.mixer.music.play()
time.sleep(0.3)
log.destroy()
def treeview_sort_column(tv, col, reverse):
l = [(tv.set(k, col), k) for k in tv.get_children('')]
l.sort(reverse=reverse)
# rearrange items in sorted positions
for index, (val, k) in enumerate(l):
tv.move(k, '', index)
# reverse sort next time
tv.heading(col, text=col, command=lambda _col=col: treeview_sort_column(
tv, _col, not reverse))
# setup treeview
columns = (('Sl.no', 80), ('S ID', 80), ('S Name', 300), ('Title of the book', 500), ('Accession no. of book', 90),
('Date Taken', 100), ('Due Date', 100), ('Date_Returned', 110), ('Status', 80))
tree = ttk.Treeview(log, height=20, columns=[
x[0] for x in columns], show='headings')
tree.grid(row=0, column=0, sticky='news')
# setup columns attributes
for col, width in columns:
tree.heading(col, text=col, command=lambda _col=col: treeview_sort_column(
tree, _col, False))
tree.column(col, width=width, anchor=tk.CENTER)
# fetch data
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_3 = f'SELECT * from borrow where `{a}` = "{e_sch.get()}";'
c.execute(sql_command_3)
# populate data to treeview
for rec in c:
tree.insert('', 'end', value=rec)
def pop_menu(event):
global column
tree.identify_row(event.y)
column = tree.identify_column(event.x)
popup1.post(event.x_root, event.y_root)
def copy():
row_id = tree.selection()
column_no = column
select = tree.set(row_id, column_no)
log.clipboard_append(select)
log.update()
popup1 = Menu(log, tearoff=0)
popup1.add_command(label='Copy', command=copy)
tree.bind('<Button-3>', pop_menu)
# scrollbar
sb = tk.Scrollbar(log, orient=tk.VERTICAL,
command=tree.yview)
sb.grid(row=0, column=1, sticky='ns')
tree.config(yscrollcommand=sb.set)
a = tree.item(tree.focus())['values']
btn = tk.Button(log, text='Close', command=out,
width=20, bd=2, fg='red', font=font_text)
btn.grid(row=2, column=0, columnspan=2, sticky=E+W)
status = Label(
log, text=f'Total records fetched: {len(recs)}', bd=1, relief=SUNKEN, anchor=W)
status.grid(row=1, columnspan=2, sticky=E+W)
con.close()
e_sch.delete(0, END)
elif a == 'Approximate Accession no.':
if e_sch.get() == '':
messagebox.showerror(
'No data enter', 'Please enter a data in the box to search', parent=master)
master.focus_force()
else:
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_28 = 'SELECT * from borrow where `Accession no. of book` REGEXP %s;'
values_28 = (e_sch.get(),)
c.execute(sql_command_28, values_28)
recs = c.fetchall()
if recs == []:
messagebox.showerror(
'Does not exist', "No such data found here, make sure you've entered the correct information", parent=master)
master.focus_force()
else:
log = Toplevel(root)
log.title('View all Visitors')
log.focus_force()
log.iconbitmap('images/all_log.ico')
log.geometry('+150+200')
log.resizable(False, False)
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
def out():
pygame.mixer.music.load('audio/main open.mp3')
pygame.mixer.music.play()
time.sleep(0.3)
log.destroy()
def treeview_sort_column(tv, col, reverse):
l = [(tv.set(k, col), k) for k in tv.get_children('')]
l.sort(reverse=reverse)
# rearrange items in sorted positions
for index, (val, k) in enumerate(l):
tv.move(k, '', index)
# reverse sort next time
tv.heading(col, text=col, command=lambda _col=col: treeview_sort_column(
tv, _col, not reverse))
# setup treeview
columns = (('Sl.no', 80), ('S ID', 80), ('S Name', 300), ('Title of the book', 500), ('Accession no. of book', 90),
('Date Taken', 100), ('Due Date', 100), ('Date_Returned', 110), ('Status', 80))
tree = ttk.Treeview(log, height=20, columns=[
x[0] for x in columns], show='headings')
tree.grid(row=0, column=0, sticky='news')
# setup columns attributes
for col, width in columns:
tree.heading(col, text=col, command=lambda _col=col: treeview_sort_column(
tree, _col, False))
tree.column(col, width=width, anchor=tk.CENTER)
# fetch data
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_29 = f'SELECT * from borrow where `Accession no. of book` REGEXP %s;'
values_29 = (e_sch.get(),)
c.execute(sql_command_29, values_29)
# populate data to treeview
for rec in c:
tree.insert('', 'end', value=rec)
def pop_menu(event):
global column
tree.identify_row(event.y)
column = tree.identify_column(event.x)
popup1.post(event.x_root, event.y_root)
def copy():
row_id = tree.selection()
column_no = column
select = tree.set(row_id, column_no)
log.clipboard_append(select)
log.update()
popup1 = Menu(log, tearoff=0)
popup1.add_command(label='Copy', command=copy)
tree.bind('<Button-3>', pop_menu)
# scrollbar
sb = tk.Scrollbar(log, orient=tk.VERTICAL,
command=tree.yview)
sb.grid(row=0, column=1, sticky='ns')
tree.config(yscrollcommand=sb.set)
a = tree.item(tree.focus())['values']
btn = tk.Button(log, text='Close', command=out,
width=20, bd=2, fg='red', font=font_text)
btn.grid(row=2, column=0, columnspan=2, sticky=E+W)
status = Label(
log, text=f'Total records fetched: {len(recs)}', bd=1, relief=SUNKEN, anchor=W)
status.grid(row=1, columnspan=2, sticky=E+W)
con.close()
e_sch.delete(0, END)
elif a == 'Approximate Sl.no.':
if e_sch.get() == '':
messagebox.showerror(
'No data entered', 'Please enter a data in the box to search', parent=master)
master.focus_force()
else:
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_30 = 'SELECT * from borrow where `sl.no.` REGEXP %s;'
values_30 = (e_sch.get(),)
c.execute(sql_command_30, values_30)
recs = c.fetchall()
if recs == []:
messagebox.showerror(
'Does not exist', "No such data found here, make sure you've entered the correct information", parent=master)
master.focus_force()
else:
log = Toplevel(root)
log.title('View all Visitors')
log.focus_force()
log.iconbitmap('images/all_log.ico')
log.geometry('+150+200')
log.resizable(False, False)
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
def out():
pygame.mixer.music.load('audio/main open.mp3')
pygame.mixer.music.play()
time.sleep(0.3)
log.destroy()
def treeview_sort_column(tv, col, reverse):
    # Sort the rows of treeview `tv` by column `col`. Values are compared
    # as strings (lexicographic order - numbers will not sort numerically).
    l = [(tv.set(k, col), k) for k in tv.get_children('')]
    l.sort(reverse=reverse)
    # rearrange items in sorted positions
    for index, (val, k) in enumerate(l):
        tv.move(k, '', index)
    # reverse sort next time
    tv.heading(col, text=col, command=lambda _col=col: treeview_sort_column(
        tv, _col, not reverse))
# setup treeview
columns = (('Sl.no', 80), ('S ID', 80), ('S Name', 300), ('Title of the book', 500), ('Accession no. of book', 90),
('Date Taken', 100), ('Due Date', 100), ('Date_Returned', 110), ('Status', 80))
tree = ttk.Treeview(log, height=20, columns=[
x[0] for x in columns], show='headings')
tree.grid(row=0, column=0, sticky='news')
# setup columns attributes
for col, width in columns:
tree.heading(col, text=col, command=lambda _col=col: treeview_sort_column(
tree, _col, False))
tree.column(col, width=width, anchor=tk.CENTER)
# fetch data
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_31 = f'SELECT * from borrow where `sl.no.` REGEXP %s;'
values_31 = (e_sch.get(),)
c.execute(sql_command_31, values_31)
# populate data to treeview
for rec in c:
tree.insert('', 'end', value=rec)
def pop_menu(event):
    # Right-click handler: record the clicked column for copy() and show
    # the context menu at the pointer position.
    global column
    tree.identify_row(event.y)  # NOTE(review): return value discarded - has no effect
    column = tree.identify_column(event.x)
    popup1.post(event.x_root, event.y_root)
def copy():
    # Copy the value of the right-clicked cell to the clipboard.
    row_id = tree.selection()
    column_no = column  # column id captured by pop_menu()
    select = tree.set(row_id, column_no)
    log.clipboard_append(select)
    log.update()  # process pending events so the clipboard content sticks
popup1 = Menu(log, tearoff=0)
popup1.add_command(label='Copy', command=copy)
tree.bind('<Button-3>', pop_menu)
# scrollbar
sb = tk.Scrollbar(log, orient=tk.VERTICAL,
command=tree.yview)
sb.grid(row=0, column=1, sticky='ns')
tree.config(yscrollcommand=sb.set)
a = tree.item(tree.focus())['values']
btn = tk.Button(log, text='Close', command=out,
width=20, bd=2, fg='red', font=font_text)
btn.grid(row=2, column=0, columnspan=2, sticky=E+W)
status = Label(
log, text=f'Total records fetched: {len(recs)}', bd=1, relief=SUNKEN, anchor=W)
status.grid(row=1, columnspan=2, sticky=E+W)
con.close()
e_sch.delete(0, END)
else:
if e_sch.get() == '':
messagebox.showerror(
'No data enter', 'Please enter a data in the box to search', parent=master)
master.focus_force()
else:
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_4 = 'SELECT * from borrow where `{}` REGEXP %s;'
sql_command_4 = sql_command_4.format(a)
values_4 = (e_sch.get(),)
c.execute(sql_command_4, values_4)
recs = c.fetchall()
if recs == []:
messagebox.showerror(
'Does not exist', "No such data found here, make sure you've entered the write information", parent=master)
master.focus_force()
else:
log = Toplevel(root)
log.title('View all Visitors')
log.focus_force()
log.iconbitmap('images/all_log.ico')
log.geometry('+150+200')
log.resizable(False, False)
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
def out():
    # Play the close sound, pause briefly so playback starts, then
    # destroy the results window.
    pygame.mixer.music.load('audio/main open.mp3')
    pygame.mixer.music.play()
    time.sleep(0.3)
    log.destroy()
def treeview_sort_column(tv, col, reverse):
    # Sort the rows of treeview `tv` by column `col`. Values are compared
    # as strings (lexicographic order - numbers will not sort numerically).
    l = [(tv.set(k, col), k) for k in tv.get_children('')]
    l.sort(reverse=reverse)
    # rearrange items in sorted positions
    for index, (val, k) in enumerate(l):
        tv.move(k, '', index)
    # reverse sort next time
    tv.heading(col, text=col, command=lambda _col=col: treeview_sort_column(
        tv, _col, not reverse))
# setup treeview
columns = (('Sl.no', 80), ('S ID', 80), ('S Name', 300), ('Title of the book', 500), ('Accession no. of book', 90),
('Date Taken', 100), ('Due Date', 100), ('Date_Returned', 110), ('Status', 80))
tree = ttk.Treeview(log, height=20, columns=[
x[0] for x in columns], show='headings')
tree.grid(row=0, column=0, sticky='news')
# setup columns attributes
for col, width in columns:
tree.heading(col, text=col, command=lambda _col=col: treeview_sort_column(
tree, _col, False))
tree.column(col, width=width, anchor=tk.CENTER)
# fetch data
sql_command_5 = 'SELECT * from borrow where `{}` REGEXP %s;'
sql_command_5 = sql_command_5.format(a)
values_5 = (e_sch.get(),)
c.execute(sql_command_5, values_5)
# populate data to treeview
for rec in c:
tree.insert('', 'end', value=rec)
def pop_menu(event):
    # Right-click handler: record the clicked column for copy() and show
    # the context menu at the pointer position.
    global column
    tree.identify_row(event.y)  # NOTE(review): return value discarded - has no effect
    column = tree.identify_column(event.x)
    popup1.post(event.x_root, event.y_root)
def copy():
    # Copy the value of the right-clicked cell to the clipboard.
    row_id = tree.selection()
    column_no = column  # column id captured by pop_menu()
    select = tree.set(row_id, column_no)
    log.clipboard_append(select)
    log.update()  # process pending events so the clipboard content sticks
popup1 = Menu(log, tearoff=0)
popup1.add_command(label='Copy', command=copy)
tree.bind('<Button-3>', pop_menu)
# scrollbar
sb = tk.Scrollbar(log, orient=tk.VERTICAL,
command=tree.yview)
sb.grid(row=0, column=1, sticky='ns')
tree.config(yscrollcommand=sb.set)
a = tree.item(tree.focus())['values']
btn = tk.Button(log, text='Close', command=out,
width=20, bd=2, fg='red', font=font_text)
btn.grid(row=2, column=0, columnspan=2, sticky=E+W)
status = Label(
log, text=f'Total records fetched: {len(recs)}', bd=1, relief=SUNKEN, anchor=W)
status.grid(row=1, columnspan=2, sticky=E+W)
con.close()
e_sch.delete(0, END)
def out():
    # Play the close sound, pause briefly so playback starts, then
    # destroy the search window.
    pygame.mixer.music.load('audio/main open.mp3')
    pygame.mixer.music.play()
    time.sleep(0.3)
    master.destroy()
def key_pressed(event):
    # Keystroke feedback sound.
    pygame.mixer.music.load('audio/type.mp3')
    pygame.mixer.music.play()
def clicker(event):
    # Button-click feedback sound.
    pygame.mixer.music.load('audio/click.mp3')
    pygame.mixer.music.play()
def change(event):
    # Combobox selection handler: reflect the chosen search field in the
    # entry prompt label.
    global e_sch  # NOTE(review): declared global but never assigned here
    if drop.get() != 'Search by....':
        l2.config(text=f'Enter {drop.get()}')
    else:
        l2.config(text='Enter')
l = Label(master, text='Search Visitors',
font=Font(family='helvetica', size='20'))
l2 = Label(master, text='Enter', font=font_text)
l3 = Label(master, text='NOTE: Enter date only in yyyy-mm-dd format ',
font=font_text, fg='red')
btn_log = Button(master, text='Show', font=font_text, command=dbase)
btn_ext = Button(master, text='Close', font=font_text,
command=out, fg='red')
l.grid(row=0, columnspan=3, pady=20, padx=20)
l2.grid(row=2, column=0, pady=20, padx=20)
l3.grid(row=3, column=0, pady=20, padx=20, columnspan=3, sticky=E+W)
btn_log.grid(row=4, columnspan=3, sticky=E+W, pady=(20, 0), ipadx=200)
btn_ext.grid(row=5, columnspan=3, sticky=E+W, ipadx=200)
drop.grid(row=1, column=0, columnspan=3, pady=20, padx=20, ipady=5)
e_sch.grid(row=2, column=1, pady=20, padx=20, ipady=5)
e_sch.bind_all('<Key>', key_pressed)
e_sch.bind_all('<Return>', dbase)
btn_log.bind('<Button-1>', clicker)
drop.bind('<<ComboboxSelected>>',change)
e_sch.focus_force()
q_mark_1 = Label(master, image=q_mark_new)
q_mark_1.grid(row=1, column=2, padx=(0, 10), sticky=W)
q_mark_2 = Label(master, image=q_mark_new)
q_mark_2.grid(row=2, column=2, padx=(0, 10), sticky=W)
# Creating a tooltip for each ? icon
ctk.ToolTip(q_mark_1, 'Select by:\nChoose how you want to search for visitors')
ctk.ToolTip(q_mark_2, 'Book:\nEnter the corresponding information of the visitor or the book borrowed\nMake sure to enter date in yyyy-mm-dd format if chosen')
# Function to return the book
def return_b():
    """Open the 'Return a book' window.

    The book can be located either by its Sl.no. or its Accession no.
    On success the latest matching `borrow` row is marked "Returned" with
    the chosen return date and the book is flagged available again in
    `books`.  Reads module-level globals: root, font_text, q_mark_new,
    and the mysql/pygame/ctk helpers imported at the top of the file.
    """
    global q_mark_new
    returner = Toplevel(root)
    returner.title('Return a book')
    returner.focus_force()
    returner.iconbitmap('images/retbor.ico')
    returner.geometry('+700+200')
    returner.resizable(False, False)
    pygame.mixer.music.load('audio/pop_open.mp3')
    pygame.mixer.music.play()

    def dbase():
        # Validate the search-mode choice and the two entries, then run the
        # shared return workflow with the matching books-table column.
        a = drop.get()
        if a == 'Select by....':
            messagebox.showerror(
                'No choice given', 'Please choose a valid option to select by....', parent=returner)
            returner.focus_force()
        elif a in ('Sl.no.', 'Accession no.'):
            if e_sch.get() == '' or e_bok.get() == '':
                messagebox.showerror(
                    'Fill all the blanks', 'Make sure to fill in all the blanks', parent=returner)
                returner.focus_force()
            else:
                # Column names come from this fixed whitelist, never from
                # user input, so interpolating them into SQL is safe.
                _process_return('sl.no.' if a == 'Sl.no.' else 'Accession no.')

    def _process_return(column_name):
        # Shared workflow for both search modes (previously two nearly
        # identical ~70-line branches).
        try:
            # Look up the book by the chosen column.
            con = mysql.connect(host='', user='',
                                password='', database='')
            c = con.cursor()
            sql_command_6 = f"SELECT * FROM books where `{column_name}` = %s;"
            values_6 = (e_bok.get(),)
            c.execute(sql_command_6, values_6)
            rec = c.fetchall()
            b = rec[0][1]          # book title (unused below; kept for parity)
            accession = rec[0][3]  # accession number of the copy
            # Fetch the most recent borrow record for this copy.
            sql_command_7 = 'SELECT * FROM borrow where `Accession no. of book` = %s ORDER BY `sl.no.` DESC LIMIT 1;'
            values_7 = (accession,)
            c.execute(sql_command_7, values_7)
            records = c.fetchall()
            status = records[0][8]
            title = records[0][3]
            s_id = records[0][1]
            if status == 'Borrowing' and s_id == e_sch.get():
                choice = messagebox.askyesno(
                    'Are you sure', f'Are you sure you want to return {title} taken by ID: {s_id}', parent=returner)
                returner.focus_force()
                if choice:
                    try:
                        # Mark only the newest matching borrow row as returned.
                        sql_command_8 = 'UPDATE borrow set `status`="Returned",`date returned`=%s WHERE `STUDENT ID`=%s and `Accession no. of book`=%s ORDER BY `Sl.no.` DESC LIMIT 1;'
                        values_8 = str(
                            b_cal.get()), e_sch.get(), accession
                        c.execute(sql_command_8, values_8)
                        c.execute('commit')
                        con.close()
                        messagebox.showinfo(
                            'Success', f'"{title}" has been returned successfully', parent=returner)
                        returner.focus_force()
                        # Reconnect to flag the book available again.
                        con = mysql.connect(host='', user='',
                                            password='', database='')
                        c = con.cursor()
                        sql_command_9 = 'UPDATE books set `availability`="Yes" WHERE `Accession no.`=%s;'
                        values_9 = (accession,)
                        c.execute(sql_command_9, values_9)
                        c.execute('commit')
                        con.close()
                        e_sch.delete(0, END)
                        e_bok.delete(0, END)
                        drop.current(0)
                    except NameError:
                        messagebox.showerror(
                            'Choose the date', 'Make sure to choose the date of returning', parent=returner)
                        returner.focus_force()
                else:
                    returner.focus_force()
            else:
                # BUGFIX: the Accession-no. branch previously reported this
                # error with messagebox.showinfo; errors now consistently
                # use showerror in both modes.
                messagebox.showerror(
                    'Error', f'No such book is being borrowed by ID: {e_sch.get()}', parent=returner)
                returner.focus_force()
        except IndexError:
            # Empty result set: no such book, or no borrow record for it.
            messagebox.showerror(
                'Error', f'No such book is being borrowed by ID: {e_sch.get()}', parent=returner)
            returner.focus_force()

    def out():
        # Close sound, brief pause so playback starts, then close the window.
        pygame.mixer.music.load('audio/main open.mp3')
        pygame.mixer.music.play()
        time.sleep(0.3)
        returner.destroy()

    def key_pressed(event):
        # Keystroke feedback sound.
        pygame.mixer.music.load('audio/type.mp3')
        pygame.mixer.music.play()

    def clicker(event):
        # Button-click feedback sound.
        pygame.mixer.music.load('audio/click.mp3')
        pygame.mixer.music.play()

    def change(event):
        # Reflect the chosen search mode in the entry prompt label.
        if drop.get() != 'Select by....':
            l3.config(text=f'Enter {drop.get()}')
        else:
            l3.config(text='Enter')

    def validation(inp):
        # Tk validatecommand: allow only digits (or an empty field).
        if inp == '':
            return True
        elif inp.isdigit():
            return True
        else:
            return False

    def length_id(*args):
        # Cap the student ID entry at 6 characters.
        if len(e_sch.get()) > 6:
            var_id.set(e_sch.get()[:-1])

    # --- widget construction -------------------------------------------
    vcmd = returner.register(validation)
    var_id = StringVar()
    drop = ttk.Combobox(
        returner, value=['Select by....', 'Sl.no.', 'Accession no.'], state='readonly')
    drop.current(0)
    l0 = Label(returner, text='Return a book',
               font=Font(family='helvetica', size='20'))
    l1 = Label(returner, text='Student ID', font=font_text)
    e_sch = ttk.Entry(returner, validate='all', validatecommand=(vcmd, '%P'), textvariable=var_id)
    l2 = Label(returner, text='Choose Date of Returning', font=font_text)
    l3 = Label(returner, text='Enter', font=font_text)
    e_bok = ttk.Entry(returner, validate='all', validatecommand=(vcmd, '%P'))
    b_cal = DateEntry(returner, font="Arial 12", selectmode='day', year=int(datetime.datetime.now().strftime("%Y")), month=int(datetime.datetime.now().strftime("%m")), day=int(datetime.datetime.now().strftime("%d")), date_pattern='y-mm-dd')
    b_ret = Button(returner, text='Return book', command=dbase, font=font_text)
    b_ext = Button(returner, text='Close',
                   command=out, font=font_text)
    # --- layout --------------------------------------------------------
    l0.grid(row=0, columnspan=3, pady=20)
    l1.grid(row=1, column=0, padx=30, pady=(10, 0), sticky=W)
    e_sch.grid(row=1, column=1, padx=30, ipady=5, pady=(10, 0))
    l2.grid(row=4, column=0, padx=30, sticky=W)
    drop.grid(row=2, columnspan=3, pady=20, ipadx=10, ipady=5)
    l3.grid(row=3, column=0, padx=30, sticky=W)
    e_bok.grid(row=3, column=1, padx=30, ipady=5, pady=10)
    b_cal.grid(row=4, column=1, ipadx=16, pady=10)
    b_ret.grid(row=6, columnspan=2, sticky=E+W, padx=30, pady=10)
    b_ext.grid(row=7, columnspan=2, sticky=E+W, padx=30, pady=10)
    e_sch.bind_all('<Key>', key_pressed)
    e_bok.bind_all('<Key>', key_pressed)
    b_ret.bind('<Button-1>', clicker)
    drop.bind('<<ComboboxSelected>>', change)
    var_id.trace('w', length_id)
    q_mark_1 = Label(returner, image=q_mark_new)
    q_mark_1.grid(row=1, column=2, padx=(0, 10))
    q_mark_2 = Label(returner, image=q_mark_new)
    q_mark_2.grid(row=2, column=1, padx=(0, 10))
    q_mark_3 = Label(returner, image=q_mark_new)
    q_mark_3.grid(row=3, column=2, padx=(0, 10))
    q_mark_4 = Label(returner, image=q_mark_new)
    q_mark_4.grid(row=4, column=2, padx=(0, 10))
    q_mark_5 = Label(returner, image=q_mark_new)
    q_mark_5.grid(row=6, column=2, padx=(0, 10))
    # Creating a tooltip for each ? icon
    ctk.ToolTip(q_mark_1, "Student ID:\nEnter your valid student ID")
    ctk.ToolTip(q_mark_2, 'Select by:\nChoose how you want to select your book')
    ctk.ToolTip(q_mark_3, 'Book:\nEnter the corresponding information of the book')
    ctk.ToolTip(q_mark_4, 'Date of returning:\nClick to choose the date of returning')
    ctk.ToolTip(q_mark_5, 'Return Book:\nClick to return the book')
# Function to borrow the book
def borrow():
    """Open the 'Borrow a book' window.

    The book can be located by Sl.no. or Accession no.  If the book is
    available, a new `borrow` row is inserted and the book is marked
    unavailable in `books`; if it is already taken, the due date is shown.
    Reads module-level globals: root, font_text, q_mark_new, and the
    mysql/pygame/ctk helpers imported at the top of the file.
    """
    global q_mark_new
    borrower = Toplevel(root)
    borrower.title('Borrow a book')
    borrower.iconbitmap('images/retbor.ico')
    borrower.resizable(False, False)
    borrower.geometry('+600+200')
    borrower.focus_force()
    pygame.mixer.music.load('audio/pop_open.mp3')
    pygame.mixer.music.play()

    def dbase():
        # Lend the book described by the three entries, if it exists and
        # is currently available.
        a = drop.get()
        if a == 'Select by....':
            messagebox.showerror(
                'No choice given', 'Please choose a valid option to select by....', parent=borrower)
            borrower.focus_force()
        elif a == 'Sl.no.':
            if e_bok.get() == '' or e_nme.get() == '' or e_sch.get() == '':
                messagebox.showerror(
                    'Fill all the blanks', 'Make sure to fill in all the blanks', parent=borrower)
                borrower.focus_force()
            else:
                # NOTE(review): 5563 is a hard-coded upper bound on Sl.no.;
                # it must be updated (or derived from the table) when new
                # books are added.
                if int(e_bok.get()) > 5563:
                    messagebox.showerror('Book does not exist',
                                         'Sorry, no such book is found here', parent=borrower)
                    borrower.focus_force()
                else:
                    con = mysql.connect(host='', user='',
                                        password='', database='')
                    c = con.cursor()
                    sql_command_14 = "SELECT * FROM books where `Sl.no.` = %s;"
                    values_14 = (e_bok.get(),)
                    c.execute(sql_command_14, values_14)
                    rec = c.fetchall()
                    avail = rec[0][5]      # availability flag ('Yes'/'No')
                    b = rec[0][1]          # book title
                    accession = rec[0][3]  # accession number
                    if avail == 'No':
                        # Book already out: show when it is due back.
                        sql_command_15 = "SELECT `Due Date` FROM borrow where `Accession no. of book` = %s;"
                        values_15 = (accession,)
                        c.execute(sql_command_15, values_15)
                        recs = c.fetchall()
                        dateb = recs[0][0]
                        new_date = dt.strptime(dateb, '%Y-%m-%d')
                        messagebox.showerror(
                            'Book Taken', f'Sorry, the book has already been taken. It will be returned back on {new_date.strftime("%A, %d %b %Y")}', parent=borrower)
                        borrower.focus_force()
                    else:
                        try:
                            date_1 = b_cal.get()    # borrow date
                            date_2 = b_cal_2.get()  # due date
                            new_date = dt.strptime(date_2, '%Y-%m-%d')
                            choice = messagebox.askyesno(
                                'Are you sure', f'Are you sure you want to borrow "{b}" to {e_nme.get()} till {new_date.strftime("%A, %d %b %Y")} ({date_2})', parent=borrower)
                            if choice:
                                con = mysql.connect(host='', user='',
                                                    password='', database='')
                                c = con.cursor()
                                sql_command_16 = "Insert into borrow(`STUDENT ID`,`STUDENT NAME`,`TITLE`,`Accession no. of book`,`DATE TAKEN`,`DUE DATE`,`STATUS`) VALUES(%s,%s,%s,%s,%s,%s,%s)"
                                values_16 = (e_sch.get(), e_nme.get(), b, str(
                                    accession), str(date_1), str(date_2), 'Borrowing')
                                c.execute(sql_command_16, values_16)
                                c.execute('commit')
                                con.close()
                                messagebox.showinfo(
                                    'Success', f'"{b}" has been succesfully borrowed by {e_nme.get()}', parent=borrower)
                                borrower.focus_force()
                                # Reconnect to mark the book unavailable.
                                con = mysql.connect(host='', user='',
                                                    password='', database='')
                                c = con.cursor()
                                sql_command_17 = "UPDATE books set `availability`= 'No' WHERE `sl.no.`=%s or `Accession no.`=%s;"
                                values_17 = (e_bok.get(), e_bok.get())
                                c.execute(sql_command_17, values_17)
                                c.execute('commit')
                                con.close()
                                e_sch.delete(0, END)
                                e_bok.delete(0, END)
                                e_nme.delete(0, END)
                                drop.current(0)
                            else:
                                borrower.focus_force()
                        except NameError:
                            messagebox.showerror(
                                'Choose the date', 'Make sure to choose the date of borrowing and due date', parent=borrower)
                            borrower.focus_force()
        elif a == 'Accession no.':
            if e_bok.get() == '' or e_nme.get() == '' or e_sch.get() == '':
                messagebox.showerror(
                    'Fill all the blanks', 'Make sure to fill in all the blanks', parent=borrower)
                borrower.focus_force()
            else:
                con = mysql.connect(host='', user='',
                                    password='', database='')
                c = con.cursor()
                sql_command_18 = "SELECT `accession no.` FROM books;"
                c.execute(sql_command_18)
                records = c.fetchall()
                # BUGFIX: previously iterated a hard-coded range(5553) over
                # `records`, which raised IndexError when the table had
                # fewer rows and silently skipped books beyond 5553.
                real_acc = [row[0] for row in records]
                if e_bok.get() not in real_acc:
                    messagebox.showerror(
                        'Does not exist', 'Sorry, such book does not exist. Please try someother book', parent=borrower)
                    borrower.focus_force()
                else:
                    con = mysql.connect(host='', user='',
                                        password='', database='')
                    c = con.cursor()
                    sql_command_19 = "SELECT * FROM books where `Accession no.` = %s;"
                    values_19 = (e_bok.get(),)
                    c.execute(sql_command_19, values_19)
                    rec = c.fetchall()
                    avail = rec[0][5]      # availability flag ('Yes'/'No')
                    b = rec[0][1]          # book title
                    accession = rec[0][3]  # accession number
                    if avail == 'No':
                        sql_command_20 = "SELECT `Due Date` FROM borrow where `Accession no. of book` = %s;"
                        values_20 = (accession,)
                        c.execute(sql_command_20, values_20)
                        recs = c.fetchall()
                        # Renamed from `c` to avoid clobbering the DB cursor.
                        dateb = recs[0][0]
                        new_date = dt.strptime(dateb, '%Y-%m-%d')
                        messagebox.showerror(
                            'Book Taken', f'Sorry, the book has already been taken. It will be returned back on {new_date.strftime("%A, %d %b %Y")}', parent=borrower)
                        borrower.focus_force()
                    else:
                        try:
                            date_1 = b_cal.get()    # borrow date
                            date_2 = b_cal_2.get()  # due date
                            new_date = dt.strptime(date_2, '%Y-%m-%d')
                            choice = messagebox.askyesno(
                                'Are you sure', f'Are you sure you want to borrow "{b}" to {e_nme.get()} till {new_date.strftime("%A, %d %b %Y")} ({date_2})', parent=borrower)
                            if choice:
                                sql_command_21 = "Insert into borrow(`STUDENT ID`,`STUDENT NAME`,`TITLE`,`Accession no. of book`,`DATE TAKEN`,`DUE DATE`,`STATUS`) VALUES (%s,%s,%s,%s,%s,%s,%s)"
                                values_21 = (e_sch.get(), e_nme.get(), b, str(
                                    accession), str(date_1), str(date_2), 'Borrowing')
                                c.execute(sql_command_21, values_21)
                                c.execute('commit')
                                con.close()
                                messagebox.showinfo(
                                    'Success', f'"{b}" has been succesfully borrowed by {e_nme.get()}', parent=borrower)
                                borrower.focus_force()
                                con = mysql.connect(host='', user='',
                                                    password='', database='')
                                c = con.cursor()
                                sql_command_22 = "UPDATE books set `availability`= 'No' WHERE `Accession no.`=%s;"
                                values_22 = (e_bok.get(),)
                                c.execute(sql_command_22, values_22)
                                c.execute('commit')
                                con.close()
                                e_sch.delete(0, END)
                                e_bok.delete(0, END)
                                e_nme.delete(0, END)
                                drop.current(0)
                            else:
                                borrower.focus_force()
                        except NameError:
                            messagebox.showerror(
                                'Choose the date', 'Make sure to choose the date of borrowing and the due date', parent=borrower)
                            borrower.focus_force()

    def out():
        # Close sound, brief pause so playback starts, then close the window.
        pygame.mixer.music.load('audio/main open.mp3')
        pygame.mixer.music.play()
        time.sleep(0.3)
        borrower.destroy()

    def key_pressed(event):
        # Keystroke feedback sound.
        pygame.mixer.music.load('audio/type.mp3')
        pygame.mixer.music.play()

    def clicker(event):
        # Button-click feedback sound.
        pygame.mixer.music.load('audio/click.mp3')
        pygame.mixer.music.play()

    def change(event):
        # Reflect the chosen search mode in the entry prompt label.
        if drop.get() != 'Select by....':
            l2.config(text=f'Enter {drop.get()}')
        else:
            l2.config(text='Enter')

    def validation(inp):
        # Tk validatecommand: allow only digits (or an empty field).
        if inp == '':
            return True
        elif inp.isdigit():
            return True
        else:
            return False

    def length_id(*args):
        # Cap the student ID entry at 6 characters.
        if len(e_sch.get()) > 6:
            var_id.set(e_sch.get()[:-1])

    def length_name(*args):
        # Cap the name at 30 characters and capitalise its first letter.
        if len(e_nme.get()) > 30:
            var_name.set(e_nme.get()[:-1])
        try:
            # NOTE(review): this keeps only the first character when it is
            # lowercase; it works for per-keystroke typing but truncates
            # pasted lowercase text - confirm intended.
            if not e_nme.get()[0].istitle():
                var_name.set(e_nme.get()[0].title())
        except IndexError:
            pass

    def validation_nme(inp):
        # Tk validatecommand ('%S'): allow alphabetic insertions or spaces.
        if inp == '':
            return True
        elif inp.isalpha():
            return True
        elif ' ' in inp:
            return True
        else:
            return False

    # --- widget construction -------------------------------------------
    var_id = StringVar()
    var_name = StringVar()
    vcmd = borrower.register(validation)
    vcmd1 = borrower.register(validation_nme)
    drop = ttk.Combobox(borrower, value=['Select by....', 'Sl.no.', 'Accession no.'], state='readonly')
    drop.current(0)
    l0 = Label(borrower, text='Borrow a book', font=Font(family='helvetica', size='20'))
    l1 = Label(borrower, text='Student ID', font=font_text)
    l4 = Label(borrower, text='Student Name', font=font_text)
    e_sch = ttk.Entry(borrower, validate='all', validatecommand=(vcmd, '%P'), textvariable=var_id)
    e_nme = ttk.Entry(borrower, textvariable=var_name, validate='all', validatecommand=(vcmd1, '%S'))
    l2 = Label(borrower, text='Enter', font=font_text)
    e_bok = ttk.Entry(borrower, validate='all', validatecommand=(vcmd, '%P'))
    b_giv = Button(borrower, text='Give book', command=dbase, font=font_text)
    b_ext = Button(borrower, text='Close',
                   command=out, font=font_text)
    l3 = Label(borrower, text='Choose Date of Borrowing', font=font_text)
    l5 = Label(borrower, text='Choose Due Date', font=font_text)
    b_cal = DateEntry(borrower, font="Arial 12", selectmode='day', year=int(datetime.datetime.now().strftime("%Y")), month=int(datetime.datetime.now().strftime("%m")), day=int(datetime.datetime.now().strftime("%d")), date_pattern='y-mm-dd', state='readonly')
    b_cal_2 = DateEntry(borrower, font="Arial 12", selectmode='day', year=int(datetime.datetime.now().strftime("%Y")), month=int(datetime.datetime.now().strftime("%m")), day=int(datetime.datetime.now().strftime("%d")), date_pattern='y-mm-dd', state='readonly')
    # --- layout --------------------------------------------------------
    l5.grid(row=6, column=0, padx=30, sticky=W)
    b_cal_2.grid(row=6, column=1, ipadx=16, pady=10)
    drop.grid(row=3, columnspan=3, pady=20, ipadx=10, ipady=5)
    l0.grid(row=0, columnspan=3, pady=20)
    l1.grid(row=1, column=0, padx=30, sticky=W)
    e_sch.grid(row=1, column=1, padx=(30, 0), ipady=5)
    l2.grid(row=4, column=0, padx=30, sticky=W)
    e_bok.grid(row=4, column=1, padx=(30, 0), ipady=5)
    b_giv.grid(row=8, columnspan=2, sticky=E+W, padx=(20, 0), pady=10)
    b_ext.grid(row=9, columnspan=2, sticky=E+W, padx=(20, 0), pady=10)
    l3.grid(row=5, column=0, padx=30, sticky=W)
    b_cal.grid(row=5, column=1, ipadx=16, pady=10)
    l4.grid(row=2, column=0, padx=30, pady=10, sticky=W)
    e_nme.grid(row=2, column=1, padx=(30, 0), ipady=5)
    e_sch.bind_all('<Key>', key_pressed)
    e_nme.bind_all('<Key>', key_pressed)
    e_bok.bind_all('<Key>', key_pressed)
    b_giv.bind('<Button-1>', clicker)
    drop.bind('<<ComboboxSelected>>', change)
    var_id.trace('w', length_id)
    var_name.trace('w', length_name)
    q_mark_1 = Label(borrower, image=q_mark_new)
    q_mark_1.grid(row=1, column=2, padx=10)
    q_mark_2 = Label(borrower, image=q_mark_new)
    q_mark_2.grid(row=2, column=2, padx=10)
    q_mark_3 = Label(borrower, image=q_mark_new)
    q_mark_3.grid(row=3, column=1, padx=10)
    q_mark_4 = Label(borrower, image=q_mark_new)
    q_mark_4.grid(row=4, column=2, padx=10)
    q_mark_5 = Label(borrower, image=q_mark_new)
    q_mark_5.grid(row=5, column=2, padx=10)
    q_mark_6 = Label(borrower, image=q_mark_new)
    q_mark_6.grid(row=6, column=2, padx=10)
    q_mark_7 = Label(borrower, image=q_mark_new)
    q_mark_7.grid(row=8, column=2, padx=10)
    # Creating a tooltip for each ? icon
    ctk.ToolTip(q_mark_1, "Student ID:\nEnter your valid student ID")
    ctk.ToolTip(q_mark_2, "Student Name:\nEnter your full name")
    ctk.ToolTip(q_mark_3, 'Select by:\nChoose how you want to select your book')
    ctk.ToolTip(q_mark_4, 'Book:\nEnter the corresponding information of the book')
    ctk.ToolTip(q_mark_5, 'Date of borrowing:\nClick to choose the date of borrowing ( today\'s date )')
    ctk.ToolTip(q_mark_6, 'Due Date:\nClick to choose the date of returning')
    ctk.ToolTip(q_mark_7, 'Give Book:\nClick to lend the book')
# Function to search the book
def search():
global q_mark_new
log = Toplevel(root)
log.title('Search Book')
log.resizable(False, False)
log.iconbitmap('images/search.ico')
log.focus_force()
log.geometry('+150+150')
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
def dbase(event=None):
a = drop.get()
if a == 'Search by....':
messagebox.showerror(
'No choice given', 'Please choose a valid option to search by....', parent=log)
e_sch.focus_force()
elif a == 'Sl.no.' or a == 'Accession no.':
if e_sch.get() == '':
messagebox.showerror(
'No data enter', 'Please enter a data in the box to search', parent=log)
log.focus_force()
else:
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_23 = 'SELECT * from books where `{}`=%s;'
sql_command_23 = sql_command_23.format(a)
values_23 = (e_sch.get(),)
c.execute(sql_command_23, values_23)
result = c.fetchall()
if result == []:
messagebox.showerror(
'Book does not exist', 'No such books found, please try a different book', parent=log)
log.focus_force()
else:
result_win = Toplevel(log)
result_win.title('Search result')
result_win.focus_force()
result_win.iconbitmap('images/booksearch.ico')
result_win.geometry('+150+200')
result_win.resizable(False, False)
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
def out():
    # Play the close sound, pause briefly so playback starts, then
    # destroy the search-result window.
    pygame.mixer.music.load('audio/main open.mp3')
    pygame.mixer.music.play()
    time.sleep(0.3)
    result_win.destroy()
index = 0
for index, x in enumerate(result):
num = 0
for y in x:
lookup_label = Label(result_win, text=y)
lookup_label.grid(row=index+1, column=num)
num += 1
con.close()
l1 = Label(result_win, text='Sl.No', font=font_text)
l2 = Label(result_win, text='Title', font=font_text)
l3 = Label(result_win, text='Authors', font=font_text)
l6 = Label(result_win, text='Accession Number',
font=font_text)
l4 = Label(result_win, text='Subject', font=font_text)
l5 = Label(result_win, text='Availability', font=font_text)
btn_ext = Button(result_win, text='Exit', font=font_text,
command=out, borderwidth=2, fg='#eb4d4b')
l1.grid(row=0, column=0, padx=20)
l2.grid(row=0, column=1, padx=20)
l3.grid(row=0, column=2, padx=20)
l6.grid(row=0, column=3, padx=50)
l4.grid(row=0, column=4, padx=20)
l5.grid(row=0, column=5, padx=20)
btn_ext.grid(row=index+2, columnspan=7, sticky=E+W)
e_sch.delete(0, END)
elif a == 'Approximate Accession no.':
if e_sch.get() == '':
messagebox.showerror(
'No data enter', 'Please enter a data in the box to search', parent=log)
log.focus_force()
else:
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_32 = "SELECT * from books where `Accession no.` REGEXP %s;"
values_32 = (e_sch.get(),)
c.execute(sql_command_32, values_32)
result = c.fetchall()
if result == []:
messagebox.showerror(
'No such book', 'No such books found, please try a different book', parent=log)
log.focus_force()
if len(result) > 20:
result_win = Toplevel(root)
result_win.title('View all Visitors')
result_win.focus_force()
result_win.iconbitmap('images/booksearch.ico')
result_win.geometry('+150+200')
result_win.resizable(False, False)
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
def out():
    # Play the close sound, pause briefly so playback starts, then
    # destroy the search-result window.
    pygame.mixer.music.load('audio/main open.mp3')
    pygame.mixer.music.play()
    time.sleep(0.3)
    result_win.destroy()
# setup treeview
columns = (('Sl.no', 80), ('Title of the book', 500), ('Author', 300), ('Accession no.', 80),
('Subject', 150), ('Availability', 80))
tree = ttk.Treeview(result_win, height=20, columns=[
x[0] for x in columns], show='headings')
tree.grid(row=0, column=0, sticky='news')
# setup columns attributes
for col, width in columns:
tree.heading(col, text=col)
tree.column(col, width=width, anchor=tk.CENTER)
# fetch data
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_33 = 'SELECT * FROM books where `Accession no.` REGEXP %s;'
values_33 = (e_sch.get(),)
c.execute(sql_command_33, values_33)
# populate data to treeview
for rec in c:
tree.insert('', 'end', value=rec)
def pop_menu(event):
    # Right-click handler: record the clicked column for copy() and show
    # the context menu at the pointer position.
    global column
    tree.identify_row(event.y)  # NOTE(review): return value discarded - has no effect
    column = tree.identify_column(event.x)
    popup1.post(event.x_root, event.y_root)
def copy():
    # Copy the value of the right-clicked cell to the clipboard.
    row_id = tree.selection()
    column_no = column  # column id captured by pop_menu()
    select = tree.set(row_id, column_no)
    log.clipboard_append(select)
    log.update()  # process pending events so the clipboard content sticks
popup1 = Menu(log, tearoff=0)
popup1.add_command(label='Copy', command=copy)
tree.bind('<Button-3>', pop_menu)
# scrollbar
sb = tk.Scrollbar(
result_win, orient=tk.VERTICAL, command=tree.yview)
sb.grid(row=0, column=1, sticky='ns')
tree.config(yscrollcommand=sb.set)
a = tree.item(tree.focus())['values']
status = Label(
result_win, text=f'Total records fetched: {len(result)}', bd=1, relief=SUNKEN, anchor=W)
status.grid(row=1, columnspan=2, sticky=E+W)
btn = tk.Button(result_win, text='Close', command=out,
width=20, bd=2, fg='red', font=font_text)
btn.grid(row=2, column=0, columnspan=2, sticky=E+W)
con.close()
e_sch.delete(0, END)
else:
result_win = Toplevel(log)
result_win.title('Search result')
result_win.focus_force()
result_win.iconbitmap('images/booksearch.ico')
result_win.geometry('+150+200')
result_win.resizable(False, False)
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
def out():
    # Play the close sound, pause briefly so playback starts, then
    # destroy the search-result window.
    pygame.mixer.music.load('audio/main open.mp3')
    pygame.mixer.music.play()
    time.sleep(0.3)
    result_win.destroy()
index = 0
for index, x in enumerate(result):
num = 0
for y in x:
lookup_label = Label(result_win, text=y)
lookup_label.grid(row=index+1, column=num)
num += 1
con.close()
l1 = Label(result_win, text='Sl.No', font=font_text)
l2 = Label(result_win, text='Title', font=font_text)
l3 = Label(result_win, text='Authors', font=font_text)
l6 = Label(result_win, text='Accession Number',
font=font_text)
l4 = Label(result_win, text='Subject', font=font_text)
l5 = Label(result_win, text='Availablity', font=font_text)
btn_ext = Button(result_win, text='Exit', font=font_text,
command=out, borderwidth=2, fg='#eb4d4b')
status = Label(
result_win, text=f'Total records fetched: {len(result)}', bd=1, relief=SUNKEN, anchor=W)
status.grid(row=index+2, columnspan=7, sticky=E+W)
l1.grid(row=0, column=0, padx=20)
l2.grid(row=0, column=1, padx=20)
l3.grid(row=0, column=2, padx=20)
l4.grid(row=0, column=4, padx=20)
l5.grid(row=0, column=5, padx=20)
l6.grid(row=0, column=3, padx=50)
btn_ext.grid(row=index+3, columnspan=7, sticky=E+W)
e_sch.delete(0, END)
elif a == 'Approximate Sl.no.':
if e_sch.get() == '':
messagebox.showerror(
'No data enter', 'Please enter a data in the box to search', parent=log)
log.focus_force()
else:
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_34 = "SELECT * from books where `sl.no.` REGEXP %s;"
values_34 = (e_sch.get(),)
c.execute(sql_command_34, values_34)
result = c.fetchall()
if result == []:
messagebox.showerror(
'No such book', 'No such books found, please try a different book', parent=log)
log.focus_force()
if len(result) > 20:
result_win = Toplevel(root)
result_win.title('View all Visitors')
result_win.focus_force()
result_win.iconbitmap('images/booksearch.ico')
result_win.geometry('+150+200')
result_win.resizable(False, False)
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
def out():
pygame.mixer.music.load('audio/main open.mp3')
pygame.mixer.music.play()
time.sleep(0.3)
result_win.destroy()
# setup treeview
columns = (('Sl.no', 80), ('Title of the book', 500), ('Author', 300), ('Accession no.', 80),
('Subject', 150), ('Availability', 80))
tree = ttk.Treeview(result_win, height=20, columns=[
x[0] for x in columns], show='headings')
tree.grid(row=0, column=0, sticky='news')
# setup columns attributes
for col, width in columns:
tree.heading(col, text=col)
tree.column(col, width=width, anchor=tk.CENTER)
# fetch data
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_35 = 'SELECT * FROM books where `sl.no.` REGEXP %s;'
values_35 = (e_sch.get(),)
c.execute(sql_command_35, values_35)
# populate data to treeview
for rec in c:
tree.insert('', 'end', value=rec)
def pop_menu(event):
global column
tree.identify_row(event.y)
column = tree.identify_column(event.x)
popup1.post(event.x_root, event.y_root)
def copy():
row_id = tree.selection()
column_no = column
select = tree.set(row_id, column_no)
log.clipboard_append(select)
log.update()
popup1 = Menu(log, tearoff=0)
popup1.add_command(label='Copy', command=copy)
tree.bind('<Button-3>', pop_menu)
# scrollbar
sb = tk.Scrollbar(
result_win, orient=tk.VERTICAL, command=tree.yview)
sb.grid(row=0, column=1, sticky='ns')
tree.config(yscrollcommand=sb.set)
a = tree.item(tree.focus())['values']
status = Label(
result_win, text=f'Total records fetched: {len(result)}', bd=1, relief=SUNKEN, anchor=W)
status.grid(row=1, columnspan=2, sticky=E+W)
btn = tk.Button(result_win, text='Close', command=out,
width=20, bd=2, fg='red', font=font_text)
btn.grid(row=2, column=0, columnspan=2, sticky=E+W)
con.close()
e_sch.delete(0, END)
else:
result_win = Toplevel(log)
result_win.title('Search result')
result_win.focus_force()
result_win.iconbitmap('images/booksearch.ico')
result_win.geometry('+150+200')
result_win.resizable(False, False)
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
def out():
pygame.mixer.music.load('audio/main open.mp3')
pygame.mixer.music.play()
time.sleep(0.3)
result_win.destroy()
index = 0
for index, x in enumerate(result):
num = 0
for y in x:
lookup_label = Label(result_win, text=y)
lookup_label.grid(row=index+1, column=num)
num += 1
con.close()
l1 = Label(result_win, text='Sl.No', font=font_text)
l2 = Label(result_win, text='Title', font=font_text)
l3 = Label(result_win, text='Authors', font=font_text)
l6 = Label(result_win, text='Accession Number',
font=font_text)
l4 = Label(result_win, text='Subject', font=font_text)
l5 = Label(result_win, text='Availablity', font=font_text)
btn_ext = Button(result_win, text='Exit', font=font_text,
command=out, borderwidth=2, fg='#eb4d4b')
status = Label(
result_win, text=f'Total records fetched: {len(result)}', bd=1, relief=SUNKEN, anchor=W)
status.grid(row=index+2, columnspan=7, sticky=E+W)
l1.grid(row=0, column=0, padx=20)
l2.grid(row=0, column=1, padx=20)
l3.grid(row=0, column=2, padx=20)
l4.grid(row=0, column=4, padx=20)
l5.grid(row=0, column=5, padx=20)
l6.grid(row=0, column=3, padx=50)
btn_ext.grid(row=index+3, columnspan=7, sticky=E+W)
e_sch.delete(0, END)
else:
if e_sch.get() == '':
messagebox.showerror(
'No data enter', 'Please enter a data in the box to search', parent=log)
log.focus_force()
else:
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_24 = "SELECT * from books where `{}` REGEXP %s;"
sql_command_24 = sql_command_24.format(a)
values_24 = (e_sch.get(),)
c.execute(sql_command_24, values_24)
result = c.fetchall()
if result == []:
messagebox.showerror(
'No such book', 'No such books found, please try a different book', parent=log)
log.focus_force()
if len(result) > 20:
result_win = Toplevel(root)
result_win.title('View all Visitors')
result_win.focus_force()
result_win.iconbitmap('images/booksearch.ico')
result_win.geometry('+150+200')
result_win.resizable(False, False)
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
def out():
pygame.mixer.music.load('audio/main open.mp3')
pygame.mixer.music.play()
time.sleep(0.3)
result_win.destroy()
# setup treeview
columns = (('Sl.no', 80), ('Title of the book', 500), ('Author', 300), ('Accession no.', 80),
('Subject', 150), ('Availability', 80))
tree = ttk.Treeview(result_win, height=20, columns=[
x[0] for x in columns], show='headings')
tree.grid(row=0, column=0, sticky='news')
# setup columns attributes
for col, width in columns:
tree.heading(col, text=col)
tree.column(col, width=width, anchor=tk.CENTER)
# fetch data
con = mysql.connect(host='', user='',
password='', database='')
c = con.cursor()
sql_command_25 = 'SELECT * FROM books where `{}` REGEXP %s;'
sql_command_25 = sql_command_25.format(a)
values_25 = (e_sch.get(),)
c.execute(sql_command_25, values_25)
# populate data to treeview
for rec in c:
tree.insert('', 'end', value=rec)
def pop_menu(event):
global column
tree.identify_row(event.y)
column = tree.identify_column(event.x)
popup1.post(event.x_root, event.y_root)
def copy():
row_id = tree.selection()
column_no = column
select = tree.set(row_id, column_no)
log.clipboard_append(select)
log.update()
popup1 = Menu(log, tearoff=0)
popup1.add_command(label='Copy', command=copy)
tree.bind('<Button-3>', pop_menu)
# scrollbar
sb = tk.Scrollbar(
result_win, orient=tk.VERTICAL, command=tree.yview)
sb.grid(row=0, column=1, sticky='ns')
tree.config(yscrollcommand=sb.set)
a = tree.item(tree.focus())['values']
status = Label(
result_win, text=f'Total records fetched: {len(result)}', bd=1, relief=SUNKEN, anchor=W)
status.grid(row=1, columnspan=2, sticky=E+W)
btn = tk.Button(result_win, text='Close', command=out,
width=20, bd=2, fg='red', font=font_text)
btn.grid(row=2, column=0, columnspan=2, sticky=E+W)
con.close()
e_sch.delete(0, END)
else:
result_win = Toplevel(log)
result_win.title('Search result')
result_win.focus_force()
result_win.iconbitmap('images/booksearch.ico')
result_win.geometry('+150+200')
result_win.resizable(False, False)
pygame.mixer.music.load('audio/pop_open.mp3')
pygame.mixer.music.play()
def out():
pygame.mixer.music.load('audio/main open.mp3')
pygame.mixer.music.play()
time.sleep(0.3)
result_win.destroy()
index = 0
for index, x in enumerate(result):
num = 0
for y in x:
lookup_label = Label(result_win, text=y)
lookup_label.grid(row=index+1, column=num)
num += 1
con.close()
l1 = Label(result_win, text='Sl.No', font=font_text)
l2 = Label(result_win, text='Title', font=font_text)
l3 = Label(result_win, text='Authors', font=font_text)
l6 = Label(result_win, text='Accession Number',
font=font_text)
l4 = Label(result_win, text='Subject', font=font_text)
l5 = Label(result_win, text='Availablity', font=font_text)
btn_ext = Button(result_win, text='Exit', font=font_text,
command=out, borderwidth=2, fg='#eb4d4b')
status = Label(
result_win, text=f'Total records fetched: {len(result)}', bd=1, relief=SUNKEN, anchor=W)
status.grid(row=index+2, columnspan=7, sticky=E+W)
l1.grid(row=0, column=0, padx=20)
l2.grid(row=0, column=1, padx=20)
l3.grid(row=0, column=2, padx=20)
l4.grid(row=0, column=4, padx=20)
l5.grid(row=0, column=5, padx=20)
l6.grid(row=0, column=3, padx=50)
btn_ext.grid(row=index+3, columnspan=7, sticky=E+W)
e_sch.delete(0, END)
def out():
pygame.mixer.music.load('audio/main open.mp3')
pygame.mixer.music.play()
time.sleep(0.3)
log.destroy()
def key_pressed(event):
pygame.mixer.music.load('audio/type.mp3')
pygame.mixer.music.play()
def clicker(event):
pygame.mixer.music.load('audio/click.mp3')
pygame.mixer.music.play()
def change(event):
if drop.get() != 'Search by....':
l2.config(text=f'Enter {drop.get()}')
else:
l2.config(text='Enter')
global a
l = Label(log, text='Search Book', font=Font(
family='helvetica', size='20'))
drop = ttk.Combobox(log, value=['Search by....', 'Sl.no.', 'Title of the book', 'Accession no.',
'Approximate Accession no.', 'Approximate Sl.no.', 'Authors', 'Subject'], state='readonly')
drop.current(0)
l2 = Label(log, text='Enter', font=font_text)
e_sch = ttk.Entry(log)
b_sch = Button(log, text='Search', command=dbase, font=font_text)
b_ext = Button(log, text='Close', command=out, font=font_text)
a = drop.get()
l.grid(row=0, columnspan=3, pady=20)
drop.grid(row=1, column=0, columnspan=3, ipady=5,ipadx=10)
l2.grid(row=2, column=0, padx=(20, 0))
e_sch.grid(row=2, column=1, ipady=5, pady=20)
b_sch.grid(row=3, column=0, columnspan=3, ipadx=200, sticky=E+W)
b_ext.grid(row=4, column=0, columnspan=3, sticky=E+W)
e_sch.bind_all('<Key>', key_pressed)
b_sch.bind('<Button-1>', clicker)
e_sch.bind('<Return>', dbase)
drop.bind('<<ComboboxSelected>>',change)
e_sch.focus_force()
q_mark_1 = Label(log, image=q_mark_new)
q_mark_1.grid(row=1, column=2, padx=(0, 10), sticky=W)
q_mark_2 = Label(log, image=q_mark_new)
q_mark_2.grid(row=2, column=2, padx=(0, 10), sticky=W)
ctk.ToolTip(q_mark_1, 'Select by:\nChoose how you want to search for the book')
ctk.ToolTip(q_mark_2, 'Book:\nEnter the corresponding information of the book')
# Function to pop-open about
def about():
    """Open the 'About' pop-up window showing author, version, license
    and buttons linking to the website and source repository."""
    # Defining Urls
    url = "https://nihaalnz.herokuapp.com"
    url_2 = "https://github.com/nihaalnz/Library-App-python-tkinter"

    def openweb():
        # Open the author's website in a new browser window.
        webbrowser.open(url, new=1)

    def openweb_2():
        # Open the project's source-code repository in a new browser window.
        webbrowser.open(url_2, new=1)

    def out():
        # Play the close sound, pause briefly so it is audible, then close.
        pygame.mixer.music.load('audio/main open.mp3')
        pygame.mixer.music.play()
        time.sleep(0.3)
        about_win.destroy()

    # Define about section.  Named `about_win` rather than `about` so the
    # window does not shadow this function's own name inside its body.
    about_win = Toplevel(root)
    about_win.title('About')
    about_win.iconbitmap('images/moderator.ico')
    about_win.geometry('300x300+300+300')
    about_win.resizable(False, False)
    about_win.focus_force()
    pygame.mixer.music.load('audio/pop_open.mp3')
    pygame.mixer.music.play()
    # Making frames
    frame = LabelFrame(about_win, text='About this program', padx=5, pady=5)
    # Making frame items
    l_name = Label(frame, text='Created by Nihaal Nz')
    l_ver = Label(frame, text='Ver : 5.00')
    l_lic = Label(frame, text='Licensed under MIT')
    btn_sup = Button(frame, text='Website', command=openweb)
    btn_cod = Button(frame, text='Source Code', command=openweb_2)
    btn_cls = Button(frame, text='Close', command=out)
    # Placing in screen
    frame.grid(row=0, column=0, padx=70, pady=40)
    l_name.grid(row=0, column=0)
    l_ver.grid(row=1, column=0)
    l_lic.grid(row=2, column=0)
    btn_sup.grid(row=3, columnspan=2, sticky=E+W, pady=(5, 0))
    btn_cod.grid(row=4, columnspan=2, sticky=E+W, pady=5)
    btn_cls.grid(row=5, columnspan=2, sticky=E+W)
# Function to exit the program
def exits():
    """Ask the user to confirm, then shut the whole application down."""
    confirmed = messagebox.askyesno(
        'Exit', 'Are you sure you want to exit?', parent=root)
    if not confirmed:
        # Declined: hand focus back to the main window and carry on.
        root.focus_force()
        return
    pygame.mixer.music.load('audio/main open.mp3')
    pygame.mixer.music.play()
    time.sleep(0.5)  # let the closing sound play before the window dies
    root.destroy()
# Define menu
my_menu = Menu(root)
root.config(menu=my_menu)
# Add menu items
file_menu = Menu(my_menu, tearoff=0)
my_menu.add_cascade(label='Menu', menu=file_menu)
file_menu.add_command(label='Search', command=search)
file_menu.add_command(label='Borrow', command=borrow)
file_menu.add_command(label='Return', command=return_b)
file_menu.add_separator()
file_menu.add_command(label='Show all logs', command=all_logs)
file_menu.add_command(label='Search visitors', command=sp_logs)
file_menu.add_separator()
file_menu.add_command(label='About', command=about)
file_menu.add_separator()
file_menu.add_command(label='Exit', command=exits)
# Heading
l = Label(root, text='Main Menu', font=Font(family='helvetica', size='20'))
# Making buttons
btn_brw = Button(root, text='Borrow', command=borrow, font=font_text)
btn_ret = Button(root, text='Return', command=return_b, font=font_text)
btn_sch = Button(root, text='Search', command=search, font=font_text)
btn_log = Button(root, text='Search Visitors', command=sp_logs, font=font_text)
btn_logs = Button(root, text='Show All Visitors', command=all_logs, font=font_text)
btn_ext = Button(root, text='Exit', command=exits, font=font_text, fg='red')
# Placing in screen
btn_brw.grid(column=1, row=1, pady=65, padx=30, ipadx=30)
btn_ret.grid(column=2, row=1, ipadx=30, padx=(0, 10))
btn_sch.grid(column=0, row=1, padx=(8, 0), ipadx=30)
btn_log.grid(columnspan=3, row=3, sticky=E+W, padx=1, ipady=1, pady=(0, 20))
btn_logs.grid(columnspan=3, row=2, sticky=E+W, padx=1, ipady=1)
btn_ext.grid(columnspan=3, row=4, sticky=E+W, padx=1, ipady=1)
l.grid(row=0, columnspan=3, pady=(20, 0))
# Creating ? icons
q_mark = Image.open('images/question_mark.png')
# LANCZOS is the filter ANTIALIAS aliased; ANTIALIAS was removed in Pillow 10.
q_mark_re = q_mark.resize((15, 15), Image.LANCZOS)
q_mark_new = ImageTk.PhotoImage(q_mark_re)
# Trying to establish database connection (probe only; each handler opens
# its own connection later).  Close the probe so it is not leaked.
try:
    probe_con = mysql.connect(host='', user='',
                              password='', database='')
    probe_con.close()
except Exception:  # narrowed from bare `except:` so Ctrl-C/SystemExit still work
    messagebox.showerror('Connection Failed', 'Connection with the database could not be established.\nPlease try again later.', parent=root)
    root.destroy()
# Ending program
root.mainloop()
| 44.344201
| 258
| 0.487586
| 9,493
| 82,968
| 4.148952
| 0.052986
| 0.022749
| 0.035749
| 0.022343
| 0.851089
| 0.826563
| 0.804499
| 0.774717
| 0.744325
| 0.728178
| 0
| 0.030406
| 0.391139
| 82,968
| 1,870
| 259
| 44.367914
| 0.749268
| 0.020827
| 0
| 0.736598
| 0
| 0.018531
| 0.134799
| 0.005064
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045665
| false
| 0.018531
| 0.009927
| 0
| 0.06221
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dd897e57a3bba1e37765d27d7b21b28a539f5c90
| 3,550
|
py
|
Python
|
app/esper/queries/person_x.py
|
DanFu09/esper
|
ccc5547de3637728b8aaab059b6781baebc269ec
|
[
"Apache-2.0"
] | 4
|
2018-12-27T07:21:38.000Z
|
2019-01-04T10:35:02.000Z
|
app/esper/queries/person_x.py
|
DanFu09/esper
|
ccc5547de3637728b8aaab059b6781baebc269ec
|
[
"Apache-2.0"
] | null | null | null |
app/esper/queries/person_x.py
|
DanFu09/esper
|
ccc5547de3637728b8aaab059b6781baebc269ec
|
[
"Apache-2.0"
] | null | null | null |
from esper.prelude import *
from esper.rekall import *
from .queries import query
@query("Frames with actor X (rekall)")
def frames_with_actor_x():
    """Return bounding-box results for frames containing a hard-coded actor,
    computed with rekall interval operations over FaceCharacterActor rows."""
    from query.models import FaceCharacterActor
    from rekall.video_interval_collection import VideoIntervalCollection
    from rekall.parsers import in_array, bbox_payload_parser, merge_dict_parsers, dict_payload_parser
    from rekall.merge_ops import payload_plus
    from rekall.payload_predicates import payload_satisfies
    from rekall.spatial_predicates import scene_graph
    from esper.rekall import intrvllists_to_result_bbox

    actor_name = "daniel radcliffe"

    # Annotate face rows with start and end frames and the video ID
    annotated_qs = FaceCharacterActor.objects.annotate(
        min_frame=F('face__frame__number'),
        max_frame=F('face__frame__number'),
        video_id=F('face__frame__video_id'),
        bbox_x1=F('face__bbox_x1'),
        bbox_y1=F('face__bbox_y1'),
        bbox_x2=F('face__bbox_x2'),
        bbox_y2=F('face__bbox_y2'),
        actor_name=F('characteractor__actor__name'))

    # Payload per interval: a list of bbox dicts, each tagged with the actor name.
    payload_parser = in_array(merge_dict_parsers([
        bbox_payload_parser(VideoIntervalCollection.django_accessor),
        dict_payload_parser(VideoIntervalCollection.django_accessor,
                            { 'actor': 'actor_name' }),
    ]))
    face_intervals = VideoIntervalCollection.from_django_qs(
        annotated_qs, with_payload=payload_parser
    ).coalesce(payload_merge_op=payload_plus)

    # Keep only intervals with at least one face belonging to the target actor.
    target_graph = scene_graph({
        'nodes': [{ 'name': 'face1',
                    'predicates': [lambda f: f['actor'] == actor_name] }],
        'edges': [],
    })
    matching = face_intervals.filter(payload_satisfies(target_graph))
    return intrvllists_to_result_bbox(matching.get_allintervals(), limit=100, stride=1000)
@query("Frames with character X (rekall)")
def frames_with_character_x():
    """Return bounding-box results for frames containing a hard-coded character,
    computed with rekall interval operations over FaceCharacterActor rows."""
    from query.models import FaceCharacterActor
    from rekall.video_interval_collection import VideoIntervalCollection
    from rekall.parsers import in_array, bbox_payload_parser, merge_dict_parsers, dict_payload_parser
    from rekall.merge_ops import payload_plus
    from rekall.payload_predicates import payload_satisfies
    from rekall.spatial_predicates import scene_graph
    from esper.rekall import intrvllists_to_result_bbox

    character_name = "harry potter"

    # Annotate face rows with start and end frames and the video ID
    annotated_qs = FaceCharacterActor.objects.annotate(
        min_frame=F('face__frame__number'),
        max_frame=F('face__frame__number'),
        video_id=F('face__frame__video_id'),
        bbox_x1=F('face__bbox_x1'),
        bbox_y1=F('face__bbox_y1'),
        bbox_x2=F('face__bbox_x2'),
        bbox_y2=F('face__bbox_y2'),
        character_name=F('characteractor__character__name'))

    # Payload per interval: a list of bbox dicts, each tagged with the character name.
    payload_parser = in_array(merge_dict_parsers([
        bbox_payload_parser(VideoIntervalCollection.django_accessor),
        dict_payload_parser(VideoIntervalCollection.django_accessor,
                            { 'character': 'character_name' }),
    ]))
    face_intervals = VideoIntervalCollection.from_django_qs(
        annotated_qs, with_payload=payload_parser
    ).coalesce(payload_merge_op=payload_plus)

    # Keep only intervals with at least one face belonging to the target character.
    target_graph = scene_graph({
        'nodes': [{ 'name': 'face1',
                    'predicates': [lambda f: f['character'] == character_name] }],
        'edges': [],
    })
    matching = face_intervals.filter(payload_satisfies(target_graph))
    return intrvllists_to_result_bbox(matching.get_allintervals(), limit=100, stride=1000)
| 43.292683
| 108
| 0.735493
| 444
| 3,550
| 5.439189
| 0.18018
| 0.028986
| 0.029814
| 0.038095
| 0.878675
| 0.862112
| 0.862112
| 0.862112
| 0.862112
| 0.862112
| 0
| 0.010914
| 0.174085
| 3,550
| 81
| 109
| 43.82716
| 0.812756
| 0.034648
| 0
| 0.746269
| 0
| 0
| 0.139603
| 0.029206
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029851
| false
| 0
| 0.253731
| 0
| 0.313433
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dd9353b618380fc36b017380e5766113fe968f65
| 38,940
|
py
|
Python
|
sdk/python/pulumi_alicloud/kms/secret.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/kms/secret.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/kms/secret.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['SecretArgs', 'Secret']
# NOTE(review): this class is tfgen-generated (see file header: "Do not edit by
# hand") — comments only below; code left token-identical.
@pulumi.input_type
class SecretArgs:
    def __init__(__self__, *,
                 secret_data: pulumi.Input[str],
                 secret_name: pulumi.Input[str],
                 version_id: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 enable_automatic_rotation: Optional[pulumi.Input[bool]] = None,
                 encryption_key_id: Optional[pulumi.Input[str]] = None,
                 force_delete_without_recovery: Optional[pulumi.Input[bool]] = None,
                 recovery_window_in_days: Optional[pulumi.Input[int]] = None,
                 rotation_interval: Optional[pulumi.Input[str]] = None,
                 secret_data_type: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 version_stages: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a Secret resource.
        :param pulumi.Input[str] secret_data: The value of the secret that you want to create. Secrets Manager encrypts the secret value and stores it in the initial version.
        :param pulumi.Input[str] secret_name: The name of the secret.
        :param pulumi.Input[str] version_id: The version number of the initial version. Version numbers are unique in each secret object.
        :param pulumi.Input[str] description: The description of the secret.
        :param pulumi.Input[bool] enable_automatic_rotation: Whether to enable automatic key rotation.
        :param pulumi.Input[str] encryption_key_id: The ID of the KMS CMK that is used to encrypt the secret value. If you do not specify this parameter, Secrets Manager automatically creates an encryption key to encrypt the secret.
        :param pulumi.Input[bool] force_delete_without_recovery: Specifies whether to forcibly delete the secret. If this parameter is set to true, the secret cannot be recovered. Valid values: true, false. Default to: false.
        :param pulumi.Input[int] recovery_window_in_days: Specifies the recovery period of the secret if you do not forcibly delete it. Default value: 30. It will be ignored when `force_delete_without_recovery` is true.
        :param pulumi.Input[str] rotation_interval: The time period of automatic rotation. The format is integer[unit], where integer represents the length of time, and unit represents the unit of time. The legal unit units are: d (day), h (hour), m (minute), s (second). 7d or 604800s both indicate a 7-day cycle.
        :param pulumi.Input[str] secret_data_type: The type of the secret value. Valid values: text, binary. Default to "text".
        :param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] version_stages: ) The stage labels that mark the new secret version. If you do not specify this parameter, Secrets Manager marks it with "ACSCurrent".
        """
        # Required arguments are always recorded on the input-type instance.
        pulumi.set(__self__, "secret_data", secret_data)
        pulumi.set(__self__, "secret_name", secret_name)
        pulumi.set(__self__, "version_id", version_id)
        # Optional arguments are only set when provided, so unset values stay
        # absent and the provider can apply its own defaults.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if enable_automatic_rotation is not None:
            pulumi.set(__self__, "enable_automatic_rotation", enable_automatic_rotation)
        if encryption_key_id is not None:
            pulumi.set(__self__, "encryption_key_id", encryption_key_id)
        if force_delete_without_recovery is not None:
            pulumi.set(__self__, "force_delete_without_recovery", force_delete_without_recovery)
        if recovery_window_in_days is not None:
            pulumi.set(__self__, "recovery_window_in_days", recovery_window_in_days)
        if rotation_interval is not None:
            pulumi.set(__self__, "rotation_interval", rotation_interval)
        if secret_data_type is not None:
            pulumi.set(__self__, "secret_data_type", secret_data_type)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if version_stages is not None:
            pulumi.set(__self__, "version_stages", version_stages)

    # Each property pair below maps the snake_case Python attribute to the
    # camelCase Pulumi schema name via pulumi.get/pulumi.set.
    @property
    @pulumi.getter(name="secretData")
    def secret_data(self) -> pulumi.Input[str]:
        """
        The value of the secret that you want to create. Secrets Manager encrypts the secret value and stores it in the initial version.
        """
        return pulumi.get(self, "secret_data")

    @secret_data.setter
    def secret_data(self, value: pulumi.Input[str]):
        pulumi.set(self, "secret_data", value)

    @property
    @pulumi.getter(name="secretName")
    def secret_name(self) -> pulumi.Input[str]:
        """
        The name of the secret.
        """
        return pulumi.get(self, "secret_name")

    @secret_name.setter
    def secret_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "secret_name", value)

    @property
    @pulumi.getter(name="versionId")
    def version_id(self) -> pulumi.Input[str]:
        """
        The version number of the initial version. Version numbers are unique in each secret object.
        """
        return pulumi.get(self, "version_id")

    @version_id.setter
    def version_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "version_id", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the secret.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="enableAutomaticRotation")
    def enable_automatic_rotation(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to enable automatic key rotation.
        """
        return pulumi.get(self, "enable_automatic_rotation")

    @enable_automatic_rotation.setter
    def enable_automatic_rotation(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_automatic_rotation", value)

    @property
    @pulumi.getter(name="encryptionKeyId")
    def encryption_key_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the KMS CMK that is used to encrypt the secret value. If you do not specify this parameter, Secrets Manager automatically creates an encryption key to encrypt the secret.
        """
        return pulumi.get(self, "encryption_key_id")

    @encryption_key_id.setter
    def encryption_key_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "encryption_key_id", value)

    @property
    @pulumi.getter(name="forceDeleteWithoutRecovery")
    def force_delete_without_recovery(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies whether to forcibly delete the secret. If this parameter is set to true, the secret cannot be recovered. Valid values: true, false. Default to: false.
        """
        return pulumi.get(self, "force_delete_without_recovery")

    @force_delete_without_recovery.setter
    def force_delete_without_recovery(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "force_delete_without_recovery", value)

    @property
    @pulumi.getter(name="recoveryWindowInDays")
    def recovery_window_in_days(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the recovery period of the secret if you do not forcibly delete it. Default value: 30. It will be ignored when `force_delete_without_recovery` is true.
        """
        return pulumi.get(self, "recovery_window_in_days")

    @recovery_window_in_days.setter
    def recovery_window_in_days(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "recovery_window_in_days", value)

    @property
    @pulumi.getter(name="rotationInterval")
    def rotation_interval(self) -> Optional[pulumi.Input[str]]:
        """
        The time period of automatic rotation. The format is integer[unit], where integer represents the length of time, and unit represents the unit of time. The legal unit units are: d (day), h (hour), m (minute), s (second). 7d or 604800s both indicate a 7-day cycle.
        """
        return pulumi.get(self, "rotation_interval")

    @rotation_interval.setter
    def rotation_interval(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "rotation_interval", value)

    @property
    @pulumi.getter(name="secretDataType")
    def secret_data_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of the secret value. Valid values: text, binary. Default to "text".
        """
        return pulumi.get(self, "secret_data_type")

    @secret_data_type.setter
    def secret_data_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "secret_data_type", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="versionStages")
    def version_stages(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        ) The stage labels that mark the new secret version. If you do not specify this parameter, Secrets Manager marks it with "ACSCurrent".
        """
        return pulumi.get(self, "version_stages")

    @version_stages.setter
    def version_stages(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "version_stages", value)
@pulumi.input_type
class _SecretState:
def __init__(__self__, *,
arn: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
enable_automatic_rotation: Optional[pulumi.Input[bool]] = None,
encryption_key_id: Optional[pulumi.Input[str]] = None,
force_delete_without_recovery: Optional[pulumi.Input[bool]] = None,
planned_delete_time: Optional[pulumi.Input[str]] = None,
recovery_window_in_days: Optional[pulumi.Input[int]] = None,
rotation_interval: Optional[pulumi.Input[str]] = None,
secret_data: Optional[pulumi.Input[str]] = None,
secret_data_type: Optional[pulumi.Input[str]] = None,
secret_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
version_id: Optional[pulumi.Input[str]] = None,
version_stages: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering Secret resources.
:param pulumi.Input[str] arn: The Alicloud Resource Name (ARN) of the secret.
:param pulumi.Input[str] description: The description of the secret.
:param pulumi.Input[bool] enable_automatic_rotation: Whether to enable automatic key rotation.
:param pulumi.Input[str] encryption_key_id: The ID of the KMS CMK that is used to encrypt the secret value. If you do not specify this parameter, Secrets Manager automatically creates an encryption key to encrypt the secret.
:param pulumi.Input[bool] force_delete_without_recovery: Specifies whether to forcibly delete the secret. If this parameter is set to true, the secret cannot be recovered. Valid values: true, false. Default to: false.
:param pulumi.Input[str] planned_delete_time: The time when the secret is scheduled to be deleted.
:param pulumi.Input[int] recovery_window_in_days: Specifies the recovery period of the secret if you do not forcibly delete it. Default value: 30. It will be ignored when `force_delete_without_recovery` is true.
:param pulumi.Input[str] rotation_interval: The time period of automatic rotation. The format is integer[unit], where integer represents the length of time, and unit represents the unit of time. The legal unit units are: d (day), h (hour), m (minute), s (second). 7d or 604800s both indicate a 7-day cycle.
:param pulumi.Input[str] secret_data: The value of the secret that you want to create. Secrets Manager encrypts the secret value and stores it in the initial version.
:param pulumi.Input[str] secret_data_type: The type of the secret value. Valid values: text, binary. Default to "text".
:param pulumi.Input[str] secret_name: The name of the secret.
:param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[str] version_id: The version number of the initial version. Version numbers are unique in each secret object.
:param pulumi.Input[Sequence[pulumi.Input[str]]] version_stages: ) The stage labels that mark the new secret version. If you do not specify this parameter, Secrets Manager marks it with "ACSCurrent".
"""
if arn is not None:
pulumi.set(__self__, "arn", arn)
if description is not None:
pulumi.set(__self__, "description", description)
if enable_automatic_rotation is not None:
pulumi.set(__self__, "enable_automatic_rotation", enable_automatic_rotation)
if encryption_key_id is not None:
pulumi.set(__self__, "encryption_key_id", encryption_key_id)
if force_delete_without_recovery is not None:
pulumi.set(__self__, "force_delete_without_recovery", force_delete_without_recovery)
if planned_delete_time is not None:
pulumi.set(__self__, "planned_delete_time", planned_delete_time)
if recovery_window_in_days is not None:
pulumi.set(__self__, "recovery_window_in_days", recovery_window_in_days)
if rotation_interval is not None:
pulumi.set(__self__, "rotation_interval", rotation_interval)
if secret_data is not None:
pulumi.set(__self__, "secret_data", secret_data)
if secret_data_type is not None:
pulumi.set(__self__, "secret_data_type", secret_data_type)
if secret_name is not None:
pulumi.set(__self__, "secret_name", secret_name)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if version_id is not None:
pulumi.set(__self__, "version_id", version_id)
if version_stages is not None:
pulumi.set(__self__, "version_stages", version_stages)
@property
@pulumi.getter
def arn(self) -> Optional[pulumi.Input[str]]:
"""
The Alicloud Resource Name (ARN) of the secret.
"""
return pulumi.get(self, "arn")
@arn.setter
def arn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "arn", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the secret.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
# State field "enable_automatic_rotation" (wire name "enableAutomaticRotation").
@property
@pulumi.getter(name="enableAutomaticRotation")
def enable_automatic_rotation(self) -> Optional[pulumi.Input[bool]]:
    """
    Whether to enable automatic key rotation.
    """
    return pulumi.get(self, "enable_automatic_rotation")

@enable_automatic_rotation.setter
def enable_automatic_rotation(self, value: Optional[pulumi.Input[bool]]):
    # Stores the value in pulumi's internal property table for this object.
    pulumi.set(self, "enable_automatic_rotation", value)
# State field "encryption_key_id" (wire name "encryptionKeyId").
@property
@pulumi.getter(name="encryptionKeyId")
def encryption_key_id(self) -> Optional[pulumi.Input[str]]:
    """
    The ID of the KMS CMK that is used to encrypt the secret value. If you do not specify this parameter, Secrets Manager automatically creates an encryption key to encrypt the secret.
    """
    return pulumi.get(self, "encryption_key_id")

@encryption_key_id.setter
def encryption_key_id(self, value: Optional[pulumi.Input[str]]):
    # Stores the value in pulumi's internal property table for this object.
    pulumi.set(self, "encryption_key_id", value)
# State field "force_delete_without_recovery" (wire name "forceDeleteWithoutRecovery").
@property
@pulumi.getter(name="forceDeleteWithoutRecovery")
def force_delete_without_recovery(self) -> Optional[pulumi.Input[bool]]:
    """
    Specifies whether to forcibly delete the secret. If this parameter is set to true, the secret cannot be recovered. Valid values: true, false. Default to: false.
    """
    return pulumi.get(self, "force_delete_without_recovery")

@force_delete_without_recovery.setter
def force_delete_without_recovery(self, value: Optional[pulumi.Input[bool]]):
    # Stores the value in pulumi's internal property table for this object.
    pulumi.set(self, "force_delete_without_recovery", value)
# State field "planned_delete_time" (wire name "plannedDeleteTime"); provider-computed.
@property
@pulumi.getter(name="plannedDeleteTime")
def planned_delete_time(self) -> Optional[pulumi.Input[str]]:
    """
    The time when the secret is scheduled to be deleted.
    """
    return pulumi.get(self, "planned_delete_time")

@planned_delete_time.setter
def planned_delete_time(self, value: Optional[pulumi.Input[str]]):
    # Stores the value in pulumi's internal property table for this object.
    pulumi.set(self, "planned_delete_time", value)
# State field "recovery_window_in_days" (wire name "recoveryWindowInDays").
@property
@pulumi.getter(name="recoveryWindowInDays")
def recovery_window_in_days(self) -> Optional[pulumi.Input[int]]:
    """
    Specifies the recovery period of the secret if you do not forcibly delete it. Default value: 30. It will be ignored when `force_delete_without_recovery` is true.
    """
    return pulumi.get(self, "recovery_window_in_days")

@recovery_window_in_days.setter
def recovery_window_in_days(self, value: Optional[pulumi.Input[int]]):
    # Stores the value in pulumi's internal property table for this object.
    pulumi.set(self, "recovery_window_in_days", value)
# State field "rotation_interval" (wire name "rotationInterval").
@property
@pulumi.getter(name="rotationInterval")
def rotation_interval(self) -> Optional[pulumi.Input[str]]:
    """
    The time period of automatic rotation. The format is integer[unit], where integer represents the length of time, and unit represents the unit of time. The legal unit units are: d (day), h (hour), m (minute), s (second). 7d or 604800s both indicate a 7-day cycle.
    """
    return pulumi.get(self, "rotation_interval")

@rotation_interval.setter
def rotation_interval(self, value: Optional[pulumi.Input[str]]):
    # Stores the value in pulumi's internal property table for this object.
    pulumi.set(self, "rotation_interval", value)
# State field "secret_data" (wire name "secretData").
@property
@pulumi.getter(name="secretData")
def secret_data(self) -> Optional[pulumi.Input[str]]:
    """
    The value of the secret that you want to create. Secrets Manager encrypts the secret value and stores it in the initial version.
    """
    return pulumi.get(self, "secret_data")

@secret_data.setter
def secret_data(self, value: Optional[pulumi.Input[str]]):
    # Stores the value in pulumi's internal property table for this object.
    pulumi.set(self, "secret_data", value)
# State field "secret_data_type" (wire name "secretDataType").
@property
@pulumi.getter(name="secretDataType")
def secret_data_type(self) -> Optional[pulumi.Input[str]]:
    """
    The type of the secret value. Valid values: text, binary. Default to "text".
    """
    return pulumi.get(self, "secret_data_type")

@secret_data_type.setter
def secret_data_type(self, value: Optional[pulumi.Input[str]]):
    # Stores the value in pulumi's internal property table for this object.
    pulumi.set(self, "secret_data_type", value)
# State field "secret_name" (wire name "secretName").
@property
@pulumi.getter(name="secretName")
def secret_name(self) -> Optional[pulumi.Input[str]]:
    """
    The name of the secret.
    """
    return pulumi.get(self, "secret_name")

@secret_name.setter
def secret_name(self, value: Optional[pulumi.Input[str]]):
    # Stores the value in pulumi's internal property table for this object.
    pulumi.set(self, "secret_name", value)
# State field "tags".
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
    """
    A mapping of tags to assign to the resource.
    """
    return pulumi.get(self, "tags")

@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
    # Stores the value in pulumi's internal property table for this object.
    pulumi.set(self, "tags", value)
# State field "version_id" (wire name "versionId").
@property
@pulumi.getter(name="versionId")
def version_id(self) -> Optional[pulumi.Input[str]]:
    """
    The version number of the initial version. Version numbers are unique in each secret object.
    """
    return pulumi.get(self, "version_id")

@version_id.setter
def version_id(self, value: Optional[pulumi.Input[str]]):
    # Stores the value in pulumi's internal property table for this object.
    pulumi.set(self, "version_id", value)
# State field "version_stages" (wire name "versionStages").
@property
@pulumi.getter(name="versionStages")
def version_stages(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """
    The stage labels that mark the new secret version. If you do not specify this parameter, Secrets Manager marks it with "ACSCurrent".
    """
    return pulumi.get(self, "version_stages")

@version_stages.setter
def version_stages(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
    # Stores the value in pulumi's internal property table for this object.
    pulumi.set(self, "version_stages", value)
class Secret(pulumi.CustomResource):
    # Pulumi resource for an Alibaba Cloud KMS secret. The two @overload
    # __init__ signatures exist only for type checkers; the real dispatcher
    # below forwards to _internal_init.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 enable_automatic_rotation: Optional[pulumi.Input[bool]] = None,
                 encryption_key_id: Optional[pulumi.Input[str]] = None,
                 force_delete_without_recovery: Optional[pulumi.Input[bool]] = None,
                 recovery_window_in_days: Optional[pulumi.Input[int]] = None,
                 rotation_interval: Optional[pulumi.Input[str]] = None,
                 secret_data: Optional[pulumi.Input[str]] = None,
                 secret_data_type: Optional[pulumi.Input[str]] = None,
                 secret_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 version_id: Optional[pulumi.Input[str]] = None,
                 version_stages: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 __props__=None):
        """
        This resource is used to create a secret and store its initial version.

        > **NOTE:** Available in 1.76.0+.

        ## Example Usage

        Basic Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        default = alicloud.kms.Secret("default",
            description="from terraform",
            force_delete_without_recovery=True,
            secret_data="Secret data.",
            secret_name="secret-foo",
            version_id="000000000001")
        ```

        ## Import

        KMS secret can be imported using the id, e.g.

        ```sh
        $ pulumi import alicloud:kms/secret:Secret default secret-foo
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: The description of the secret.
        :param pulumi.Input[bool] enable_automatic_rotation: Whether to enable automatic key rotation.
        :param pulumi.Input[str] encryption_key_id: The ID of the KMS CMK that is used to encrypt the secret value. If you do not specify this parameter, Secrets Manager automatically creates an encryption key to encrypt the secret.
        :param pulumi.Input[bool] force_delete_without_recovery: Specifies whether to forcibly delete the secret. If this parameter is set to true, the secret cannot be recovered. Valid values: true, false. Default to: false.
        :param pulumi.Input[int] recovery_window_in_days: Specifies the recovery period of the secret if you do not forcibly delete it. Default value: 30. It will be ignored when `force_delete_without_recovery` is true.
        :param pulumi.Input[str] rotation_interval: The time period of automatic rotation. The format is integer[unit], where integer represents the length of time, and unit represents the unit of time. The legal unit units are: d (day), h (hour), m (minute), s (second). 7d or 604800s both indicate a 7-day cycle.
        :param pulumi.Input[str] secret_data: The value of the secret that you want to create. Secrets Manager encrypts the secret value and stores it in the initial version.
        :param pulumi.Input[str] secret_data_type: The type of the secret value. Valid values: text, binary. Default to "text".
        :param pulumi.Input[str] secret_name: The name of the secret.
        :param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] version_id: The version number of the initial version. Version numbers are unique in each secret object.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] version_stages: The stage labels that mark the new secret version. If you do not specify this parameter, Secrets Manager marks it with "ACSCurrent".
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: SecretArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        This resource is used to create a secret and store its initial version.

        > **NOTE:** Available in 1.76.0+.

        ## Example Usage

        Basic Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        default = alicloud.kms.Secret("default",
            description="from terraform",
            force_delete_without_recovery=True,
            secret_data="Secret data.",
            secret_name="secret-foo",
            version_id="000000000001")
        ```

        ## Import

        KMS secret can be imported using the id, e.g.

        ```sh
        $ pulumi import alicloud:kms/secret:Secret default secret-foo
        ```

        :param str resource_name: The name of the resource.
        :param SecretArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: detect whether the
        # caller passed a SecretArgs object or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(SecretArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            # Args-object form: expand its fields into keyword arguments.
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            # Keyword form: pass positional/keyword arguments straight through.
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       enable_automatic_rotation: Optional[pulumi.Input[bool]] = None,
                       encryption_key_id: Optional[pulumi.Input[str]] = None,
                       force_delete_without_recovery: Optional[pulumi.Input[bool]] = None,
                       recovery_window_in_days: Optional[pulumi.Input[int]] = None,
                       rotation_interval: Optional[pulumi.Input[str]] = None,
                       secret_data: Optional[pulumi.Input[str]] = None,
                       secret_data_type: Optional[pulumi.Input[str]] = None,
                       secret_name: Optional[pulumi.Input[str]] = None,
                       tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       version_id: Optional[pulumi.Input[str]] = None,
                       version_stages: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads: validates
        # options, builds the SecretArgs property bag, and registers the
        # resource with the engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (as opposed to looking one up by id):
            # __props__ may only be supplied together with opts.id.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = SecretArgs.__new__(SecretArgs)
            __props__.__dict__["description"] = description
            __props__.__dict__["enable_automatic_rotation"] = enable_automatic_rotation
            __props__.__dict__["encryption_key_id"] = encryption_key_id
            __props__.__dict__["force_delete_without_recovery"] = force_delete_without_recovery
            __props__.__dict__["recovery_window_in_days"] = recovery_window_in_days
            __props__.__dict__["rotation_interval"] = rotation_interval
            # secret_data, secret_name and version_id are required unless the
            # engine supplied a URN (i.e. the resource is being rehydrated).
            if secret_data is None and not opts.urn:
                raise TypeError("Missing required property 'secret_data'")
            __props__.__dict__["secret_data"] = secret_data
            __props__.__dict__["secret_data_type"] = secret_data_type
            if secret_name is None and not opts.urn:
                raise TypeError("Missing required property 'secret_name'")
            __props__.__dict__["secret_name"] = secret_name
            __props__.__dict__["tags"] = tags
            if version_id is None and not opts.urn:
                raise TypeError("Missing required property 'version_id'")
            __props__.__dict__["version_id"] = version_id
            __props__.__dict__["version_stages"] = version_stages
            # arn and planned_delete_time are output-only: computed by the
            # provider, never supplied by the caller.
            __props__.__dict__["arn"] = None
            __props__.__dict__["planned_delete_time"] = None
        super(Secret, __self__).__init__(
            'alicloud:kms/secret:Secret',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            arn: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            enable_automatic_rotation: Optional[pulumi.Input[bool]] = None,
            encryption_key_id: Optional[pulumi.Input[str]] = None,
            force_delete_without_recovery: Optional[pulumi.Input[bool]] = None,
            planned_delete_time: Optional[pulumi.Input[str]] = None,
            recovery_window_in_days: Optional[pulumi.Input[int]] = None,
            rotation_interval: Optional[pulumi.Input[str]] = None,
            secret_data: Optional[pulumi.Input[str]] = None,
            secret_data_type: Optional[pulumi.Input[str]] = None,
            secret_name: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            version_id: Optional[pulumi.Input[str]] = None,
            version_stages: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Secret':
        """
        Get an existing Secret resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] arn: The Alicloud Resource Name (ARN) of the secret.
        :param pulumi.Input[str] description: The description of the secret.
        :param pulumi.Input[bool] enable_automatic_rotation: Whether to enable automatic key rotation.
        :param pulumi.Input[str] encryption_key_id: The ID of the KMS CMK that is used to encrypt the secret value. If you do not specify this parameter, Secrets Manager automatically creates an encryption key to encrypt the secret.
        :param pulumi.Input[bool] force_delete_without_recovery: Specifies whether to forcibly delete the secret. If this parameter is set to true, the secret cannot be recovered. Valid values: true, false. Default to: false.
        :param pulumi.Input[str] planned_delete_time: The time when the secret is scheduled to be deleted.
        :param pulumi.Input[int] recovery_window_in_days: Specifies the recovery period of the secret if you do not forcibly delete it. Default value: 30. It will be ignored when `force_delete_without_recovery` is true.
        :param pulumi.Input[str] rotation_interval: The time period of automatic rotation. The format is integer[unit], where integer represents the length of time, and unit represents the unit of time. The legal unit units are: d (day), h (hour), m (minute), s (second). 7d or 604800s both indicate a 7-day cycle.
        :param pulumi.Input[str] secret_data: The value of the secret that you want to create. Secrets Manager encrypts the secret value and stores it in the initial version.
        :param pulumi.Input[str] secret_data_type: The type of the secret value. Valid values: text, binary. Default to "text".
        :param pulumi.Input[str] secret_name: The name of the secret.
        :param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] version_id: The version number of the initial version. Version numbers are unique in each secret object.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] version_stages: The stage labels that mark the new secret version. If you do not specify this parameter, Secrets Manager marks it with "ACSCurrent".
        """
        # Merge the caller's options with the id that identifies the existing
        # resource, then rehydrate from a _SecretState property bag.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _SecretState.__new__(_SecretState)
        __props__.__dict__["arn"] = arn
        __props__.__dict__["description"] = description
        __props__.__dict__["enable_automatic_rotation"] = enable_automatic_rotation
        __props__.__dict__["encryption_key_id"] = encryption_key_id
        __props__.__dict__["force_delete_without_recovery"] = force_delete_without_recovery
        __props__.__dict__["planned_delete_time"] = planned_delete_time
        __props__.__dict__["recovery_window_in_days"] = recovery_window_in_days
        __props__.__dict__["rotation_interval"] = rotation_interval
        __props__.__dict__["secret_data"] = secret_data
        __props__.__dict__["secret_data_type"] = secret_data_type
        __props__.__dict__["secret_name"] = secret_name
        __props__.__dict__["tags"] = tags
        __props__.__dict__["version_id"] = version_id
        __props__.__dict__["version_stages"] = version_stages
        return Secret(resource_name, opts=opts, __props__=__props__)

    # Read-only output properties. These surface the resolved values of the
    # resource's attributes after deployment.
    @property
    @pulumi.getter
    def arn(self) -> pulumi.Output[str]:
        """
        The Alicloud Resource Name (ARN) of the secret.
        """
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        The description of the secret.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="enableAutomaticRotation")
    def enable_automatic_rotation(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether to enable automatic key rotation.
        """
        return pulumi.get(self, "enable_automatic_rotation")

    @property
    @pulumi.getter(name="encryptionKeyId")
    def encryption_key_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of the KMS CMK that is used to encrypt the secret value. If you do not specify this parameter, Secrets Manager automatically creates an encryption key to encrypt the secret.
        """
        return pulumi.get(self, "encryption_key_id")

    @property
    @pulumi.getter(name="forceDeleteWithoutRecovery")
    def force_delete_without_recovery(self) -> pulumi.Output[Optional[bool]]:
        """
        Specifies whether to forcibly delete the secret. If this parameter is set to true, the secret cannot be recovered. Valid values: true, false. Default to: false.
        """
        return pulumi.get(self, "force_delete_without_recovery")

    @property
    @pulumi.getter(name="plannedDeleteTime")
    def planned_delete_time(self) -> pulumi.Output[str]:
        """
        The time when the secret is scheduled to be deleted.
        """
        return pulumi.get(self, "planned_delete_time")

    @property
    @pulumi.getter(name="recoveryWindowInDays")
    def recovery_window_in_days(self) -> pulumi.Output[Optional[int]]:
        """
        Specifies the recovery period of the secret if you do not forcibly delete it. Default value: 30. It will be ignored when `force_delete_without_recovery` is true.
        """
        return pulumi.get(self, "recovery_window_in_days")

    @property
    @pulumi.getter(name="rotationInterval")
    def rotation_interval(self) -> pulumi.Output[Optional[str]]:
        """
        The time period of automatic rotation. The format is integer[unit], where integer represents the length of time, and unit represents the unit of time. The legal unit units are: d (day), h (hour), m (minute), s (second). 7d or 604800s both indicate a 7-day cycle.
        """
        return pulumi.get(self, "rotation_interval")

    @property
    @pulumi.getter(name="secretData")
    def secret_data(self) -> pulumi.Output[str]:
        """
        The value of the secret that you want to create. Secrets Manager encrypts the secret value and stores it in the initial version.
        """
        return pulumi.get(self, "secret_data")

    @property
    @pulumi.getter(name="secretDataType")
    def secret_data_type(self) -> pulumi.Output[Optional[str]]:
        """
        The type of the secret value. Valid values: text, binary. Default to "text".
        """
        return pulumi.get(self, "secret_data_type")

    @property
    @pulumi.getter(name="secretName")
    def secret_name(self) -> pulumi.Output[str]:
        """
        The name of the secret.
        """
        return pulumi.get(self, "secret_name")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="versionId")
    def version_id(self) -> pulumi.Output[str]:
        """
        The version number of the initial version. Version numbers are unique in each secret object.
        """
        return pulumi.get(self, "version_id")

    @property
    @pulumi.getter(name="versionStages")
    def version_stages(self) -> pulumi.Output[Sequence[str]]:
        """
        The stage labels that mark the new secret version. If you do not specify this parameter, Secrets Manager marks it with "ACSCurrent".
        """
        return pulumi.get(self, "version_stages")
| 50.180412
| 314
| 0.671469
| 4,944
| 38,940
| 5.071804
| 0.048746
| 0.081157
| 0.065882
| 0.054397
| 0.934995
| 0.921555
| 0.9068
| 0.895314
| 0.889053
| 0.879681
| 0
| 0.003445
| 0.232178
| 38,940
| 775
| 315
| 50.245161
| 0.835212
| 0.364561
| 0
| 0.78733
| 1
| 0
| 0.115443
| 0.037769
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165158
| false
| 0.002262
| 0.011312
| 0
| 0.276018
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
06d7b8e0bdba7d2c2aa4efeb8daa39dbe240b2fe
| 84
|
py
|
Python
|
pyxai/pyxai.py
|
ruivieira/python-experiments
|
c25cc7644d1437898ae5b0fa8073c5ae1442c3ba
|
[
"Apache-2.0"
] | null | null | null |
pyxai/pyxai.py
|
ruivieira/python-experiments
|
c25cc7644d1437898ae5b0fa8073c5ae1442c3ba
|
[
"Apache-2.0"
] | null | null | null |
pyxai/pyxai.py
|
ruivieira/python-experiments
|
c25cc7644d1437898ae5b0fa8073c5ae1442c3ba
|
[
"Apache-2.0"
] | null | null | null |
"""A library for AI/ML explainability"""
# INFO: A library for AI/ML explainability
| 28
| 42
| 0.738095
| 13
| 84
| 4.769231
| 0.538462
| 0.258065
| 0.354839
| 0.419355
| 0.935484
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 84
| 2
| 43
| 42
| 0.861111
| 0.904762
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
06ddb2ce9ee5eacb85c348c058081a33b1f1c374
| 92
|
py
|
Python
|
parameters_8000.py
|
hooshingschaefer/webapps
|
f812fc6eaec4d9620e660e33af473539c3f33c31
|
[
"BSD-3-Clause"
] | null | null | null |
parameters_8000.py
|
hooshingschaefer/webapps
|
f812fc6eaec4d9620e660e33af473539c3f33c31
|
[
"BSD-3-Clause"
] | null | null | null |
parameters_8000.py
|
hooshingschaefer/webapps
|
f812fc6eaec4d9620e660e33af473539c3f33c31
|
[
"BSD-3-Clause"
] | null | null | null |
password="pbkdf2(1000,20,sha512)$be04abf38bec91a3$f81d6d31f110ab44caf6dba0940bcfe455331c79"
| 46
| 91
| 0.891304
| 7
| 92
| 11.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.43956
| 0.01087
| 92
| 1
| 92
| 92
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0.869565
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
06f6ce29593ee11b5de2ca4f785129ba7d311d22
| 115
|
py
|
Python
|
pyspawn/qm_hamiltonian/__init__.py
|
dfedorov1988/MCDMS
|
215e18327ad9f806d82eb54d101d657d0ac29bd6
|
[
"MIT"
] | 3
|
2019-03-01T21:07:12.000Z
|
2019-05-03T18:47:38.000Z
|
pyspawn/qm_hamiltonian/__init__.py
|
dfedorov1988/MCDMS
|
215e18327ad9f806d82eb54d101d657d0ac29bd6
|
[
"MIT"
] | null | null | null |
pyspawn/qm_hamiltonian/__init__.py
|
dfedorov1988/MCDMS
|
215e18327ad9f806d82eb54d101d657d0ac29bd6
|
[
"MIT"
] | null | null | null |
import pyspawn.qm_hamiltonian.adiabatic
import pyspawn.qm_hamiltonian.dgas
import pyspawn.qm_hamiltonian.ehrenfest
| 28.75
| 39
| 0.895652
| 15
| 115
| 6.666667
| 0.466667
| 0.39
| 0.45
| 0.78
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052174
| 115
| 3
| 40
| 38.333333
| 0.917431
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
662adaaf56f355f55beadf40b49bfbed1e029158
| 253
|
py
|
Python
|
test1.py
|
tapika/test_stackless
|
09db0546667973c102c859e3961052f8d834f765
|
[
"MIT"
] | null | null | null |
test1.py
|
tapika/test_stackless
|
09db0546667973c102c859e3961052f8d834f765
|
[
"MIT"
] | null | null | null |
test1.py
|
tapika/test_stackless
|
09db0546667973c102c859e3961052f8d834f765
|
[
"MIT"
] | null | null | null |
import stackless
import time
print('-> test1')
#time.sleep(0.5)
stackless.schedule()
time.sleep(0.01)
#stackless.schedule()
#time.sleep(0.5)
#stackless.schedule()
#time.sleep(0.5)
stackless.schedule()
#time.sleep(0.5)
print('<- test1')
| 15.8125
| 22
| 0.667984
| 36
| 253
| 4.694444
| 0.277778
| 0.266272
| 0.295858
| 0.260355
| 0.721893
| 0.721893
| 0.721893
| 0.721893
| 0.721893
| 0.497041
| 0
| 0.059361
| 0.134387
| 253
| 15
| 23
| 16.866667
| 0.712329
| 0.395257
| 0
| 0.285714
| 0
| 0
| 0.122137
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.285714
| 0
| 0.285714
| 0.285714
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b07aa32ae220f916d6bd12681a11232167f8bef7
| 700
|
py
|
Python
|
exam/fixtures.py
|
paolostivanin/exam
|
27dc53a703349ec09433a6b989d6fc32ad523c0b
|
[
"MIT"
] | 46
|
2015-01-31T10:27:51.000Z
|
2020-11-28T19:04:11.000Z
|
exam/fixtures.py
|
paolostivanin/exam
|
27dc53a703349ec09433a6b989d6fc32ad523c0b
|
[
"MIT"
] | 7
|
2015-02-16T17:14:20.000Z
|
2022-01-05T04:21:27.000Z
|
exam/fixtures.py
|
paolostivanin/exam
|
27dc53a703349ec09433a6b989d6fc32ad523c0b
|
[
"MIT"
] | 10
|
2015-02-08T22:47:58.000Z
|
2020-10-12T09:51:19.000Z
|
#: A string representation of a 2px square GIF, suitable for use in PIL.
two_px_square_image = (
'GIF87a\x02\x00\x02\x00\xb3\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00' +
'\x00\x00\x00\x00\x00\x00\x00\xff\x00\xff\x00\x00\x00\x00\x00\x00' +
'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
'\x00\x00\x00\x00\x00\x00\x00\x00,\x00\x00\x00\x00\x02\x00\x02\x00' +
'\x00\x04\x04\x10\x94\x02"\x00;'
)
#: A string representation of a 1px square GIF, suitable for use in PIL.
one_px_spacer = (
'GIF89a\x01\x00\x01\x00\x80\x00\x00\xdb\xdf\xef\x00\x00\x00!\xf9\x04' +
'\x01\x00\x00\x00\x00,\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D' +
'\x01\x00;'
)
| 43.75
| 75
| 0.662857
| 138
| 700
| 3.326087
| 0.246377
| 0.745098
| 0.960784
| 1.150327
| 0.618736
| 0.514161
| 0.46841
| 0.346405
| 0.294118
| 0.294118
| 0
| 0.318555
| 0.13
| 700
| 15
| 76
| 46.666667
| 0.43514
| 0.202857
| 0
| 0
| 0
| 0.5
| 0.77518
| 0.758993
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b0b52655ade651a6874560f951d7253df62d5529
| 3,073
|
py
|
Python
|
navicatGA/test/test_float.py
|
lcmd-epfl/NaviCatGA
|
7317f3420a4a9124ebbc7224aaf46d3f27830d0a
|
[
"MIT"
] | 1
|
2022-03-07T13:16:37.000Z
|
2022-03-07T13:16:37.000Z
|
navicatGA/test/test_float.py
|
lcmd-epfl/NaviCatGA
|
7317f3420a4a9124ebbc7224aaf46d3f27830d0a
|
[
"MIT"
] | null | null | null |
navicatGA/test/test_float.py
|
lcmd-epfl/NaviCatGA
|
7317f3420a4a9124ebbc7224aaf46d3f27830d0a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from navicatGA.float_solver import FloatGenAlgSolver
from navicatGA.fitness_functions_float import fitness_function_float
def test_float_08():
solver = FloatGenAlgSolver(
n_genes=6,
pop_size=50,
max_gen=50,
mutation_rate=0.05,
selection_rate=0.25,
variables_limits=(0, 1),
fitness_function=fitness_function_float(10),
selection_strategy="roulette_wheel",
logger_level="TRACE",
n_crossover_points=1,
random_state=420,
to_file=False,
verbose=True,
)
solver.solve()
print(
"The maximum of the Hartmann6 function is found at (0.2, 0.15, 0.47, 0.27, 0.31, 0.65)"
)
print(
"The GA run found a maxima at ({0}, {1}, {2}, {3}, {4}, {5})".format(
solver.best_individual_[0],
solver.best_individual_[1],
solver.best_individual_[2],
solver.best_individual_[3],
solver.best_individual_[4],
solver.best_individual_[5],
)
)
solver.close_solver_logger()
def test_float_09():
solver = FloatGenAlgSolver(
n_genes=6,
pop_size=50,
max_gen=50,
mutation_rate=0.05,
selection_rate=0.25,
variables_limits=(0, 1),
fitness_function=fitness_function_float(10),
selection_strategy="tournament",
logger_level="TRACE",
n_crossover_points=2,
random_state=420,
to_file=False,
verbose=True,
)
solver.solve()
print(
"The maximum of the Hartmann6 function is found at (0.2, 0.15, 0.47, 0.27, 0.31, 0.65)"
)
print(
"The GA run found a maxima at ({0}, {1}, {2}, {3}, {4}, {5})".format(
solver.best_individual_[0],
solver.best_individual_[1],
solver.best_individual_[2],
solver.best_individual_[3],
solver.best_individual_[4],
solver.best_individual_[5],
)
)
solver.close_solver_logger()
def test_float_10():
solver = FloatGenAlgSolver(
n_genes=6,
pop_size=50,
max_gen=50,
mutation_rate=0.05,
selection_rate=0.25,
variables_limits=(0, 1),
fitness_function=fitness_function_float(10),
selection_strategy="random",
logger_level="TRACE",
n_crossover_points=3,
random_state=420,
to_file=False,
verbose=True,
)
solver.solve()
print(
"The maximum of the Hartmann6 function is found at (0.2, 0.15, 0.47, 0.27, 0.31, 0.65)"
)
print(
"The GA run found a maxima at ({0}, {1}, {2}, {3}, {4}, {5})".format(
solver.best_individual_[0],
solver.best_individual_[1],
solver.best_individual_[2],
solver.best_individual_[3],
solver.best_individual_[4],
solver.best_individual_[5],
)
)
solver.close_solver_logger()
if __name__ == "__main__":
test_float_08()
test_float_09()
test_float_10()
| 27.936364
| 95
| 0.582493
| 386
| 3,073
| 4.34456
| 0.209845
| 0.107335
| 0.214669
| 0.051878
| 0.871199
| 0.871199
| 0.813953
| 0.813953
| 0.813953
| 0.813953
| 0
| 0.074212
| 0.298405
| 3,073
| 109
| 96
| 28.192661
| 0.703618
| 0.006834
| 0
| 0.727273
| 0
| 0.060606
| 0.158964
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030303
| false
| 0
| 0.020202
| 0
| 0.050505
| 0.060606
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.